summaryrefslogtreecommitdiff
path: root/ironic_python_agent
diff options
context:
space:
mode:
Diffstat (limited to 'ironic_python_agent')
-rw-r--r--ironic_python_agent/__init__.py15
-rw-r--r--ironic_python_agent/agent.py251
-rw-r--r--ironic_python_agent/api/__init__.py15
-rw-r--r--ironic_python_agent/api/app.py63
-rw-r--r--ironic_python_agent/api/app.wsgi0
-rw-r--r--ironic_python_agent/api/config.py39
-rw-r--r--ironic_python_agent/api/controllers/__init__.py15
-rw-r--r--ironic_python_agent/api/controllers/root.py96
-rw-r--r--ironic_python_agent/api/controllers/v1/__init__.py118
-rw-r--r--ironic_python_agent/api/controllers/v1/base.py73
-rw-r--r--ironic_python_agent/api/controllers/v1/command.py89
-rw-r--r--ironic_python_agent/api/controllers/v1/link.py43
-rw-r--r--ironic_python_agent/api/controllers/v1/status.py44
-rw-r--r--ironic_python_agent/base.py137
-rw-r--r--ironic_python_agent/cmd/__init__.py15
-rw-r--r--ironic_python_agent/cmd/agent.py55
-rw-r--r--ironic_python_agent/configdrive.py80
-rw-r--r--ironic_python_agent/decom.py22
-rw-r--r--ironic_python_agent/decorators.py42
-rw-r--r--ironic_python_agent/encoding.py53
-rw-r--r--ironic_python_agent/errors.py179
-rw-r--r--ironic_python_agent/hardware.py183
-rw-r--r--ironic_python_agent/openstack/__init__.py0
-rw-r--r--ironic_python_agent/openstack/common/__init__.py17
-rw-r--r--ironic_python_agent/openstack/common/gettextutils.py448
-rw-r--r--ironic_python_agent/openstack/common/importutils.py73
-rw-r--r--ironic_python_agent/openstack/common/jsonutils.py174
-rw-r--r--ironic_python_agent/openstack/common/local.py45
-rw-r--r--ironic_python_agent/openstack/common/log.py712
-rw-r--r--ironic_python_agent/openstack/common/timeutils.py210
-rw-r--r--ironic_python_agent/overlord_agent_api.py105
-rw-r--r--ironic_python_agent/shell/__init__.py15
-rwxr-xr-xironic_python_agent/shell/copy_configdrive_to_disk.sh46
-rw-r--r--ironic_python_agent/shell/reboot.sh8
-rwxr-xr-xironic_python_agent/shell/write_image.sh40
-rw-r--r--ironic_python_agent/standby.py201
-rw-r--r--ironic_python_agent/tests/__init__.py15
-rw-r--r--ironic_python_agent/tests/agent.py223
-rw-r--r--ironic_python_agent/tests/api.py257
-rw-r--r--ironic_python_agent/tests/configdrive.py165
-rw-r--r--ironic_python_agent/tests/decom.py27
-rw-r--r--ironic_python_agent/tests/hardware.py67
-rw-r--r--ironic_python_agent/tests/overlord_agent_api.py171
-rw-r--r--ironic_python_agent/tests/standby.py312
-rw-r--r--ironic_python_agent/utils.py25
45 files changed, 4983 insertions, 0 deletions
diff --git a/ironic_python_agent/__init__.py b/ironic_python_agent/__init__.py
new file mode 100644
index 00000000..13e76de9
--- /dev/null
+++ b/ironic_python_agent/__init__.py
@@ -0,0 +1,15 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
diff --git a/ironic_python_agent/agent.py b/ironic_python_agent/agent.py
new file mode 100644
index 00000000..261a4877
--- /dev/null
+++ b/ironic_python_agent/agent.py
@@ -0,0 +1,251 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import random
+import threading
+import time
+
+import pkg_resources
+from stevedore import driver
+from wsgiref import simple_server
+
+from teeth_agent.api import app
+from teeth_agent import base
+from teeth_agent import encoding
+from teeth_agent import errors
+from teeth_agent import hardware
+from teeth_agent.openstack.common import log
+from teeth_agent import overlord_agent_api
+from teeth_agent import utils
+
+
+def _time():
+ """Wraps time.time() for simpler testing."""
+ return time.time()
+
+
class TeethAgentStatus(encoding.Serializable):
    """Serializable snapshot of the agent's mode, start time and version."""

    def __init__(self, mode, started_at, version):
        self.mode = mode
        self.started_at = started_at
        self.version = version

    def serialize(self):
        """Turn the status into a dict."""
        fields = [('mode', self.mode),
                  ('started_at', self.started_at),
                  ('version', self.version)]
        return utils.get_ordereddict(fields)
+
+
class TeethAgentHeartbeater(threading.Thread):
    """Background thread that periodically heartbeats to the API.

    Sleeps a jittered fraction of the server-supplied deadline between
    heartbeats, and backs off exponentially when a heartbeat fails.
    """

    # If we could wait at most N seconds between heartbeats (or in case of an
    # error) we will instead wait r x N seconds, where r is a random value
    # between these multipliers.
    min_jitter_multiplier = 0.3
    max_jitter_multiplier = 0.6

    # Exponential backoff values used in case of an error. In reality we will
    # only wait a portion of either of these delays based on the jitter
    # multipliers.
    initial_delay = 1.0
    max_delay = 300.0
    backoff_factor = 2.7

    def __init__(self, agent):
        """:param agent: the TeethAgent instance to heartbeat on behalf of."""
        super(TeethAgentHeartbeater, self).__init__()
        self.agent = agent
        self.hardware = hardware.get_manager()
        self.api = overlord_agent_api.APIClient(agent.api_url)
        self.log = log.getLogger(__name__)
        self.stop_event = threading.Event()
        self.error_delay = self.initial_delay

    def run(self):
        """Heartbeat in a loop until stop() is called."""
        # The first heartbeat happens now
        self.log.info('starting heartbeater')
        interval = 0

        while not self.stop_event.wait(interval):
            next_heartbeat_by = self.do_heartbeat()
            interval_multiplier = random.uniform(self.min_jitter_multiplier,
                                                 self.max_jitter_multiplier)
            interval = (next_heartbeat_by - _time()) * interval_multiplier
            log_msg = 'sleeping before next heartbeat, interval: {0}'
            self.log.info(log_msg.format(interval))

    def do_heartbeat(self):
        """Send one heartbeat and return the deadline for the next one."""
        try:
            deadline = self.api.heartbeat(
                uuid=self.agent.get_node_uuid(),
                advertise_address=self.agent.advertise_address
            )
            self.error_delay = self.initial_delay
            self.log.info('heartbeat successful')
        except Exception:
            # Back off exponentially (capped at max_delay) so a broken API
            # endpoint is not hammered.  The redundant trailing `pass` that
            # used to sit here was dead code and has been removed.
            self.log.exception('error sending heartbeat')
            deadline = _time() + self.error_delay
            self.error_delay = min(self.error_delay * self.backoff_factor,
                                   self.max_delay)

        return deadline

    def stop(self):
        """Signal the thread to exit and wait for it to finish."""
        self.log.info('stopping heartbeater')
        self.stop_event.set()
        return self.join()
+
+
class TeethAgent(object):
    """Core agent object: serves the WSGI API, heartbeats to Ironic, and
    executes mode-specific commands serially."""

    def __init__(self, api_url, advertise_address, listen_address):
        """:param api_url: base URL of the overlord/Ironic API.
        :param advertise_address: (host, port) tuple reported in heartbeats.
        :param listen_address: (host, port) tuple the WSGI server binds to.
        """
        self.api_url = api_url
        self.api_client = overlord_agent_api.APIClient(self.api_url)
        self.listen_address = listen_address
        self.advertise_address = advertise_address
        self.mode_implementation = None
        self.version = pkg_resources.get_distribution('teeth-agent').version
        self.api = app.VersionSelectorApplication(self)
        self.command_results = utils.get_ordereddict()
        self.heartbeater = TeethAgentHeartbeater(self)
        self.hardware = hardware.get_manager()
        self.command_lock = threading.Lock()
        self.log = log.getLogger(__name__)
        self.started_at = None
        self.node = None

    def get_mode_name(self):
        """Return the loaded mode's name, or 'NONE' if no mode is loaded."""
        if self.mode_implementation:
            return self.mode_implementation.name
        else:
            return 'NONE'

    def get_status(self):
        """Retrieve a serializable status."""
        return TeethAgentStatus(
            mode=self.get_mode_name(),
            started_at=self.started_at,
            version=self.version
        )

    def get_agent_mac_addr(self):
        """Return the MAC address of the primary network interface."""
        return self.hardware.get_primary_mac_address()

    def get_node_uuid(self):
        """Return the node UUID learned during lookup.

        :raises: HeartbeatError if the node record has no UUID.
        """
        if 'uuid' not in self.node:
            # BUG FIX: the exception was previously constructed but never
            # raised, so callers silently proceeded to a KeyError below.
            raise errors.HeartbeatError(
                'Tried to heartbeat without node UUID.')
        return self.node['uuid']

    def list_command_results(self):
        """Return all recorded command results, oldest first."""
        return self.command_results.values()

    def get_command_result(self, result_id):
        """Look up a command result by id.

        :raises: RequestedObjectNotFoundError if no such result exists.
        """
        try:
            return self.command_results[result_id]
        except KeyError:
            raise errors.RequestedObjectNotFoundError('Command Result',
                                                      result_id)

    def _split_command(self, command_name):
        """Split '<mode>.<name>' into its two parts.

        :raises: InvalidCommandError if the name is not dotted.
        """
        command_parts = command_name.split('.', 1)
        if len(command_parts) != 2:
            raise errors.InvalidCommandError(
                'Command name must be of the form <mode>.<name>')

        return (command_parts[0], command_parts[1])

    def _verify_mode(self, mode_name, command_name):
        """Lazily load the mode on first use; reject mode switches after."""
        if not self.mode_implementation:
            try:
                self.mode_implementation = _load_mode_implementation(mode_name)
            except Exception:
                raise errors.InvalidCommandError(
                    'Unknown mode: {0}'.format(mode_name))
        elif self.get_mode_name().lower() != mode_name:
            raise errors.InvalidCommandError(
                'Agent is already in {0} mode'.format(self.get_mode_name()))

    def execute_command(self, command_name, **kwargs):
        """Execute an agent command."""
        with self.command_lock:
            mode_part, command_part = self._split_command(command_name)
            self._verify_mode(mode_part, command_part)

            # Only one command may run at a time; reject if the most recent
            # one is still in progress.
            if len(self.command_results) > 0:
                last_command = self.command_results.values()[-1]
                if not last_command.is_done():
                    raise errors.CommandExecutionError('agent is busy')

            try:
                result = self.mode_implementation.execute(command_part,
                                                          **kwargs)
            except errors.InvalidContentError as e:
                # Any command may raise a InvalidContentError which will be
                # returned to the caller directly.
                raise e
            except Exception as e:
                # Other errors are considered command execution errors, and
                # are recorded as a failed SyncCommandResult with the error
                # message rather than propagated to the caller.
                result = base.SyncCommandResult(command_name,
                                                kwargs,
                                                False,
                                                unicode(e))

            self.command_results[result.id] = result
            return result

    def run(self):
        """Run the Teeth Agent."""
        self.started_at = _time()
        # Get the UUID so we can heartbeat to Ironic
        self.node = self.api_client.lookup_node(
            hardware_info=self.hardware.list_hardware_info(),
        )
        self.heartbeater.start()
        wsgi = simple_server.make_server(
            self.listen_address[0],
            self.listen_address[1],
            self.api,
            server_class=simple_server.WSGIServer)

        try:
            wsgi.serve_forever()
        except BaseException:
            self.log.exception('shutting down')

        self.heartbeater.stop()
+
+
def _load_mode_implementation(mode_name):
    """Load and instantiate the stevedore driver for the given mode name."""
    manager = driver.DriverManager(namespace='teeth_agent.modes',
                                   name=mode_name.lower(),
                                   invoke_on_load=True,
                                   invoke_args=[])
    return manager.driver
+
+
def build_agent(api_url,
                advertise_host,
                advertise_port,
                listen_host,
                listen_port):
    """Construct a TeethAgent from individual host/port settings."""
    advertise_address = (advertise_host, advertise_port)
    listen_address = (listen_host, listen_port)
    return TeethAgent(api_url, advertise_address, listen_address)
diff --git a/ironic_python_agent/api/__init__.py b/ironic_python_agent/api/__init__.py
new file mode 100644
index 00000000..2a30de06
--- /dev/null
+++ b/ironic_python_agent/api/__init__.py
@@ -0,0 +1,15 @@
+"""
+Copyright 2014 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
diff --git a/ironic_python_agent/api/app.py b/ironic_python_agent/api/app.py
new file mode 100644
index 00000000..247001d4
--- /dev/null
+++ b/ironic_python_agent/api/app.py
@@ -0,0 +1,63 @@
+"""
+Copyright 2014 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import pecan
+from pecan import hooks
+
+from teeth_agent.api import config
+
+
class AgentHook(hooks.PecanHook):
    """Pecan hook that exposes the agent on every request object."""

    def __init__(self, agent, *args, **kwargs):
        super(AgentHook, self).__init__(*args, **kwargs)
        self.agent = agent

    def before(self, state):
        # Make the agent reachable as pecan.request.agent in controllers.
        state.request.agent = self.agent
+
+
def get_pecan_config():
    """Load the pecan configuration shipped alongside this package."""
    # __file__ may point at the compiled .pyc; pecan needs the .py source.
    config_path = config.__file__.replace('.pyc', '.py')
    return pecan.configuration.conf_from_file(config_path)
+
+
def setup_app(pecan_config=None, extra_hooks=None, agent=None):
    """Create the pecan WSGI application for the agent API.

    :param pecan_config: optional pre-loaded pecan configuration; loaded
        from the packaged config module when falsy.
    :param extra_hooks: currently unused (kept for interface stability).
    :param agent: agent instance injected into requests via AgentHook.
    """
    app_hooks = [AgentHook(agent)]

    if not pecan_config:
        pecan_config = get_pecan_config()

    pecan.configuration.set_config(dict(pecan_config), overwrite=True)

    return pecan.make_app(
        pecan_config.app.root,
        static_root=pecan_config.app.static_root,
        debug=pecan_config.app.debug,
        force_canonical=getattr(pecan_config.app, 'force_canonical', True),
        hooks=app_hooks,
    )
+
+
class VersionSelectorApplication(object):
    """WSGI application that dispatches every request to the v1 API."""

    def __init__(self, agent):
        pecan_config = get_pecan_config()
        self.v1 = setup_app(pecan_config=pecan_config, agent=agent)

    def __call__(self, environ, start_response):
        # Only one API version exists today, so always serve v1.
        return self.v1(environ, start_response)
diff --git a/ironic_python_agent/api/app.wsgi b/ironic_python_agent/api/app.wsgi
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/ironic_python_agent/api/app.wsgi
diff --git a/ironic_python_agent/api/config.py b/ironic_python_agent/api/config.py
new file mode 100644
index 00000000..8814a1c1
--- /dev/null
+++ b/ironic_python_agent/api/config.py
@@ -0,0 +1,39 @@
+"""
+Copyright 2014 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
# Server Specific Configurations
# See https://pecan.readthedocs.org/en/latest/configuration.html#server-configuration # noqa
# NOTE(review): binds on all interfaces -- presumably the agent is only
# reachable on the provisioning network; confirm before exposing publicly.
server = {
    'port': '9999',
    'host': '0.0.0.0'
}

# Pecan Application Configurations
# See https://pecan.readthedocs.org/en/latest/configuration.html#application-configuration # noqa
app = {
    'root': 'teeth_agent.api.controllers.root.RootController',
    'modules': ['teeth_agent.api'],
    'static_root': '%(confdir)s/public',
    'debug': False,
    'enable_acl': True,
    # Routes that bypass the ACL check.
    'acl_public_routes': ['/', '/v1'],
}

# WSME Configurations
# See https://wsme.readthedocs.org/en/latest/integrate.html#configuration
wsme = {
    'debug': False,
}
diff --git a/ironic_python_agent/api/controllers/__init__.py b/ironic_python_agent/api/controllers/__init__.py
new file mode 100644
index 00000000..2a30de06
--- /dev/null
+++ b/ironic_python_agent/api/controllers/__init__.py
@@ -0,0 +1,15 @@
+"""
+Copyright 2014 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
diff --git a/ironic_python_agent/api/controllers/root.py b/ironic_python_agent/api/controllers/root.py
new file mode 100644
index 00000000..4552e731
--- /dev/null
+++ b/ironic_python_agent/api/controllers/root.py
@@ -0,0 +1,96 @@
+# Copyright 2014 Rackspace, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pecan
+from pecan import rest
+
+from wsme import types as wtypes
+import wsmeext.pecan as wsme_pecan
+
+from teeth_agent.api.controllers import v1
+from teeth_agent.api.controllers.v1 import base
+from teeth_agent.api.controllers.v1 import link
+
+
class Version(base.APIBase):
    """An API version representation."""

    id = wtypes.text
    "The ID of the version, also acts as the release number"

    links = [link.Link]
    "A Link that point to a specific version of the API"

    @classmethod
    def convert(cls, id):
        """Build a Version resource with a self link for the given id.

        BUG FIX: the first parameter of this classmethod was misleadingly
        named `self`; it receives the class, so it is now named `cls`.
        """
        version = cls()
        version.id = id
        version.links = [link.Link.make_link('self', pecan.request.host_url,
                                             id, '', bookmark=True)]
        return version
+
+
class Root(base.APIBase):
    """The root resource describing this API and its versions."""

    name = wtypes.text
    "The name of the API"

    description = wtypes.text
    "Some information about this API"

    versions = [Version]
    "Links to all the versions available in this API"

    default_version = Version
    "A link to the default version of the API"

    @classmethod
    def convert(cls):
        """Build the Root resource.

        BUG FIX: the first parameter of this classmethod was misleadingly
        named `self`; it receives the class, so it is now named `cls`.
        """
        root = cls()
        root.name = 'OpenStack Ironic Python Agent API'
        root.description = ('Ironic Python Agent is a provisioning agent for '
                            'OpenStack Ironic')
        root.versions = [Version.convert('v1')]
        root.default_version = Version.convert('v1')
        return root
+
+
class RootController(rest.RestController):
    """Top-level controller; routes versionless requests to the default API."""

    _versions = ['v1']
    "All supported API versions"

    _default_version = 'v1'
    "The default API version"

    v1 = v1.Controller()

    @wsme_pecan.wsexpose(Root)
    def get(self):
        # NOTE: The reason why convert() it's being called for every
        # request is because we need to get the host url from
        # the request object to make the links.
        return Root.convert()

    @pecan.expose()
    def _route(self, args):
        """Overrides the default routing behavior.

        It redirects the request to the default version of the ironic API
        if the version number is not specified in the url.
        """
        # NOTE(review): assumes args always has at least one element; an
        # empty list would raise IndexError here -- TODO confirm pecan
        # guarantees a non-empty args list.
        if args[0] and args[0] not in self._versions:
            args = [self._default_version] + args
        return super(RootController, self)._route(args)
diff --git a/ironic_python_agent/api/controllers/v1/__init__.py b/ironic_python_agent/api/controllers/v1/__init__.py
new file mode 100644
index 00000000..a2bc16c7
--- /dev/null
+++ b/ironic_python_agent/api/controllers/v1/__init__.py
@@ -0,0 +1,118 @@
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Version 1 of the Ironic Python Agent API
+"""
+
+import pecan
+from pecan import rest
+
+from wsme import types as wtypes
+import wsmeext.pecan as wsme_pecan
+
+from teeth_agent.api.controllers.v1 import base
+from teeth_agent.api.controllers.v1 import command
+from teeth_agent.api.controllers.v1 import link
+from teeth_agent.api.controllers.v1 import status
+
+
class MediaType(base.APIBase):
    """A media type representation."""

    # Shadowing the builtin `type` is intentional here: these are wsme
    # attribute declarations whose names must match the serialized fields.
    base = wtypes.text
    type = wtypes.text

    def __init__(self, base, type):
        self.base = base
        self.type = type
+
+
class V1(base.APIBase):
    """The representation of the version 1 of the API."""

    id = wtypes.text
    "The ID of the version, also acts as the release number"

    media_types = [MediaType]
    "An array of supported media types for this version"

    links = [link.Link]
    "Links that point to a specific URL for this version and documentation"

    commands = [link.Link]
    "Links to the command resource"

    status = [link.Link]
    "Links to the status resource"

    @classmethod
    def convert(cls):
        """Build the V1 resource with self/bookmark links for each child.

        BUG FIX: the first parameter of this classmethod was misleadingly
        named `self`; it receives the class, so it is now named `cls`.
        """
        v1 = cls()
        v1.id = "v1"
        v1.links = [
            link.Link.make_link('self',
                                pecan.request.host_url,
                                'v1',
                                '',
                                bookmark=True),
            link.Link.make_link('describedby',
                                'https://github.com',
                                'rackerlabs',
                                'teeth-agent',
                                bookmark=True,
                                type='text/html')
        ]
        v1.commands = [
            link.Link.make_link('self',
                                pecan.request.host_url,
                                'commands',
                                ''),
            link.Link.make_link('bookmark',
                                pecan.request.host_url,
                                'commands',
                                '',
                                bookmark=True)
        ]
        v1.status = [
            link.Link.make_link('self',
                                pecan.request.host_url,
                                'status',
                                ''),
            link.Link.make_link('bookmark',
                                pecan.request.host_url,
                                'status',
                                '',
                                bookmark=True)
        ]
        v1.media_types = [MediaType('application/json',
                                    ('application/vnd.openstack.'
                                     'ironic-python-agent.v1+json'))]
        return v1
+
+
class Controller(rest.RestController):
    """Version 1 API controller root."""

    commands = command.CommandController()
    status = status.StatusController()

    @wsme_pecan.wsexpose(V1)
    def get(self):
        # NOTE: The reason why convert() it's being called for every
        # request is because we need to get the host url from
        # the request object to make the links.
        return V1.convert()

# BUG FIX: __all__ must be a sequence of name strings; `(Controller)` was
# just the class object in parentheses, not a tuple.
__all__ = ('Controller',)
diff --git a/ironic_python_agent/api/controllers/v1/base.py b/ironic_python_agent/api/controllers/v1/base.py
new file mode 100644
index 00000000..20af8964
--- /dev/null
+++ b/ironic_python_agent/api/controllers/v1/base.py
@@ -0,0 +1,73 @@
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import six
+from wsme import types as wtypes
+
+
class ExceptionType(wtypes.UserType):
    """wsme user type that serializes RESTError-style exceptions to dicts."""

    basetype = wtypes.DictType
    name = 'exception'

    def validate(self, value):
        """Accept only exception instances."""
        if isinstance(value, BaseException):
            return value
        raise ValueError('Value is not an exception')

    def tobasetype(self, value):
        """Turn a RESTError into a dict."""
        return {'type': value.__class__.__name__,
                'code': value.status_code,
                'message': value.message,
                'details': value.details}

    frombasetype = tobasetype


exception_type = ExceptionType()
+
+
class MultiType(wtypes.UserType):
    """A complex type that represents one or more types.

    Used for validating that a value is an instance of one of the types.

    :param *types: Variable-length list of types.

    """
    def __init__(self, *types):
        self.types = types

    def __str__(self):
        return ' | '.join(map(str, self.types))

    def validate(self, value):
        """Return the value if it matches any accepted type, else raise."""
        for candidate in self.types:
            # Transparently decode bytes when text would be acceptable.
            if candidate is wtypes.text and isinstance(value, wtypes.bytes):
                value = value.decode()
            if isinstance(value, candidate):
                return value
        raise ValueError(
            "Wrong type. Expected '{type}', got '{value}'".format(
                type=self.types, value=type(value)))
+
+
# Accepts any JSON-compatible value: arrays, objects, integers or text.
json_type = MultiType(list, dict, six.integer_types, wtypes.text)


class APIBase(wtypes.Base):
    """Common base class for all v1 API representations."""
    pass
diff --git a/ironic_python_agent/api/controllers/v1/command.py b/ironic_python_agent/api/controllers/v1/command.py
new file mode 100644
index 00000000..81100000
--- /dev/null
+++ b/ironic_python_agent/api/controllers/v1/command.py
@@ -0,0 +1,89 @@
+# Copyright 2014 Rackspace, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pecan
+from pecan import rest
+from wsme import types
+from wsmeext import pecan as wsme_pecan
+
+from teeth_agent.api.controllers.v1 import base
+
+
class CommandResult(base.APIBase):
    """API representation of a single agent command result."""

    id = types.text
    command_name = types.text
    command_params = types.DictType(types.text, base.json_type)
    command_status = types.text
    command_error = base.exception_type
    command_result = types.DictType(types.text, base.json_type)

    @classmethod
    def from_result(cls, result):
        """Copy the relevant fields off an internal command result object."""
        instance = cls()
        fields = ('id', 'command_name', 'command_params', 'command_status',
                  'command_error', 'command_result')
        for field in fields:
            setattr(instance, field, getattr(result, field))
        return instance
+
+
class CommandResultList(base.APIBase):
    """API representation of a list of command results."""

    commands = [CommandResult]

    @classmethod
    def from_results(cls, results):
        """Wrap a sequence of internal results in a CommandResultList."""
        instance = cls()
        converted = []
        for result in results:
            converted.append(CommandResult.from_result(result))
        instance.commands = converted
        return instance
+
+
class Command(base.APIBase):
    """A command representation."""
    # Both fields are required in the POST request body.
    name = types.wsattr(types.text, mandatory=True)
    params = types.wsattr(base.MultiType(dict), mandatory=True)
+
+
class CommandController(rest.RestController):
    """Controller for issuing commands and polling for command status."""

    @wsme_pecan.wsexpose(CommandResultList)
    def get_all(self):
        """List the results of every command issued to this agent."""
        agent = pecan.request.agent
        results = agent.list_command_results()
        return CommandResultList.from_results(results)

    @wsme_pecan.wsexpose(CommandResult, types.text, types.text)
    def get_one(self, result_id, wait=False):
        """Fetch one command result by id.

        :param result_id: id of the command result to look up.
        :param wait: query-string value; blocks until the command finishes
            when it equals 'true' (case-insensitive).
        """
        agent = pecan.request.agent
        result = agent.get_command_result(result_id)

        if wait and wait.lower() == 'true':
            result.join()

        return CommandResult.from_result(result)

    @wsme_pecan.wsexpose(CommandResult, body=Command)
    def post(self, wait=False, command=None):
        """Issue a new command to the agent."""
        # the POST body is always the last arg,
        # so command must be a kwarg here
        if command is None:
            command = Command()
        agent = pecan.request.agent
        result = agent.execute_command(command.name, **command.params)

        if wait and wait.lower() == 'true':
            result.join()

        # NOTE(review): unlike get_one(), this returns the raw agent result
        # rather than CommandResult.from_result(result) -- confirm wsme
        # serializes it as intended.
        return result
diff --git a/ironic_python_agent/api/controllers/v1/link.py b/ironic_python_agent/api/controllers/v1/link.py
new file mode 100644
index 00000000..987eb386
--- /dev/null
+++ b/ironic_python_agent/api/controllers/v1/link.py
@@ -0,0 +1,43 @@
+# Copyright 2014 Rackspace, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from wsme import types as wtypes
+
+from teeth_agent.api.controllers.v1 import base
+
+
class Link(base.APIBase):
    """A link representation."""

    href = wtypes.text
    "The url of a link."

    rel = wtypes.text
    "The name of a link."

    type = wtypes.text
    "Indicates the type of document/link."

    @classmethod
    def make_link(cls, rel_name, url, resource, resource_args,
                  bookmark=False, type=wtypes.Unset):
        """Build a Link; non-bookmark links are versioned under /v1."""
        if bookmark:
            template = '%s/%s'
        else:
            template = '%s/v1/%s'
        # FIXME(lucasagomes): I'm getting a 404 when doing a GET on
        # a nested resource that the URL ends with a '/'.
        # https://groups.google.com/forum/#!topic/pecan-dev/QfSeviLg5qs
        if resource_args.startswith('?'):
            template += '%s'
        else:
            template += '/%s'

        href = template % (url, resource, resource_args)
        return cls(href=href, rel=rel_name, type=type)
diff --git a/ironic_python_agent/api/controllers/v1/status.py b/ironic_python_agent/api/controllers/v1/status.py
new file mode 100644
index 00000000..c631ee59
--- /dev/null
+++ b/ironic_python_agent/api/controllers/v1/status.py
@@ -0,0 +1,44 @@
+# Copyright 2014 Rackspace, Inc.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import pecan
+from pecan import rest
+from wsme import types
+from wsmeext import pecan as wsme_pecan
+
+from teeth_agent.api.controllers.v1 import base
+
+
class AgentStatus(base.APIBase):
    """API representation of the agent's status."""

    mode = types.text
    started_at = base.MultiType(float)
    version = types.text

    @classmethod
    def from_agent_status(cls, status):
        """Copy mode, started_at and version off an internal status object."""
        instance = cls()
        instance.mode = status.mode
        instance.started_at = status.started_at
        instance.version = status.version
        return instance
+
+
class StatusController(rest.RestController):
    """Controller for getting agent status."""

    @wsme_pecan.wsexpose(AgentStatus)
    def get_all(self):
        """Return the serialized status of the running agent."""
        current_status = pecan.request.agent.get_status()
        return AgentStatus.from_agent_status(current_status)
diff --git a/ironic_python_agent/base.py b/ironic_python_agent/base.py
new file mode 100644
index 00000000..2e432f78
--- /dev/null
+++ b/ironic_python_agent/base.py
@@ -0,0 +1,137 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import threading
+import uuid
+
+
+from teeth_agent import encoding
+from teeth_agent import errors
+from teeth_agent.openstack.common import log
+
+
+class AgentCommandStatus(object):
+ RUNNING = u'RUNNING'
+ SUCCEEDED = u'SUCCEEDED'
+ FAILED = u'FAILED'
+
+
+class BaseCommandResult(encoding.Serializable):
+ def __init__(self, command_name, command_params):
+ self.id = unicode(uuid.uuid4())
+ self.command_name = command_name
+ self.command_params = command_params
+ self.command_status = AgentCommandStatus.RUNNING
+ self.command_error = None
+ self.command_result = None
+
+ def serialize(self):
+ return dict((
+ (u'id', self.id),
+ (u'command_name', self.command_name),
+ (u'command_params', self.command_params),
+ (u'command_status', self.command_status),
+ (u'command_error', self.command_error),
+ (u'command_result', self.command_result),
+ ))
+
+ def is_done(self):
+ return self.command_status != AgentCommandStatus.RUNNING
+
+ def join(self):
+ return self
+
+
+class SyncCommandResult(BaseCommandResult):
+ def __init__(self, command_name, command_params, success, result_or_error):
+ super(SyncCommandResult, self).__init__(command_name,
+ command_params)
+ if success:
+ self.command_status = AgentCommandStatus.SUCCEEDED
+ self.command_result = result_or_error
+ else:
+ self.command_status = AgentCommandStatus.FAILED
+ self.command_error = result_or_error
+
+
+class AsyncCommandResult(BaseCommandResult):
+ """A command that executes asynchronously in the background.
+
+ :param execute_method: a callable to be executed asynchronously
+ """
+ def __init__(self, command_name, command_params, execute_method):
+ super(AsyncCommandResult, self).__init__(command_name, command_params)
+ self.execute_method = execute_method
+ self.command_state_lock = threading.Lock()
+
+ thread_name = 'agent-command-{0}'.format(self.id)
+ self.execution_thread = threading.Thread(target=self.run,
+ name=thread_name)
+
+ def serialize(self):
+ with self.command_state_lock:
+ return super(AsyncCommandResult, self).serialize()
+
+ def start(self):
+ self.execution_thread.start()
+ return self
+
+ def join(self):
+ self.execution_thread.join()
+ return self
+
+ def is_done(self):
+ with self.command_state_lock:
+ return super(AsyncCommandResult, self).is_done()
+
+ def run(self):
+ try:
+ result = self.execute_method(self.command_name,
+ **self.command_params)
+ with self.command_state_lock:
+ self.command_result = result
+ self.command_status = AgentCommandStatus.SUCCEEDED
+
+ except Exception as e:
+ if not isinstance(e, errors.RESTError):
+ e = errors.CommandExecutionError(str(e))
+
+ with self.command_state_lock:
+ self.command_error = e
+ self.command_status = AgentCommandStatus.FAILED
+
+
+class BaseAgentMode(object):
+ def __init__(self, name):
+ super(BaseAgentMode, self).__init__()
+ self.log = log.getLogger(__name__)
+ self.name = name
+ self.command_map = {}
+
+ def execute(self, command_name, **kwargs):
+ if command_name not in self.command_map:
+ raise errors.InvalidCommandError(
+ 'Unknown command: {0}'.format(command_name))
+
+ result = self.command_map[command_name](command_name, **kwargs)
+
+ # In order to enable extremely succinct synchronous commands, we allow
+ # them to return a value directly, and we'll handle wrapping it up in a
+ # SyncCommandResult
+ if not isinstance(result, BaseCommandResult):
+ result = SyncCommandResult(command_name, kwargs, True, result)
+
+ return result
diff --git a/ironic_python_agent/cmd/__init__.py b/ironic_python_agent/cmd/__init__.py
new file mode 100644
index 00000000..13e76de9
--- /dev/null
+++ b/ironic_python_agent/cmd/__init__.py
@@ -0,0 +1,15 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
diff --git a/ironic_python_agent/cmd/agent.py b/ironic_python_agent/cmd/agent.py
new file mode 100644
index 00000000..a30b8a9c
--- /dev/null
+++ b/ironic_python_agent/cmd/agent.py
@@ -0,0 +1,55 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import argparse
+
+from teeth_agent import agent
+
+
+def run():
+ parser = argparse.ArgumentParser(
+ description=('An agent that handles decomissioning and provisioning'
+ ' on behalf of teeth-overlord.'))
+
+ parser.add_argument('--api-url',
+ required=True,
+ help='URL of the Teeth agent API')
+
+ parser.add_argument('--listen-host',
+ default='0.0.0.0',
+ type=str,
+ help='The IP address to listen on.')
+
+ parser.add_argument('--listen-port',
+ default=9999,
+ type=int,
+ help='The port to listen on')
+ parser.add_argument('--advertise-host',
+ default='0.0.0.0',
+ type=str,
+ help='The host to tell Ironic to reply and send '
+ 'commands to.')
+ parser.add_argument('--advertise-port',
+ default=9999,
+ type=int,
+ help='The port to tell Ironic to reply and send '
+ 'commands to.')
+ args = parser.parse_args()
+ agent.build_agent(args.api_url,
+ args.advertise_host,
+ args.advertise_port,
+ args.listen_host,
+ args.listen_port).run()
diff --git a/ironic_python_agent/configdrive.py b/ironic_python_agent/configdrive.py
new file mode 100644
index 00000000..d4bec9b0
--- /dev/null
+++ b/ironic_python_agent/configdrive.py
@@ -0,0 +1,80 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import base64
+import json
+import os
+from teeth_agent import utils
+
+
+class ConfigDriveWriter(object):
+ def __init__(self):
+ self.metadata = {}
+ self.files = utils.get_ordereddict()
+
+ def add_metadata(self, key, value):
+ self.metadata[key] = value
+
+ def add_file(self, filepath, contents):
+ self.files[filepath] = contents
+
+ def write(self, location, prefix='openstack', version='latest'):
+ os.makedirs(os.path.join(location, prefix, version))
+ os.makedirs(os.path.join(location, prefix, 'content'))
+
+ metadata = {}
+ for k, v in self.metadata.iteritems():
+ metadata[k] = v
+
+ if self.files:
+ metadata['files'] = []
+ filenumber = 0
+ for filepath, contents in self.files.iteritems():
+ content_path = '/content/{0:04}'.format(filenumber)
+ file_info = {
+ 'content_path': content_path,
+ 'path': filepath
+ }
+ metadata['files'].append(file_info)
+
+ content_path = os.path.join(location, prefix, content_path[1:])
+ with open(content_path, 'wb') as f:
+ f.write(contents)
+
+ filenumber += 1
+
+ json_metadata = json.dumps(metadata)
+ metadata_path = '{0}/{1}/meta_data.json'.format(prefix, version)
+ metadata_path = os.path.join(location, metadata_path)
+ with open(metadata_path, 'wb') as f:
+ f.write(json_metadata)
+
+
+def write_configdrive(location, metadata, files, prefix='openstack',
+ version='latest'):
+ """Generates and writes a valid configdrive to `location`.
+ `files` are passed in as a dict {path: base64_contents}.
+ """
+ writer = ConfigDriveWriter()
+
+ for k, v in metadata.iteritems():
+ writer.add_metadata(k, v)
+
+ for path, b64_contents in files.iteritems():
+ contents = base64.b64decode(b64_contents)
+ writer.add_file(path, contents)
+
+ writer.write(location, prefix=prefix, version=version)
diff --git a/ironic_python_agent/decom.py b/ironic_python_agent/decom.py
new file mode 100644
index 00000000..b1fe1f10
--- /dev/null
+++ b/ironic_python_agent/decom.py
@@ -0,0 +1,22 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from teeth_agent import base
+
+
+class DecomMode(base.BaseAgentMode):
+ def __init__(self):
+ super(DecomMode, self).__init__('DECOM')
diff --git a/ironic_python_agent/decorators.py b/ironic_python_agent/decorators.py
new file mode 100644
index 00000000..01cd5d7d
--- /dev/null
+++ b/ironic_python_agent/decorators.py
@@ -0,0 +1,42 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import functools
+
+from teeth_agent import base
+
+
+def async_command(validator=None):
+ """Will run the command in an AsyncCommandResult in its own thread.
+ command_name is set based on the func name and command_params will
+ be whatever args/kwargs you pass into the decorated command.
+ """
+ def async_decorator(func):
+ @functools.wraps(func)
+ def wrapper(self, command_name, **command_params):
+ # Run a validator before passing everything off to async.
+ # validators should raise exceptions or return silently.
+ if validator:
+ validator(**command_params)
+
+ # bind self to func so that AsyncCommandResult doesn't need to
+ # know about the mode
+ bound_func = functools.partial(func, self)
+
+ return base.AsyncCommandResult(command_name,
+ command_params,
+ bound_func).start()
+ return wrapper
+ return async_decorator
diff --git a/ironic_python_agent/encoding.py b/ironic_python_agent/encoding.py
new file mode 100644
index 00000000..a6386579
--- /dev/null
+++ b/ironic_python_agent/encoding.py
@@ -0,0 +1,53 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import json
+import uuid
+
+
+class Serializable(object):
+ """Base class for things that can be serialized."""
+ def serialize(self):
+ """Turn this object into a dict."""
+ raise NotImplementedError()
+
+
+class RESTJSONEncoder(json.JSONEncoder):
+ """A slightly customized JSON encoder."""
+ def encode(self, o):
+ """Turn an object into JSON.
+
+ Appends a newline to responses when configured to pretty-print,
+ in order to make use of curl less painful from most shells.
+ """
+ delimiter = ''
+
+ # if indent is None, newlines are still inserted, so we should too.
+ if self.indent is not None:
+ delimiter = '\n'
+
+ return super(RESTJSONEncoder, self).encode(o) + delimiter
+
+ def default(self, o):
+ """Turn an object into a serializable object. In particular, by
+ calling :meth:`.Serializable.serialize`.
+ """
+ if isinstance(o, Serializable):
+ return o.serialize()
+ elif isinstance(o, uuid.UUID):
+ return str(o)
+ else:
+ return json.JSONEncoder.default(self, o)
diff --git a/ironic_python_agent/errors.py b/ironic_python_agent/errors.py
new file mode 100644
index 00000000..6ba45316
--- /dev/null
+++ b/ironic_python_agent/errors.py
@@ -0,0 +1,179 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from teeth_agent import encoding
+from teeth_agent import utils
+
+
+class RESTError(Exception, encoding.Serializable):
+ """Base class for errors generated in teeth."""
+ message = 'An error occurred'
+ details = 'An unexpected error occurred. Please try back later.'
+ status_code = 500
+
+ def serialize(self):
+ """Turn a RESTError into a dict."""
+ return utils.get_ordereddict([
+ ('type', self.__class__.__name__),
+ ('code', self.status_code),
+ ('message', self.message),
+ ('details', self.details),
+ ])
+
+
+class InvalidContentError(RESTError):
+ """Error which occurs when a user supplies invalid content, either
+ because that content cannot be parsed according to the advertised
+ `Content-Type`, or due to a content validation error.
+ """
+ message = 'Invalid request body'
+ status_code = 400
+
+ def __init__(self, details):
+ self.details = details
+
+
+class NotFound(RESTError):
+ """Error which occurs when a user supplies invalid content, either
+ because that content cannot be parsed according to the advertised
+ `Content-Type`, or due to a content validation error.
+ """
+ message = 'Not found'
+ status_code = 404
+ details = 'The requested URL was not found.'
+
+
+class CommandExecutionError(RESTError):
+ """Error raised when a command fails to execute."""
+
+ message = 'Command execution failed'
+
+ def __init__(self, details):
+ super(CommandExecutionError, self).__init__()
+ self.details = details
+
+
+class InvalidCommandError(InvalidContentError):
+ """Error which is raised when an unknown command is issued."""
+
+    message = 'Invalid command'
+
+ def __init__(self, details):
+ super(InvalidCommandError, self).__init__(details)
+
+
+class InvalidCommandParamsError(InvalidContentError):
+ """Error which is raised when command parameters are invalid."""
+
+ message = 'Invalid command parameters'
+
+ def __init__(self, details):
+ super(InvalidCommandParamsError, self).__init__(details)
+
+
+class RequestedObjectNotFoundError(NotFound):
+ def __init__(self, type_descr, obj_id):
+ details = '{0} with id {1} not found.'.format(type_descr, obj_id)
+ super(RequestedObjectNotFoundError, self).__init__(details)
+ self.details = details
+
+
+class OverlordAPIError(RESTError):
+ """Error raised when a call to the agent API fails."""
+
+ message = 'Error in call to teeth-agent-api.'
+
+ def __init__(self, details):
+ super(OverlordAPIError, self).__init__(details)
+ self.details = details
+
+
+class HeartbeatError(OverlordAPIError):
+ """Error raised when a heartbeat to the agent API fails."""
+
+ message = 'Error heartbeating to agent API.'
+
+ def __init__(self, details):
+ super(HeartbeatError, self).__init__(details)
+
+
+class LookupNodeError(OverlordAPIError):
+ """Error raised when the node configuration lookup to the Ironic API
+ fails.
+ """
+
+ message = 'Error getting configuration from Ironic.'
+
+ def __init__(self, details):
+ super(LookupNodeError, self).__init__(details)
+
+
+class ImageDownloadError(RESTError):
+ """Error raised when an image cannot be downloaded."""
+
+ message = 'Error downloading image.'
+
+ def __init__(self, image_id):
+ super(ImageDownloadError, self).__init__()
+ self.details = 'Could not download image with id {0}.'.format(image_id)
+
+
+class ImageChecksumError(RESTError):
+ """Error raised when an image fails to verify against its checksum."""
+
+ message = 'Error verifying image checksum.'
+
+ def __init__(self, image_id):
+ super(ImageChecksumError, self).__init__()
+ self.details = 'Image with id {0} failed to verify against checksum.'
+ self.details = self.details.format(image_id)
+
+
+class ImageWriteError(RESTError):
+ """Error raised when an image cannot be written to a device."""
+
+ message = 'Error writing image to device.'
+
+ def __init__(self, exit_code, device):
+ super(ImageWriteError, self).__init__()
+ self.details = 'Writing image to device {0} failed with exit code {1}.'
+ self.details = self.details.format(device, exit_code)
+
+
+class ConfigDriveWriteError(RESTError):
+ """Error raised when a configdrive directory cannot be written to a
+ device.
+ """
+
+ message = 'Error writing configdrive to device.'
+
+ def __init__(self, exit_code, device):
+ details = 'Writing configdrive to device {0} failed with exit code ' \
+ '{1}.'
+ details = details.format(device, exit_code)
+ super(ConfigDriveWriteError, self).__init__(details)
+ self.details = details
+
+
+class SystemRebootError(RESTError):
+ """Error raised when a system cannot reboot."""
+
+ message = 'Error rebooting system.'
+
+ def __init__(self, exit_code):
+ super(SystemRebootError, self).__init__()
+ self.details = 'Reboot script failed with exit code {0}.'
+ self.details = self.details.format(exit_code)
diff --git a/ironic_python_agent/hardware.py b/ironic_python_agent/hardware.py
new file mode 100644
index 00000000..d6ed9d5d
--- /dev/null
+++ b/ironic_python_agent/hardware.py
@@ -0,0 +1,183 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import abc
+import os
+import subprocess
+
+import stevedore
+
+from teeth_agent import encoding
+from teeth_agent.openstack.common import log
+from teeth_agent import utils
+
+_global_manager = None
+
+
+class HardwareSupport(object):
+ """These are just guidelines to suggest values that might be returned by
+ calls to `evaluate_hardware_support`. No HardwareManager in mainline
+ teeth-agent will ever offer a value greater than `MAINLINE`. Service
+ Providers should feel free to return values greater than SERVICE_PROVIDER
+ to distinguish between additional levels of support.
+ """
+ NONE = 0
+ GENERIC = 1
+ MAINLINE = 2
+ SERVICE_PROVIDER = 3
+
+
+class HardwareType(object):
+ MAC_ADDRESS = 'mac_address'
+
+
+class HardwareInfo(encoding.Serializable):
+ def __init__(self, type, id):
+ self.type = type
+ self.id = id
+
+ def serialize(self):
+ return utils.get_ordereddict([
+ ('type', self.type),
+ ('id', self.id),
+ ])
+
+
+class BlockDevice(object):
+ def __init__(self, name, size, start_sector):
+ self.name = name
+ self.size = size
+ self.start_sector = start_sector
+
+
+class NetworkInterface(object):
+ def __init__(self, name, mac_addr):
+ self.name = name
+ self.mac_address = mac_addr
+ # TODO(russellhaering): Pull these from LLDP
+ self.switch_port_descr = None
+ self.switch_chassis_descr = None
+
+
+class HardwareManager(object):
+ @abc.abstractmethod
+ def evaluate_hardware_support(cls):
+ pass
+
+ @abc.abstractmethod
+ def list_network_interfaces(self):
+ pass
+
+ @abc.abstractmethod
+ def get_os_install_device(self):
+ pass
+
+ def list_hardware_info(self):
+ hardware_info = []
+ for interface in self.list_network_interfaces():
+ hardware_info.append(HardwareInfo(HardwareType.MAC_ADDRESS,
+ interface.mac_address))
+ return hardware_info
+
+
+class GenericHardwareManager(HardwareManager):
+ def __init__(self):
+ self.sys_path = '/sys'
+
+ if os.path.isdir('/mnt/sys'):
+ self.sys_path = '/mnt/sys'
+
+ def evaluate_hardware_support(cls):
+ return HardwareSupport.GENERIC
+
+ def _get_interface_info(self, interface_name):
+ addr_path = '{0}/class/net/{1}/address'.format(self.sys_path,
+ interface_name)
+ addr_file = open(addr_path, 'r')
+ mac_addr = addr_file.read().strip()
+ return NetworkInterface(interface_name, mac_addr)
+
+ def _is_device(self, interface_name):
+ device_path = '{0}/class/net/{1}/device'.format(self.sys_path,
+ interface_name)
+ return os.path.exists(device_path)
+
+ def list_network_interfaces(self):
+ iface_names = os.listdir('{0}/class/net'.format(self.sys_path))
+ return [self._get_interface_info(name)
+ for name in iface_names
+ if self._is_device(name)]
+
+ def _cmd(self, command):
+ process = subprocess.Popen(command, stdout=subprocess.PIPE)
+ return process.communicate()
+
+ def _list_block_devices(self):
+ report = self._cmd(['blockdev', '--report'])[0]
+ lines = report.split('\n')
+        lines = [line.split() for line in lines if line != '']
+ startsec_idx = lines[0].index('StartSec')
+ device_idx = lines[0].index('Device')
+ size_idx = lines[0].index('Size')
+ return [BlockDevice(line[device_idx],
+ int(line[size_idx]),
+ int(line[startsec_idx]))
+ for line
+ in lines[1:]]
+
+ def get_os_install_device(self):
+ # Assume anything with a start sector other than 0, is a partition
+ block_devices = [device for device in self._list_block_devices()
+ if device.start_sector == 0]
+
+ # Find the first device larger than 4GB, assume it is the OS disk
+ # TODO(russellhaering): This isn't a valid assumption in all cases,
+ # is there a more reasonable default behavior?
+ block_devices.sort(key=lambda device: device.size)
+ for device in block_devices:
+ if device.size >= (4 * pow(1024, 3)):
+ return device.name
+
+
+def _compare_extensions(ext1, ext2):
+ mgr1 = ext1.obj
+ mgr2 = ext2.obj
+ return mgr1.evaluate_hardware_support() - mgr2.evaluate_hardware_support()
+
+
+def get_manager():
+ global _global_manager
+
+ if not _global_manager:
+ LOG = log.getLogger()
+ extension_manager = stevedore.ExtensionManager(
+ namespace='teeth_agent.hardware_managers',
+ invoke_on_load=True)
+
+ # There will always be at least one extension available (the
+ # GenericHardwareManager).
+        preferred_extension = sorted(extension_manager, _compare_extensions)[-1]
+ preferred_manager = preferred_extension.obj
+
+ if preferred_manager.evaluate_hardware_support() <= 0:
+ raise RuntimeError('No suitable HardwareManager could be found')
+
+ LOG.info('selected hardware manager {0}'.format(
+ preferred_extension.entry_point_target))
+
+ _global_manager = preferred_manager
+
+ return _global_manager
diff --git a/ironic_python_agent/openstack/__init__.py b/ironic_python_agent/openstack/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/ironic_python_agent/openstack/__init__.py
diff --git a/ironic_python_agent/openstack/common/__init__.py b/ironic_python_agent/openstack/common/__init__.py
new file mode 100644
index 00000000..d1223eaf
--- /dev/null
+++ b/ironic_python_agent/openstack/common/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import six
+
+
+six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))
diff --git a/ironic_python_agent/openstack/common/gettextutils.py b/ironic_python_agent/openstack/common/gettextutils.py
new file mode 100644
index 00000000..edcf14c1
--- /dev/null
+++ b/ironic_python_agent/openstack/common/gettextutils.py
@@ -0,0 +1,448 @@
+# Copyright 2012 Red Hat, Inc.
+# Copyright 2013 IBM Corp.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+gettext for openstack-common modules.
+
+Usual usage in an openstack.common module:
+
+ from ironic_python_agent.openstack.common.gettextutils import _
+"""
+
+import copy
+import functools
+import gettext
+import locale
+from logging import handlers
+import os
+
+from babel import localedata
+import six
+
+_localedir = os.environ.get('ironic_python_agent'.upper() + '_LOCALEDIR')
+_t = gettext.translation('ironic_python_agent', localedir=_localedir, fallback=True)
+
+# We use separate translation catalogs for each log level, so set up a
+# mapping between the log level name and the translator. The domain
+# for the log level is project_name + "-log-" + log_level so messages
+# for each level end up in their own catalog.
+_t_log_levels = dict(
+ (level, gettext.translation('ironic_python_agent' + '-log-' + level,
+ localedir=_localedir,
+ fallback=True))
+ for level in ['info', 'warning', 'error', 'critical']
+)
+
+_AVAILABLE_LANGUAGES = {}
+USE_LAZY = False
+
+
+def enable_lazy():
+ """Convenience function for configuring _() to use lazy gettext
+
+ Call this at the start of execution to enable the gettextutils._
+ function to use lazy gettext functionality. This is useful if
+ your project is importing _ directly instead of using the
+ gettextutils.install() way of importing the _ function.
+ """
+ global USE_LAZY
+ USE_LAZY = True
+
+
+def _(msg):
+ if USE_LAZY:
+ return Message(msg, domain='ironic_python_agent')
+ else:
+ if six.PY3:
+ return _t.gettext(msg)
+ return _t.ugettext(msg)
+
+
+def _log_translation(msg, level):
+ """Build a single translation of a log message
+ """
+ if USE_LAZY:
+ return Message(msg, domain='ironic_python_agent' + '-log-' + level)
+ else:
+ translator = _t_log_levels[level]
+ if six.PY3:
+ return translator.gettext(msg)
+ return translator.ugettext(msg)
+
+# Translators for log levels.
+#
+# The abbreviated names are meant to reflect the usual use of a short
+# name like '_'. The "L" is for "log" and the other letter comes from
+# the level.
+_LI = functools.partial(_log_translation, level='info')
+_LW = functools.partial(_log_translation, level='warning')
+_LE = functools.partial(_log_translation, level='error')
+_LC = functools.partial(_log_translation, level='critical')
+
+
+def install(domain, lazy=False):
+ """Install a _() function using the given translation domain.
+
+ Given a translation domain, install a _() function using gettext's
+ install() function.
+
+ The main difference from gettext.install() is that we allow
+ overriding the default localedir (e.g. /usr/share/locale) using
+ a translation-domain-specific environment variable (e.g.
+ NOVA_LOCALEDIR).
+
+ :param domain: the translation domain
+ :param lazy: indicates whether or not to install the lazy _() function.
+ The lazy _() introduces a way to do deferred translation
+ of messages by installing a _ that builds Message objects,
+ instead of strings, which can then be lazily translated into
+ any available locale.
+ """
+ if lazy:
+ # NOTE(mrodden): Lazy gettext functionality.
+ #
+ # The following introduces a deferred way to do translations on
+ # messages in OpenStack. We override the standard _() function
+ # and % (format string) operation to build Message objects that can
+ # later be translated when we have more information.
+ def _lazy_gettext(msg):
+ """Create and return a Message object.
+
+ Lazy gettext function for a given domain, it is a factory method
+ for a project/module to get a lazy gettext function for its own
+ translation domain (i.e. nova, glance, cinder, etc.)
+
+ Message encapsulates a string so that we can translate
+ it later when needed.
+ """
+ return Message(msg, domain=domain)
+
+ from six import moves
+ moves.builtins.__dict__['_'] = _lazy_gettext
+ else:
+ localedir = '%s_LOCALEDIR' % domain.upper()
+ if six.PY3:
+ gettext.install(domain,
+ localedir=os.environ.get(localedir))
+ else:
+ gettext.install(domain,
+ localedir=os.environ.get(localedir),
+ unicode=True)
+
+
+class Message(six.text_type):
+ """A Message object is a unicode object that can be translated.
+
+ Translation of Message is done explicitly using the translate() method.
+ For all non-translation intents and purposes, a Message is simply unicode,
+ and can be treated as such.
+ """
+
+ def __new__(cls, msgid, msgtext=None, params=None,
+ domain='ironic_python_agent', *args):
+ """Create a new Message object.
+
+ In order for translation to work gettext requires a message ID, this
+ msgid will be used as the base unicode text. It is also possible
+ for the msgid and the base unicode text to be different by passing
+ the msgtext parameter.
+ """
+ # If the base msgtext is not given, we use the default translation
+ # of the msgid (which is in English) just in case the system locale is
+ # not English, so that the base text will be in that locale by default.
+ if not msgtext:
+ msgtext = Message._translate_msgid(msgid, domain)
+ # We want to initialize the parent unicode with the actual object that
+ # would have been plain unicode if 'Message' was not enabled.
+ msg = super(Message, cls).__new__(cls, msgtext)
+ msg.msgid = msgid
+ msg.domain = domain
+ msg.params = params
+ return msg
+
+ def translate(self, desired_locale=None):
+ """Translate this message to the desired locale.
+
+ :param desired_locale: The desired locale to translate the message to,
+ if no locale is provided the message will be
+ translated to the system's default locale.
+
+ :returns: the translated message in unicode
+ """
+
+ translated_message = Message._translate_msgid(self.msgid,
+ self.domain,
+ desired_locale)
+ if self.params is None:
+ # No need for more translation
+ return translated_message
+
+ # This Message object may have been formatted with one or more
+ # Message objects as substitution arguments, given either as a single
+ # argument, part of a tuple, or as one or more values in a dictionary.
+ # When translating this Message we need to translate those Messages too
+ translated_params = _translate_args(self.params, desired_locale)
+
+ translated_message = translated_message % translated_params
+
+ return translated_message
+
+ @staticmethod
+ def _translate_msgid(msgid, domain, desired_locale=None):
+ if not desired_locale:
+ system_locale = locale.getdefaultlocale()
+ # If the system locale is not available to the runtime use English
+ if not system_locale[0]:
+ desired_locale = 'en_US'
+ else:
+ desired_locale = system_locale[0]
+
+ locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR')
+ lang = gettext.translation(domain,
+ localedir=locale_dir,
+ languages=[desired_locale],
+ fallback=True)
+ if six.PY3:
+ translator = lang.gettext
+ else:
+ translator = lang.ugettext
+
+ translated_message = translator(msgid)
+ return translated_message
+
+ def __mod__(self, other):
+ # When we mod a Message we want the actual operation to be performed
+ # by the parent class (i.e. unicode()), the only thing we do here is
+ # save the original msgid and the parameters in case of a translation
+ params = self._sanitize_mod_params(other)
+ unicode_mod = super(Message, self).__mod__(params)
+ modded = Message(self.msgid,
+ msgtext=unicode_mod,
+ params=params,
+ domain=self.domain)
+ return modded
+
+ def _sanitize_mod_params(self, other):
+ """Sanitize the object being modded with this Message.
+
+ - Add support for modding 'None' so translation supports it
+ - Trim the modded object, which can be a large dictionary, to only
+ those keys that would actually be used in a translation
+ - Snapshot the object being modded, in case the message is
+ translated, it will be used as it was when the Message was created
+ """
+ if other is None:
+ params = (other,)
+ elif isinstance(other, dict):
+ # Merge the dictionaries
+ # Copy each item in case one does not support deep copy.
+ params = {}
+ if isinstance(self.params, dict):
+ for key, val in self.params.items():
+ params[key] = self._copy_param(val)
+ for key, val in other.items():
+ params[key] = self._copy_param(val)
+ else:
+ params = self._copy_param(other)
+ return params
+
+ def _copy_param(self, param):
+ try:
+ return copy.deepcopy(param)
+ except Exception:
+ # Fallback to casting to unicode this will handle the
+ # python code-like objects that can't be deep-copied
+ return six.text_type(param)
+
+ def __add__(self, other):
+ msg = _('Message objects do not support addition.')
+ raise TypeError(msg)
+
+ def __radd__(self, other):
+ return self.__add__(other)
+
+ def __str__(self):
+ # NOTE(luisg): Logging in python 2.6 tries to str() log records,
+ # and it expects specifically a UnicodeError in order to proceed.
+ msg = _('Message objects do not support str() because they may '
+ 'contain non-ascii characters. '
+ 'Please use unicode() or translate() instead.')
+ raise UnicodeError(msg)
+
+
+def get_available_languages(domain):
+ """Lists the available languages for the given translation domain.
+
+ :param domain: the domain to get languages for
+ """
+ if domain in _AVAILABLE_LANGUAGES:
+ return copy.copy(_AVAILABLE_LANGUAGES[domain])
+
+ localedir = '%s_LOCALEDIR' % domain.upper()
+ find = lambda x: gettext.find(domain,
+ localedir=os.environ.get(localedir),
+ languages=[x])
+
+ # NOTE(mrodden): en_US should always be available (and first in case
+ # order matters) since our in-line message strings are en_US
+ language_list = ['en_US']
+ # NOTE(luisg): Babel <1.0 used a function called list(), which was
+ # renamed to locale_identifiers() in >=1.0, the requirements master list
+ # requires >=0.9.6, uncapped, so defensively work with both. We can remove
+ # this check when the master list updates to >=1.0, and update all projects
+ list_identifiers = (getattr(localedata, 'list', None) or
+ getattr(localedata, 'locale_identifiers'))
+ locale_identifiers = list_identifiers()
+
+ for i in locale_identifiers:
+ if find(i) is not None:
+ language_list.append(i)
+
+ # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported
+ # locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they
+ # are perfectly legitimate locales:
+ # https://github.com/mitsuhiko/babel/issues/37
+ # In Babel 1.3 they fixed the bug and they support these locales, but
+ # they are still not explicitly "listed" by locale_identifiers().
+ # That is why we add the locales here explicitly if necessary so that
+ # they are listed as supported.
+ aliases = {'zh': 'zh_CN',
+ 'zh_Hant_HK': 'zh_HK',
+ 'zh_Hant': 'zh_TW',
+ 'fil': 'tl_PH'}
+ for (locale, alias) in six.iteritems(aliases):
+ if locale in language_list and alias not in language_list:
+ language_list.append(alias)
+
+ _AVAILABLE_LANGUAGES[domain] = language_list
+ return copy.copy(language_list)
+
+
+def translate(obj, desired_locale=None):
+ """Gets the translated unicode representation of the given object.
+
+ If the object is not translatable it is returned as-is.
+ If the locale is None the object is translated to the system locale.
+
+ :param obj: the object to translate
+ :param desired_locale: the locale to translate the message to, if None the
+ default system locale will be used
+ :returns: the translated object in unicode, or the original object if
+ it could not be translated
+ """
+ message = obj
+ if not isinstance(message, Message):
+ # If the object to translate is not already translatable,
+ # let's first get its unicode representation
+ message = six.text_type(obj)
+ if isinstance(message, Message):
+ # Even after unicoding() we still need to check if we are
+ # running with translatable unicode before translating
+ return message.translate(desired_locale)
+ return obj
+
+
+def _translate_args(args, desired_locale=None):
+ """Translates all the translatable elements of the given arguments object.
+
+ This method is used for translating the translatable values in method
+ arguments which include values of tuples or dictionaries.
+ If the object is not a tuple or a dictionary the object itself is
+ translated if it is translatable.
+
+ If the locale is None the object is translated to the system locale.
+
+ :param args: the args to translate
+ :param desired_locale: the locale to translate the args to, if None the
+ default system locale will be used
+ :returns: a new args object with the translated contents of the original
+ """
+ if isinstance(args, tuple):
+ return tuple(translate(v, desired_locale) for v in args)
+ if isinstance(args, dict):
+ translated_dict = {}
+ for (k, v) in six.iteritems(args):
+ translated_v = translate(v, desired_locale)
+ translated_dict[k] = translated_v
+ return translated_dict
+ return translate(args, desired_locale)
+
+
+class TranslationHandler(handlers.MemoryHandler):
+ """Handler that translates records before logging them.
+
+ The TranslationHandler takes a locale and a target logging.Handler object
+ to forward LogRecord objects to after translating them. This handler
+ depends on Message objects being logged, instead of regular strings.
+
+ The handler can be configured declaratively in the logging.conf as follows:
+
+ [handlers]
+ keys = translatedlog, translator
+
+ [handler_translatedlog]
+ class = handlers.WatchedFileHandler
+ args = ('/var/log/api-localized.log',)
+ formatter = context
+
+ [handler_translator]
+ class = openstack.common.log.TranslationHandler
+ target = translatedlog
+ args = ('zh_CN',)
+
+ If the specified locale is not available in the system, the handler will
+ log in the default locale.
+ """
+
+ def __init__(self, locale=None, target=None):
+ """Initialize a TranslationHandler
+
+ :param locale: locale to use for translating messages
+ :param target: logging.Handler object to forward
+ LogRecord objects to after translation
+ """
+ # NOTE(luisg): In order to allow this handler to be a wrapper for
+ # other handlers, such as a FileHandler, and still be able to
+ # configure it using logging.conf, this handler has to extend
+ # MemoryHandler because only the MemoryHandlers' logging.conf
+ # parsing is implemented such that it accepts a target handler.
+ handlers.MemoryHandler.__init__(self, capacity=0, target=target)
+ self.locale = locale
+
+ def setFormatter(self, fmt):
+ self.target.setFormatter(fmt)
+
+ def emit(self, record):
+ # We save the message from the original record to restore it
+ # after translation, so other handlers are not affected by this
+ original_msg = record.msg
+ original_args = record.args
+
+ try:
+ self._translate_and_log_record(record)
+ finally:
+ record.msg = original_msg
+ record.args = original_args
+
+ def _translate_and_log_record(self, record):
+ record.msg = translate(record.msg, self.locale)
+
+ # In addition to translating the message, we also need to translate
+ # arguments that were passed to the log method that were not part
+ # of the main message e.g., log.info(_('Some message %s'), this_one))
+ record.args = _translate_args(record.args, self.locale)
+
+ self.target.emit(record)
diff --git a/ironic_python_agent/openstack/common/importutils.py b/ironic_python_agent/openstack/common/importutils.py
new file mode 100644
index 00000000..42b36f67
--- /dev/null
+++ b/ironic_python_agent/openstack/common/importutils.py
@@ -0,0 +1,73 @@
+# Copyright 2011 OpenStack Foundation.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Import related utilities and helper functions.
+"""
+
+import sys
+import traceback
+
+
+def import_class(import_str):
+ """Returns a class from a string including module and class."""
+ mod_str, _sep, class_str = import_str.rpartition('.')
+ try:
+ __import__(mod_str)
+ return getattr(sys.modules[mod_str], class_str)
+ except (ValueError, AttributeError):
+ raise ImportError('Class %s cannot be found (%s)' %
+ (class_str,
+ traceback.format_exception(*sys.exc_info())))
+
+
+def import_object(import_str, *args, **kwargs):
+ """Import a class and return an instance of it."""
+ return import_class(import_str)(*args, **kwargs)
+
+
+def import_object_ns(name_space, import_str, *args, **kwargs):
+ """Tries to import object from default namespace.
+
+ Imports a class and return an instance of it, first by trying
+ to find the class in a default namespace, then failing back to
+ a full path if not found in the default namespace.
+ """
+ import_value = "%s.%s" % (name_space, import_str)
+ try:
+ return import_class(import_value)(*args, **kwargs)
+ except ImportError:
+ return import_class(import_str)(*args, **kwargs)
+
+
+def import_module(import_str):
+ """Import a module."""
+ __import__(import_str)
+ return sys.modules[import_str]
+
+
+def import_versioned_module(version, submodule=None):
+ module = 'ironic_python_agent.v%s' % version
+ if submodule:
+ module = '.'.join((module, submodule))
+ return import_module(module)
+
+
+def try_import(import_str, default=None):
+ """Try to import a module and if it fails return default."""
+ try:
+ return import_module(import_str)
+ except ImportError:
+ return default
diff --git a/ironic_python_agent/openstack/common/jsonutils.py b/ironic_python_agent/openstack/common/jsonutils.py
new file mode 100644
index 00000000..bbc83368
--- /dev/null
+++ b/ironic_python_agent/openstack/common/jsonutils.py
@@ -0,0 +1,174 @@
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# Copyright 2011 Justin Santa Barbara
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+'''
+JSON related utilities.
+
+This module provides a few things:
+
+ 1) A handy function for getting an object down to something that can be
+ JSON serialized. See to_primitive().
+
+ 2) Wrappers around loads() and dumps(). The dumps() wrapper will
+ automatically use to_primitive() for you if needed.
+
+ 3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
+ is available.
+'''
+
+
+import datetime
+import functools
+import inspect
+import itertools
+import json
+
+import six
+import six.moves.xmlrpc_client as xmlrpclib
+
+from ironic_python_agent.openstack.common import gettextutils
+from ironic_python_agent.openstack.common import importutils
+from ironic_python_agent.openstack.common import timeutils
+
+netaddr = importutils.try_import("netaddr")
+
+_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
+ inspect.isfunction, inspect.isgeneratorfunction,
+ inspect.isgenerator, inspect.istraceback, inspect.isframe,
+ inspect.iscode, inspect.isbuiltin, inspect.isroutine,
+ inspect.isabstract]
+
+_simple_types = (six.string_types + six.integer_types
+ + (type(None), bool, float))
+
+
+def to_primitive(value, convert_instances=False, convert_datetime=True,
+ level=0, max_depth=3):
+ """Convert a complex object into primitives.
+
+ Handy for JSON serialization. We can optionally handle instances,
+ but since this is a recursive function, we could have cyclical
+ data structures.
+
+ To handle cyclical data structures we could track the actual objects
+ visited in a set, but not all objects are hashable. Instead we just
+ track the depth of the object inspections and don't go too deep.
+
+ Therefore, convert_instances=True is lossy ... be aware.
+
+ """
+ # handle obvious types first - order of basic types determined by running
+ # full tests on nova project, resulting in the following counts:
+ # 572754 <type 'NoneType'>
+ # 460353 <type 'int'>
+ # 379632 <type 'unicode'>
+ # 274610 <type 'str'>
+ # 199918 <type 'dict'>
+ # 114200 <type 'datetime.datetime'>
+ # 51817 <type 'bool'>
+ # 26164 <type 'list'>
+ # 6491 <type 'float'>
+ # 283 <type 'tuple'>
+ # 19 <type 'long'>
+ if isinstance(value, _simple_types):
+ return value
+
+ if isinstance(value, datetime.datetime):
+ if convert_datetime:
+ return timeutils.strtime(value)
+ else:
+ return value
+
+ # value of itertools.count doesn't get caught by nasty_type_tests
+ # and results in infinite loop when list(value) is called.
+ if type(value) == itertools.count:
+ return six.text_type(value)
+
+ # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
+ # tests that raise an exception in a mocked method that
+ # has a @wrap_exception with a notifier will fail. If
+ # we up the dependency to 0.5.4 (when it is released) we
+ # can remove this workaround.
+ if getattr(value, '__module__', None) == 'mox':
+ return 'mock'
+
+ if level > max_depth:
+ return '?'
+
+ # The try block may not be necessary after the class check above,
+ # but just in case ...
+ try:
+ recursive = functools.partial(to_primitive,
+ convert_instances=convert_instances,
+ convert_datetime=convert_datetime,
+ level=level,
+ max_depth=max_depth)
+ if isinstance(value, dict):
+ return dict((k, recursive(v)) for k, v in six.iteritems(value))
+ elif isinstance(value, (list, tuple)):
+ return [recursive(lv) for lv in value]
+
+ # It's not clear why xmlrpclib created their own DateTime type, but
+ # for our purposes, make it a datetime type which is explicitly
+ # handled
+ if isinstance(value, xmlrpclib.DateTime):
+ value = datetime.datetime(*tuple(value.timetuple())[:6])
+
+ if convert_datetime and isinstance(value, datetime.datetime):
+ return timeutils.strtime(value)
+ elif isinstance(value, gettextutils.Message):
+ return value.data
+ elif hasattr(value, 'iteritems'):
+ return recursive(dict(value.iteritems()), level=level + 1)
+ elif hasattr(value, '__iter__'):
+ return recursive(list(value))
+ elif convert_instances and hasattr(value, '__dict__'):
+ # Likely an instance of something. Watch for cycles.
+ # Ignore class member vars.
+ return recursive(value.__dict__, level=level + 1)
+ elif netaddr and isinstance(value, netaddr.IPAddress):
+ return six.text_type(value)
+ else:
+ if any(test(value) for test in _nasty_type_tests):
+ return six.text_type(value)
+ return value
+ except TypeError:
+ # Class objects are tricky since they may define something like
+ # __iter__ defined but it isn't callable as list().
+ return six.text_type(value)
+
+
+def dumps(value, default=to_primitive, **kwargs):
+ return json.dumps(value, default=default, **kwargs)
+
+
+def loads(s):
+ return json.loads(s)
+
+
+def load(s):
+ return json.load(s)
+
+
+try:
+ import anyjson
+except ImportError:
+ pass
+else:
+ anyjson._modules.append((__name__, 'dumps', TypeError,
+ 'loads', ValueError, 'load'))
+ anyjson.force_implementation(__name__)
diff --git a/ironic_python_agent/openstack/common/local.py b/ironic_python_agent/openstack/common/local.py
new file mode 100644
index 00000000..0819d5b9
--- /dev/null
+++ b/ironic_python_agent/openstack/common/local.py
@@ -0,0 +1,45 @@
+# Copyright 2011 OpenStack Foundation.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Local storage of variables using weak references"""
+
+import threading
+import weakref
+
+
+class WeakLocal(threading.local):
+ def __getattribute__(self, attr):
+ rval = super(WeakLocal, self).__getattribute__(attr)
+ if rval:
+ # NOTE(mikal): this bit is confusing. What is stored is a weak
+ # reference, not the value itself. We therefore need to lookup
+ # the weak reference and return the inner value here.
+ rval = rval()
+ return rval
+
+ def __setattr__(self, attr, value):
+ value = weakref.ref(value)
+ return super(WeakLocal, self).__setattr__(attr, value)
+
+
+# NOTE(mikal): the name "store" should be deprecated in the future
+store = WeakLocal()
+
+# A "weak" store uses weak references and allows an object to fall out of scope
+# when it falls out of scope in the code that uses the thread local storage. A
+# "strong" store will hold a reference to the object so that it never falls out
+# of scope.
+weak_store = WeakLocal()
+strong_store = threading.local()
diff --git a/ironic_python_agent/openstack/common/log.py b/ironic_python_agent/openstack/common/log.py
new file mode 100644
index 00000000..20f4ca96
--- /dev/null
+++ b/ironic_python_agent/openstack/common/log.py
@@ -0,0 +1,712 @@
+# Copyright 2011 OpenStack Foundation.
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""OpenStack logging handler.
+
+This module adds to logging functionality by adding the option to specify
+a context object when calling the various log methods. If the context object
+is not specified, default formatting is used. Additionally, an instance uuid
+may be passed as part of the log message, which is intended to make it easier
+for admins to find messages related to a specific instance.
+
+It also allows setting of formatting information through conf.
+
+"""
+
+import inspect
+import itertools
+import logging
+import logging.config
+import logging.handlers
+import os
+import re
+import sys
+import traceback
+
+from oslo.config import cfg
+import six
+from six import moves
+
+from ironic_python_agent.openstack.common.gettextutils import _
+from ironic_python_agent.openstack.common import importutils
+from ironic_python_agent.openstack.common import jsonutils
+from ironic_python_agent.openstack.common import local
+
+
+_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
+
+_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']
+
+# NOTE(ldbragst): Let's build a list of regex objects using the list of
+# _SANITIZE_KEYS we already have. This way, we only have to add the new key
+# to the list of _SANITIZE_KEYS and we can generate regular expressions
+# for XML and JSON automatically.
+_SANITIZE_PATTERNS = []
+_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
+ r'(<%(key)s>).*?(</%(key)s>)',
+ r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
+ r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])']
+
+for key in _SANITIZE_KEYS:
+ for pattern in _FORMAT_PATTERNS:
+ reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
+ _SANITIZE_PATTERNS.append(reg_ex)
+
+
+common_cli_opts = [
+ cfg.BoolOpt('debug',
+ short='d',
+ default=False,
+ help='Print debugging output (set logging level to '
+ 'DEBUG instead of default WARNING level).'),
+ cfg.BoolOpt('verbose',
+ short='v',
+ default=False,
+ help='Print more verbose output (set logging level to '
+ 'INFO instead of default WARNING level).'),
+]
+
+logging_cli_opts = [
+ cfg.StrOpt('log-config-append',
+ metavar='PATH',
+ deprecated_name='log-config',
+ help='The name of logging configuration file. It does not '
+ 'disable existing loggers, but just appends specified '
+ 'logging configuration to any other existing logging '
+ 'options. Please see the Python logging module '
+ 'documentation for details on logging configuration '
+ 'files.'),
+ cfg.StrOpt('log-format',
+ default=None,
+ metavar='FORMAT',
+ help='DEPRECATED. '
+ 'A logging.Formatter log message format string which may '
+ 'use any of the available logging.LogRecord attributes. '
+ 'This option is deprecated. Please use '
+ 'logging_context_format_string and '
+ 'logging_default_format_string instead.'),
+ cfg.StrOpt('log-date-format',
+ default=_DEFAULT_LOG_DATE_FORMAT,
+ metavar='DATE_FORMAT',
+ help='Format string for %%(asctime)s in log records. '
+ 'Default: %(default)s'),
+ cfg.StrOpt('log-file',
+ metavar='PATH',
+ deprecated_name='logfile',
+ help='(Optional) Name of log file to output to. '
+ 'If no default is set, logging will go to stdout.'),
+ cfg.StrOpt('log-dir',
+ deprecated_name='logdir',
+ help='(Optional) The base directory used for relative '
+ '--log-file paths'),
+ cfg.BoolOpt('use-syslog',
+ default=False,
+ help='Use syslog for logging. '
+ 'Existing syslog format is DEPRECATED during I, '
+ 'and then will be changed in J to honor RFC5424'),
+ cfg.BoolOpt('use-syslog-rfc-format',
+ # TODO(bogdando) remove or use True after existing
+ # syslog format deprecation in J
+ default=False,
+ help='(Optional) Use syslog rfc5424 format for logging. '
+ 'If enabled, will add APP-NAME (RFC5424) before the '
+ 'MSG part of the syslog message. The old format '
+ 'without APP-NAME is deprecated in I, '
+ 'and will be removed in J.'),
+ cfg.StrOpt('syslog-log-facility',
+ default='LOG_USER',
+ help='Syslog facility to receive log lines')
+]
+
+generic_log_opts = [
+ cfg.BoolOpt('use_stderr',
+ default=True,
+ help='Log output to standard error')
+]
+
+log_opts = [
+ cfg.StrOpt('logging_context_format_string',
+ default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
+ '%(name)s [%(request_id)s %(user_identity)s] '
+ '%(instance)s%(message)s',
+ help='Format string to use for log messages with context'),
+ cfg.StrOpt('logging_default_format_string',
+ default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
+ '%(name)s [-] %(instance)s%(message)s',
+ help='Format string to use for log messages without context'),
+ cfg.StrOpt('logging_debug_format_suffix',
+ default='%(funcName)s %(pathname)s:%(lineno)d',
+ help='Data to append to log format when level is DEBUG'),
+ cfg.StrOpt('logging_exception_prefix',
+ default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
+ '%(instance)s',
+ help='Prefix each line of exception output with this format'),
+ cfg.ListOpt('default_log_levels',
+ default=[
+ 'amqp=WARN',
+ 'amqplib=WARN',
+ 'boto=WARN',
+ 'qpid=WARN',
+ 'sqlalchemy=WARN',
+ 'suds=INFO',
+ 'iso8601=WARN',
+ 'requests.packages.urllib3.connectionpool=WARN'
+ ],
+ help='List of logger=LEVEL pairs'),
+ cfg.BoolOpt('publish_errors',
+ default=False,
+ help='Publish error events'),
+ cfg.BoolOpt('fatal_deprecations',
+ default=False,
+ help='Make deprecations fatal'),
+
+ # NOTE(mikal): there are two options here because sometimes we are handed
+ # a full instance (and could include more information), and other times we
+ # are just handed a UUID for the instance.
+ cfg.StrOpt('instance_format',
+ default='[instance: %(uuid)s] ',
+ help='If an instance is passed with the log message, format '
+ 'it like this'),
+ cfg.StrOpt('instance_uuid_format',
+ default='[instance: %(uuid)s] ',
+ help='If an instance UUID is passed with the log message, '
+ 'format it like this'),
+]
+
+CONF = cfg.CONF
+CONF.register_cli_opts(common_cli_opts)
+CONF.register_cli_opts(logging_cli_opts)
+CONF.register_opts(generic_log_opts)
+CONF.register_opts(log_opts)
+
+# our new audit level
+# NOTE(jkoelker) Since we synthesized an audit level, make the logging
+# module aware of it so it acts like other levels.
+logging.AUDIT = logging.INFO + 1
+logging.addLevelName(logging.AUDIT, 'AUDIT')
+
+
+try:
+ NullHandler = logging.NullHandler
+except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7
+ class NullHandler(logging.Handler):
+ def handle(self, record):
+ pass
+
+ def emit(self, record):
+ pass
+
+ def createLock(self):
+ self.lock = None
+
+
+def _dictify_context(context):
+ if context is None:
+ return None
+ if not isinstance(context, dict) and getattr(context, 'to_dict', None):
+ context = context.to_dict()
+ return context
+
+
+def _get_binary_name():
+ return os.path.basename(inspect.stack()[-1][1])
+
+
+def _get_log_file_path(binary=None):
+ logfile = CONF.log_file
+ logdir = CONF.log_dir
+
+ if logfile and not logdir:
+ return logfile
+
+ if logfile and logdir:
+ return os.path.join(logdir, logfile)
+
+ if logdir:
+ binary = binary or _get_binary_name()
+ return '%s.log' % (os.path.join(logdir, binary),)
+
+ return None
+
+
+def mask_password(message, secret="***"):
+ """Replace password with 'secret' in message.
+
+ :param message: The string which includes security information.
+ :param secret: value with which to replace passwords.
+ :returns: The unicode value of message with the password fields masked.
+
+ For example:
+
+ >>> mask_password("'adminPass' : 'aaaaa'")
+ "'adminPass' : '***'"
+ >>> mask_password("'admin_pass' : 'aaaaa'")
+ "'admin_pass' : '***'"
+ >>> mask_password('"password" : "aaaaa"')
+ '"password" : "***"'
+ >>> mask_password("'original_password' : 'aaaaa'")
+ "'original_password' : '***'"
+ >>> mask_password("u'original_password' : u'aaaaa'")
+ "u'original_password' : u'***'"
+ """
+ message = six.text_type(message)
+
+ # NOTE(ldbragst): Check to see if anything in message contains any key
+ # specified in _SANITIZE_KEYS, if not then just return the message since
+ # we don't have to mask any passwords.
+ if not any(key in message for key in _SANITIZE_KEYS):
+ return message
+
+ secret = r'\g<1>' + secret + r'\g<2>'
+ for pattern in _SANITIZE_PATTERNS:
+ message = re.sub(pattern, secret, message)
+ return message
+
+
+class BaseLoggerAdapter(logging.LoggerAdapter):
+
+ def audit(self, msg, *args, **kwargs):
+ self.log(logging.AUDIT, msg, *args, **kwargs)
+
+
+class LazyAdapter(BaseLoggerAdapter):
+ def __init__(self, name='unknown', version='unknown'):
+ self._logger = None
+ self.extra = {}
+ self.name = name
+ self.version = version
+
+ @property
+ def logger(self):
+ if not self._logger:
+ self._logger = getLogger(self.name, self.version)
+ return self._logger
+
+
+class ContextAdapter(BaseLoggerAdapter):
+ warn = logging.LoggerAdapter.warning
+
+ def __init__(self, logger, project_name, version_string):
+ self.logger = logger
+ self.project = project_name
+ self.version = version_string
+ self._deprecated_messages_sent = dict()
+
+ @property
+ def handlers(self):
+ return self.logger.handlers
+
+ def deprecated(self, msg, *args, **kwargs):
+ """Call this method when a deprecated feature is used.
+
+ If the system is configured for fatal deprecations then the message
+ is logged at the 'critical' level and :class:`DeprecatedConfig` will
+ be raised.
+
+ Otherwise, the message will be logged (once) at the 'warn' level.
+
+ :raises: :class:`DeprecatedConfig` if the system is configured for
+ fatal deprecations.
+
+ """
+ stdmsg = _("Deprecated: %s") % msg
+ if CONF.fatal_deprecations:
+ self.critical(stdmsg, *args, **kwargs)
+ raise DeprecatedConfig(msg=stdmsg)
+
+ # Using a list because a tuple with dict can't be stored in a set.
+ sent_args = self._deprecated_messages_sent.setdefault(msg, list())
+
+ if args in sent_args:
+ # Already logged this message, so don't log it again.
+ return
+
+ sent_args.append(args)
+ self.warn(stdmsg, *args, **kwargs)
+
+ def process(self, msg, kwargs):
+ # NOTE(mrodden): catch any Message/other object and
+ # coerce to unicode before they can get
+ # to the python logging and possibly
+ # cause string encoding trouble
+ if not isinstance(msg, six.string_types):
+ msg = six.text_type(msg)
+
+ if 'extra' not in kwargs:
+ kwargs['extra'] = {}
+ extra = kwargs['extra']
+
+ context = kwargs.pop('context', None)
+ if not context:
+ context = getattr(local.store, 'context', None)
+ if context:
+ extra.update(_dictify_context(context))
+
+ instance = kwargs.pop('instance', None)
+ instance_uuid = (extra.get('instance_uuid') or
+ kwargs.pop('instance_uuid', None))
+ instance_extra = ''
+ if instance:
+ instance_extra = CONF.instance_format % instance
+ elif instance_uuid:
+ instance_extra = (CONF.instance_uuid_format
+ % {'uuid': instance_uuid})
+ extra['instance'] = instance_extra
+
+ extra.setdefault('user_identity', kwargs.pop('user_identity', None))
+
+ extra['project'] = self.project
+ extra['version'] = self.version
+ extra['extra'] = extra.copy()
+ return msg, kwargs
+
+
+class JSONFormatter(logging.Formatter):
+ def __init__(self, fmt=None, datefmt=None):
+ # NOTE(jkoelker) we ignore the fmt argument, but its still there
+ # since logging.config.fileConfig passes it.
+ self.datefmt = datefmt
+
+ def formatException(self, ei, strip_newlines=True):
+ lines = traceback.format_exception(*ei)
+ if strip_newlines:
+ lines = [moves.filter(
+ lambda x: x,
+ line.rstrip().splitlines()) for line in lines]
+ lines = list(itertools.chain(*lines))
+ return lines
+
+ def format(self, record):
+ message = {'message': record.getMessage(),
+ 'asctime': self.formatTime(record, self.datefmt),
+ 'name': record.name,
+ 'msg': record.msg,
+ 'args': record.args,
+ 'levelname': record.levelname,
+ 'levelno': record.levelno,
+ 'pathname': record.pathname,
+ 'filename': record.filename,
+ 'module': record.module,
+ 'lineno': record.lineno,
+ 'funcname': record.funcName,
+ 'created': record.created,
+ 'msecs': record.msecs,
+ 'relative_created': record.relativeCreated,
+ 'thread': record.thread,
+ 'thread_name': record.threadName,
+ 'process_name': record.processName,
+ 'process': record.process,
+ 'traceback': None}
+
+ if hasattr(record, 'extra'):
+ message['extra'] = record.extra
+
+ if record.exc_info:
+ message['traceback'] = self.formatException(record.exc_info)
+
+ return jsonutils.dumps(message)
+
+
+def _create_logging_excepthook(product_name):
+ def logging_excepthook(exc_type, value, tb):
+ extra = {}
+ if CONF.verbose or CONF.debug:
+ extra['exc_info'] = (exc_type, value, tb)
+ getLogger(product_name).critical(
+ "".join(traceback.format_exception_only(exc_type, value)),
+ **extra)
+ return logging_excepthook
+
+
+class LogConfigError(Exception):
+
+ message = _('Error loading logging config %(log_config)s: %(err_msg)s')
+
+ def __init__(self, log_config, err_msg):
+ self.log_config = log_config
+ self.err_msg = err_msg
+
+ def __str__(self):
+ return self.message % dict(log_config=self.log_config,
+ err_msg=self.err_msg)
+
+
+def _load_log_config(log_config_append):
+ try:
+ logging.config.fileConfig(log_config_append,
+ disable_existing_loggers=False)
+ except moves.configparser.Error as exc:
+ raise LogConfigError(log_config_append, str(exc))
+
+
+def setup(product_name, version='unknown'):
+ """Setup logging."""
+ if CONF.log_config_append:
+ _load_log_config(CONF.log_config_append)
+ else:
+ _setup_logging_from_conf(product_name, version)
+ sys.excepthook = _create_logging_excepthook(product_name)
+
+
+def set_defaults(logging_context_format_string):
+ cfg.set_defaults(log_opts,
+ logging_context_format_string=
+ logging_context_format_string)
+
+
+def _find_facility_from_conf():
+ facility_names = logging.handlers.SysLogHandler.facility_names
+ facility = getattr(logging.handlers.SysLogHandler,
+ CONF.syslog_log_facility,
+ None)
+
+ if facility is None and CONF.syslog_log_facility in facility_names:
+ facility = facility_names.get(CONF.syslog_log_facility)
+
+ if facility is None:
+ valid_facilities = facility_names.keys()
+ consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
+ 'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
+ 'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
+ 'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
+ 'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
+ valid_facilities.extend(consts)
+ raise TypeError(_('syslog facility must be one of: %s') %
+ ', '.join("'%s'" % fac
+ for fac in valid_facilities))
+
+ return facility
+
+
+class RFCSysLogHandler(logging.handlers.SysLogHandler):
+ def __init__(self, *args, **kwargs):
+ self.binary_name = _get_binary_name()
+ super(RFCSysLogHandler, self).__init__(*args, **kwargs)
+
+ def format(self, record):
+ msg = super(RFCSysLogHandler, self).format(record)
+ msg = self.binary_name + ' ' + msg
+ return msg
+
+
+def _setup_logging_from_conf(project, version):
+ log_root = getLogger(None).logger
+ for handler in log_root.handlers:
+ log_root.removeHandler(handler)
+
+ if CONF.use_syslog:
+ facility = _find_facility_from_conf()
+ # TODO(bogdando) use the format provided by RFCSysLogHandler
+ # after existing syslog format deprecation in J
+ if CONF.use_syslog_rfc_format:
+ syslog = RFCSysLogHandler(address='/dev/log',
+ facility=facility)
+ else:
+ syslog = logging.handlers.SysLogHandler(address='/dev/log',
+ facility=facility)
+ log_root.addHandler(syslog)
+
+ logpath = _get_log_file_path()
+ if logpath:
+ filelog = logging.handlers.WatchedFileHandler(logpath)
+ log_root.addHandler(filelog)
+
+ if CONF.use_stderr:
+ streamlog = ColorHandler()
+ log_root.addHandler(streamlog)
+
+ elif not logpath:
+ # pass sys.stdout as a positional argument
+ # python2.6 calls the argument strm, in 2.7 it's stream
+ streamlog = logging.StreamHandler(sys.stdout)
+ log_root.addHandler(streamlog)
+
+ if CONF.publish_errors:
+ handler = importutils.import_object(
+            "ironic_python_agent.openstack.common.log_handler."
+            "PublishErrorsHandler",
+ logging.ERROR)
+ log_root.addHandler(handler)
+
+ datefmt = CONF.log_date_format
+ for handler in log_root.handlers:
+ # NOTE(alaski): CONF.log_format overrides everything currently. This
+ # should be deprecated in favor of context aware formatting.
+ if CONF.log_format:
+ handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
+ datefmt=datefmt))
+ log_root.info('Deprecated: log_format is now deprecated and will '
+ 'be removed in the next release')
+ else:
+ handler.setFormatter(ContextFormatter(project=project,
+ version=version,
+ datefmt=datefmt))
+
+ if CONF.debug:
+ log_root.setLevel(logging.DEBUG)
+ elif CONF.verbose:
+ log_root.setLevel(logging.INFO)
+ else:
+ log_root.setLevel(logging.WARNING)
+
+ for pair in CONF.default_log_levels:
+ mod, _sep, level_name = pair.partition('=')
+ level = logging.getLevelName(level_name)
+ logger = logging.getLogger(mod)
+ logger.setLevel(level)
+
+_loggers = {}
+
+
+def getLogger(name='unknown', version='unknown'):
+ if name not in _loggers:
+ _loggers[name] = ContextAdapter(logging.getLogger(name),
+ name,
+ version)
+ return _loggers[name]
+
+
+def getLazyLogger(name='unknown', version='unknown'):
+ """Returns lazy logger.
+
+ Creates a pass-through logger that does not create the real logger
+ until it is really needed and delegates all calls to the real logger
+ once it is created.
+ """
+ return LazyAdapter(name, version)
+
+
+class WritableLogger(object):
+ """A thin wrapper that responds to `write` and logs."""
+
+ def __init__(self, logger, level=logging.INFO):
+ self.logger = logger
+ self.level = level
+
+ def write(self, msg):
+ self.logger.log(self.level, msg.rstrip())
+
+
+class ContextFormatter(logging.Formatter):
+ """A context.RequestContext aware formatter configured through flags.
+
+ The flags used to set format strings are: logging_context_format_string
+ and logging_default_format_string. You can also specify
+ logging_debug_format_suffix to append extra formatting if the log level is
+ debug.
+
+ For information about what variables are available for the formatter see:
+ http://docs.python.org/library/logging.html#formatter
+
+ If available, uses the context value stored in TLS - local.store.context
+
+ """
+
+ def __init__(self, *args, **kwargs):
+ """Initialize ContextFormatter instance
+
+ Takes additional keyword arguments which can be used in the message
+ format string.
+
+ :keyword project: project name
+ :type project: string
+ :keyword version: project version
+ :type version: string
+
+ """
+
+ self.project = kwargs.pop('project', 'unknown')
+ self.version = kwargs.pop('version', 'unknown')
+
+ logging.Formatter.__init__(self, *args, **kwargs)
+
+ def format(self, record):
+ """Uses contextstring if request_id is set, otherwise default."""
+
+ # store project info
+ record.project = self.project
+ record.version = self.version
+
+ # store request info
+ context = getattr(local.store, 'context', None)
+ if context:
+ d = _dictify_context(context)
+ for k, v in d.items():
+ setattr(record, k, v)
+
+ # NOTE(sdague): default the fancier formatting params
+ # to an empty string so we don't throw an exception if
+ # they get used
+ for key in ('instance', 'color', 'user_identity'):
+ if key not in record.__dict__:
+ record.__dict__[key] = ''
+
+ if record.__dict__.get('request_id'):
+ self._fmt = CONF.logging_context_format_string
+ else:
+ self._fmt = CONF.logging_default_format_string
+
+ if (record.levelno == logging.DEBUG and
+ CONF.logging_debug_format_suffix):
+ self._fmt += " " + CONF.logging_debug_format_suffix
+
+ # Cache this on the record, Logger will respect our formatted copy
+ if record.exc_info:
+ record.exc_text = self.formatException(record.exc_info, record)
+ return logging.Formatter.format(self, record)
+
+ def formatException(self, exc_info, record=None):
+ """Format exception output with CONF.logging_exception_prefix."""
+ if not record:
+ return logging.Formatter.formatException(self, exc_info)
+
+ stringbuffer = moves.StringIO()
+ traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
+ None, stringbuffer)
+ lines = stringbuffer.getvalue().split('\n')
+ stringbuffer.close()
+
+ if CONF.logging_exception_prefix.find('%(asctime)') != -1:
+ record.asctime = self.formatTime(record, self.datefmt)
+
+ formatted_lines = []
+ for line in lines:
+ pl = CONF.logging_exception_prefix % record.__dict__
+ fl = '%s%s' % (pl, line)
+ formatted_lines.append(fl)
+ return '\n'.join(formatted_lines)
+
+
+class ColorHandler(logging.StreamHandler):
+ LEVEL_COLORS = {
+ logging.DEBUG: '\033[00;32m', # GREEN
+ logging.INFO: '\033[00;36m', # CYAN
+ logging.AUDIT: '\033[01;36m', # BOLD CYAN
+ logging.WARN: '\033[01;33m', # BOLD YELLOW
+ logging.ERROR: '\033[01;31m', # BOLD RED
+ logging.CRITICAL: '\033[01;31m', # BOLD RED
+ }
+
+ def format(self, record):
+ record.color = self.LEVEL_COLORS[record.levelno]
+ return logging.StreamHandler.format(self, record)
+
+
+class DeprecatedConfig(Exception):
+ message = _("Fatal call to deprecated config: %(msg)s")
+
+ def __init__(self, msg):
+ super(Exception, self).__init__(self.message % dict(msg=msg))
diff --git a/ironic_python_agent/openstack/common/timeutils.py b/ironic_python_agent/openstack/common/timeutils.py
new file mode 100644
index 00000000..52688a02
--- /dev/null
+++ b/ironic_python_agent/openstack/common/timeutils.py
@@ -0,0 +1,210 @@
+# Copyright 2011 OpenStack Foundation.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Time related utilities and helper functions.
+"""
+
+import calendar
+import datetime
+import time
+
+import iso8601
+import six
+
+
+# ISO 8601 extended time format with microseconds
+_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
+_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
+PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND
+
+
+def isotime(at=None, subsecond=False):
+ """Stringify time in ISO 8601 format."""
+ if not at:
+ at = utcnow()
+ st = at.strftime(_ISO8601_TIME_FORMAT
+ if not subsecond
+ else _ISO8601_TIME_FORMAT_SUBSECOND)
+ tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
+ st += ('Z' if tz == 'UTC' else tz)
+ return st
+
+
+def parse_isotime(timestr):
+ """Parse time from ISO 8601 format."""
+ try:
+ return iso8601.parse_date(timestr)
+ except iso8601.ParseError as e:
+ raise ValueError(six.text_type(e))
+ except TypeError as e:
+ raise ValueError(six.text_type(e))
+
+
+def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
+ """Returns formatted utcnow."""
+ if not at:
+ at = utcnow()
+ return at.strftime(fmt)
+
+
+def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
+ """Turn a formatted time back into a datetime."""
+ return datetime.datetime.strptime(timestr, fmt)
+
+
+def normalize_time(timestamp):
+ """Normalize time in arbitrary timezone to UTC naive object."""
+ offset = timestamp.utcoffset()
+ if offset is None:
+ return timestamp
+ return timestamp.replace(tzinfo=None) - offset
+
+
+def is_older_than(before, seconds):
+ """Return True if before is older than seconds."""
+ if isinstance(before, six.string_types):
+ before = parse_strtime(before).replace(tzinfo=None)
+ else:
+ before = before.replace(tzinfo=None)
+
+ return utcnow() - before > datetime.timedelta(seconds=seconds)
+
+
+def is_newer_than(after, seconds):
+ """Return True if after is newer than seconds."""
+ if isinstance(after, six.string_types):
+ after = parse_strtime(after).replace(tzinfo=None)
+ else:
+ after = after.replace(tzinfo=None)
+
+ return after - utcnow() > datetime.timedelta(seconds=seconds)
+
+
+def utcnow_ts():
+ """Timestamp version of our utcnow function."""
+ if utcnow.override_time is None:
+ # NOTE(kgriffs): This is several times faster
+ # than going through calendar.timegm(...)
+ return int(time.time())
+
+ return calendar.timegm(utcnow().timetuple())
+
+
+def utcnow():
+ """Overridable version of utils.utcnow."""
+ if utcnow.override_time:
+ try:
+ return utcnow.override_time.pop(0)
+ except AttributeError:
+ return utcnow.override_time
+ return datetime.datetime.utcnow()
+
+
+def iso8601_from_timestamp(timestamp):
+ """Returns a iso8601 formatted date from timestamp."""
+ return isotime(datetime.datetime.utcfromtimestamp(timestamp))
+
+
+utcnow.override_time = None
+
+
+def set_time_override(override_time=None):
+ """Overrides utils.utcnow.
+
+ Make it return a constant time or a list thereof, one at a time.
+
+ :param override_time: datetime instance or list thereof. If not
+ given, defaults to the current UTC time.
+ """
+ utcnow.override_time = override_time or datetime.datetime.utcnow()
+
+
+def advance_time_delta(timedelta):
+ """Advance overridden time using a datetime.timedelta."""
+ assert(not utcnow.override_time is None)
+ try:
+ for dt in utcnow.override_time:
+ dt += timedelta
+ except TypeError:
+ utcnow.override_time += timedelta
+
+
+def advance_time_seconds(seconds):
+ """Advance overridden time by seconds."""
+ advance_time_delta(datetime.timedelta(0, seconds))
+
+
+def clear_time_override():
+ """Remove the overridden time."""
+ utcnow.override_time = None
+
+
+def marshall_now(now=None):
+ """Make an rpc-safe datetime with microseconds.
+
+ Note: tzinfo is stripped, but not required for relative times.
+ """
+ if not now:
+ now = utcnow()
+ return dict(day=now.day, month=now.month, year=now.year, hour=now.hour,
+ minute=now.minute, second=now.second,
+ microsecond=now.microsecond)
+
+
+def unmarshall_time(tyme):
+ """Unmarshall a datetime dict."""
+ return datetime.datetime(day=tyme['day'],
+ month=tyme['month'],
+ year=tyme['year'],
+ hour=tyme['hour'],
+ minute=tyme['minute'],
+ second=tyme['second'],
+ microsecond=tyme['microsecond'])
+
+
+def delta_seconds(before, after):
+ """Return the difference between two timing objects.
+
+ Compute the difference in seconds between two date, time, or
+ datetime objects (as a float, to microsecond resolution).
+ """
+ delta = after - before
+ return total_seconds(delta)
+
+
+def total_seconds(delta):
+ """Return the total seconds of datetime.timedelta object.
+
+ Compute total seconds of datetime.timedelta, datetime.timedelta
+ doesn't have method total_seconds in Python2.6, calculate it manually.
+ """
+ try:
+ return delta.total_seconds()
+ except AttributeError:
+ return ((delta.days * 24 * 3600) + delta.seconds +
+ float(delta.microseconds) / (10 ** 6))
+
+
+def is_soon(dt, window):
+ """Determines if time is going to happen in the next window seconds.
+
+ :param dt: the time
+ :param window: minimum seconds to remain to consider the time not soon
+
+ :return: True if expiration is within the given duration
+ """
+ soon = (utcnow() + datetime.timedelta(seconds=window))
+ return normalize_time(dt) <= soon
diff --git a/ironic_python_agent/overlord_agent_api.py b/ironic_python_agent/overlord_agent_api.py
new file mode 100644
index 00000000..fda86b22
--- /dev/null
+++ b/ironic_python_agent/overlord_agent_api.py
@@ -0,0 +1,105 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import json
+
+import requests
+
+from ironic_python_agent import encoding
+from ironic_python_agent import errors
+
+
+class APIClient(object):
+ api_version = 'v1'
+
+ def __init__(self, api_url):
+ self.api_url = api_url.rstrip('/')
+ self.session = requests.Session()
+ self.encoder = encoding.RESTJSONEncoder()
+
+ def _request(self, method, path, data=None):
+ request_url = '{api_url}{path}'.format(api_url=self.api_url, path=path)
+
+ if data is not None:
+ data = self.encoder.encode(data)
+
+ request_headers = {
+ 'Content-Type': 'application/json',
+ 'Accept': 'application/json',
+ }
+
+ return self.session.request(method,
+ request_url,
+ headers=request_headers,
+ data=data)
+
+ def heartbeat(self, uuid, advertise_address):
+ path = '/{api_version}/nodes/{uuid}/vendor_passthru/heartbeat'.format(
+ api_version=self.api_version,
+ uuid=uuid
+ )
+ data = {
+ 'agent_url': self._get_agent_url(advertise_address)
+ }
+ try:
+ response = self._request('POST', path, data=data)
+ except Exception as e:
+ raise errors.HeartbeatError(str(e))
+
+ if response.status_code != requests.codes.NO_CONTENT:
+ msg = 'Invalid status code: {0}'.format(response.status_code)
+ raise errors.HeartbeatError(msg)
+
+ try:
+ return float(response.headers['Heartbeat-Before'])
+ except KeyError:
+ raise errors.HeartbeatError('Missing Heartbeat-Before header')
+ except Exception:
+ raise errors.HeartbeatError('Invalid Heartbeat-Before header')
+
+ def lookup_node(self, hardware_info):
+ path = '/{api_version}/drivers/teeth/lookup'.format(
+ api_version=self.api_version
+ )
+ # This hardware won't be saved on the node currently, because of how
+ # driver_vendor_passthru is implemented (no node saving).
+ data = {
+ 'hardware': hardware_info,
+ }
+
+ try:
+ response = self._request('POST', path, data=data)
+ except Exception as e:
+ raise errors.LookupNodeError(str(e))
+
+ if response.status_code != requests.codes.OK:
+ msg = 'Invalid status code: {0}'.format(response.status_code)
+ raise errors.LookupNodeError(msg)
+
+ try:
+ content = json.loads(response.content)
+ except Exception as e:
+ raise errors.LookupNodeError('Error decoding response: '
+ + str(e))
+
+ if 'node' not in content or 'uuid' not in content['node']:
+ raise errors.LookupNodeError('Got invalid data from the API: '
+ '{0}'.format(content))
+ return content['node']
+
+ def _get_agent_url(self, advertise_address):
+ return 'http://{0}:{1}'.format(advertise_address[0],
+ advertise_address[1])
diff --git a/ironic_python_agent/shell/__init__.py b/ironic_python_agent/shell/__init__.py
new file mode 100644
index 00000000..2a30de06
--- /dev/null
+++ b/ironic_python_agent/shell/__init__.py
@@ -0,0 +1,15 @@
+"""
+Copyright 2014 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
diff --git a/ironic_python_agent/shell/copy_configdrive_to_disk.sh b/ironic_python_agent/shell/copy_configdrive_to_disk.sh
new file mode 100755
index 00000000..daeae1d7
--- /dev/null
+++ b/ironic_python_agent/shell/copy_configdrive_to_disk.sh
@@ -0,0 +1,46 @@
#!/bin/bash
#
# Inject the contents of CONFIGDRIVE_DIR onto DEVICE as an iso9660
# filesystem on a small partition appended at the end of the device.

set -e

# Prefix all output with the script name for easier log correlation.
log() {
    echo "`basename $0`: $@"
}

usage() {
    [[ -z "$1" ]] || echo -e "USAGE ERROR: $@\n"
    echo "`basename $0`: CONFIGDRIVE_DIR DEVICE"
    echo " - This script injects CONFIGDRIVE_DIR contents as an iso9660"
    echo " filesystem on a partition at the end of DEVICE."
    exit 1
}

CONFIGDRIVE_DIR="$1"
DEVICE="$2"

[[ -d $CONFIGDRIVE_DIR ]] || usage "$CONFIGDRIVE_DIR (CONFIGDRIVE_DIR) is not a directory"
[[ -b $DEVICE ]] || usage "$DEVICE (DEVICE) is not a block device"

# Create small partition at the end of the device
# (negative parted offsets count back from the end: last 16MiB of the disk)
log "Adding configdrive partition to $DEVICE"
parted -a optimal -s -- $DEVICE mkpart primary ext2 -16MiB -0

# Find partition we just created
# Dump all partitions, ignore empty ones, then get the last partition ID
ISO_PARTITION=`sfdisk --dump $DEVICE | grep -v ' 0,' | tail -n1 | awk '{print $1}'`

# This generates the ISO image of the config drive.
# The iso is written directly onto the new partition; 'config-2' is the
# conventional OpenStack config-drive volume label.
log "Writing Configdrive contents in $CONFIGDRIVE_DIR to $ISO_PARTITION"
genisoimage \
    -o ${ISO_PARTITION} \
    -ldots \
    -input-charset 'utf-8' \
    -allow-lowercase \
    -allow-multidot \
    -l \
    -publisher "teeth" \
    -J \
    -r \
    -V 'config-2' \
    ${CONFIGDRIVE_DIR}

log "${DEVICE} imaged successfully!"
diff --git a/ironic_python_agent/shell/reboot.sh b/ironic_python_agent/shell/reboot.sh
new file mode 100644
index 00000000..ef9832f5
--- /dev/null
+++ b/ironic_python_agent/shell/reboot.sh
@@ -0,0 +1,8 @@
#!/bin/bash
#
# This script reboots by echoing into /proc/sysrq-trigger.

set -e

# 's' syncs all mounted filesystems; 'b' reboots immediately without a
# clean shutdown. Requires magic-sysrq support enabled in the kernel.
echo "s" > /proc/sysrq-trigger
echo "b" > /proc/sysrq-trigger
diff --git a/ironic_python_agent/shell/write_image.sh b/ironic_python_agent/shell/write_image.sh
new file mode 100755
index 00000000..ad68298b
--- /dev/null
+++ b/ironic_python_agent/shell/write_image.sh
@@ -0,0 +1,40 @@
#!/bin/bash
#
# Write IMAGEFILE onto block device DEVICE.
#
# This should work with almost any image that uses MBR partitioning and doesn't already
# have 3 or more partitions.

set -e

# Prefix all output with the script name for easier log correlation.
log() {
    echo "`basename $0`: $@"
}

usage() {
    [[ -z "$1" ]] || echo -e "USAGE ERROR: $@\n"
    echo "`basename $0`: IMAGEFILE DEVICE"
    echo " - This script images DEVICE with IMAGEFILE"
    exit 1
}

IMAGEFILE="$1"
DEVICE="$2"

[[ -f $IMAGEFILE ]] || usage "$IMAGEFILE (IMAGEFILE) is not a file"
[[ -b $DEVICE ]] || usage "$DEVICE (DEVICE) is not a block device"

# In production this will be replaced with secure erasing the drives
# For now we need to ensure there aren't any old (GPT) partitions on the drive
log "Erasing existing mbr from ${DEVICE}"
dd if=/dev/zero of=$DEVICE bs=512 count=10

## Doing two steps allows us to use dd, which allows us to tweak things like
## blocksize and allows use of direct io
# Converts image to raw
log "Imaging $IMAGEFILE to RAW format"
qemu-img convert -O raw $IMAGEFILE /tmp/image.raw

# Write image onto device
log "Imaging $DEVICE"
dd if=/tmp/image.raw of=$DEVICE bs=64K oflag=direct

# Remove the (potentially multi-GB) temporary raw image as soon as it has
# been written out; the agent typically runs from a space-limited ramdisk
# and leaving it around can exhaust /tmp.
rm -f /tmp/image.raw

log "${DEVICE} imaged successfully!"
diff --git a/ironic_python_agent/standby.py b/ironic_python_agent/standby.py
new file mode 100644
index 00000000..3e10f16a
--- /dev/null
+++ b/ironic_python_agent/standby.py
@@ -0,0 +1,201 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import hashlib
+import os
+import requests
+import subprocess
+import time
+
+from teeth_agent import base
+from teeth_agent import configdrive
+from teeth_agent import decorators
+from teeth_agent import errors
+from teeth_agent import hardware
+from teeth_agent.openstack.common import log
+
+LOG = log.getLogger(__name__)
+
+
+def _configdrive_location():
+ return '/tmp/configdrive'
+
+
+def _image_location(image_info):
+ return '/tmp/{0}'.format(image_info['id'])
+
+
+def _path_to_script(script):
+ cwd = os.path.dirname(os.path.realpath(__file__))
+ return os.path.join(cwd, script)
+
+
def _write_image(image_info, device):
    """Write the locally cached image onto *device*.

    Shells out to shell/write_image.sh; raises ImageWriteError when the
    script exits non-zero. Timing is logged for diagnostics.
    """
    start = time.time()
    image_path = _image_location(image_info)
    script_path = _path_to_script('shell/write_image.sh')

    cmd = ['/bin/bash', script_path, image_path, device]
    LOG.info('Writing image with command: {0}'.format(' '.join(cmd)))
    status = subprocess.call(cmd)
    if status != 0:
        raise errors.ImageWriteError(status, device)

    elapsed = time.time() - start
    LOG.info('Image {0} written to device {1} in {2} seconds'.format(
        image_path, device, elapsed))
+
+
def _copy_configdrive_to_disk(configdrive_dir, device):
    """Copy a staged configdrive directory onto *device*.

    Shells out to shell/copy_configdrive_to_disk.sh; raises
    ConfigDriveWriteError when the script exits non-zero.
    """
    start = time.time()
    script_path = _path_to_script('shell/copy_configdrive_to_disk.sh')
    cmd = ['/bin/bash', script_path, configdrive_dir, device]
    LOG.info('copying configdrive to disk with command {0}'.format(
        ' '.join(cmd)))

    status = subprocess.call(cmd)
    if status != 0:
        raise errors.ConfigDriveWriteError(status, device)

    elapsed = time.time() - start
    LOG.info('configdrive copied from {0} to {1} in {2} seconds'.format(
        configdrive_dir,
        device,
        elapsed))
+
+
def _request_url(image_info, url):
    """GET *url* as a streaming response.

    :raises: ImageDownloadError when the response status is not 200.
    """
    response = requests.get(url, stream=True)
    if response.status_code != 200:
        raise errors.ImageDownloadError(image_info['id'])
    return response
+
+
def _download_image(image_info):
    """Download an image into the local cache and verify it.

    Tries each URL in image_info['urls'] in order until one succeeds,
    streams the body to disk, then checks it against the checksums in
    image_info['hashes'].

    :raises: ImageDownloadError if every URL fails or streaming fails.
    :raises: ImageChecksumError if no supplied checksum matches.
    """
    starttime = time.time()
    resp = None
    for url in image_info['urls']:
        try:
            LOG.info("Attempting to download image from {0}".format(url))
            resp = _request_url(image_info, url)
        except errors.ImageDownloadError:
            failtime = time.time() - starttime
            log_msg = "Image download failed. URL: {0}; time: {1} seconds"
            LOG.warning(log_msg.format(url, failtime))
            continue
        else:
            # First URL that responds successfully wins.
            break
    if resp is None:
        raise errors.ImageDownloadError(image_info['id'])

    image_location = _image_location(image_info)
    with open(image_location, 'wb') as f:
        try:
            # Stream in 1 MiB chunks so the whole image is never in memory.
            for chunk in resp.iter_content(1024 * 1024):
                f.write(chunk)
        except Exception:
            raise errors.ImageDownloadError(image_info['id'])

    totaltime = time.time() - starttime
    LOG.info("Image downloaded from {0} in {1} seconds".format(image_location,
                                                               totaltime))

    if not _verify_image(image_info, image_location):
        raise errors.ImageChecksumError(image_info['id'])
+
+
def _verify_image(image_info, image_location):
    """Verify a downloaded image against its supplied checksums.

    Iterates over image_info['hashes'] ({algorithm_name: hexdigest}) and
    returns True on the first algorithm whose digest matches; algorithm
    names hashlib does not provide are skipped.

    :returns: True if any supplied checksum matches, False otherwise.
    """
    hashes = image_info['hashes']
    for k, v in hashes.iteritems():
        algo = getattr(hashlib, k, None)
        if algo is None:
            # Not an algorithm hashlib knows about; try the next entry.
            continue
        log_msg = 'Verifying image at {0} with algorithm {1} against hash {2}'
        LOG.debug(log_msg.format(image_location, k, v))
        hasher = algo()
        # Hash in binary mode and in 1 MiB chunks: text mode could corrupt
        # the digest, images may be far larger than available memory, and
        # the context manager closes the file deterministically (the
        # previous implementation leaked the file handle).
        with open(image_location, 'rb') as image_file:
            for chunk in iter(lambda: image_file.read(1024 * 1024), b''):
                hasher.update(chunk)
        hash_ = hasher.hexdigest()
        if hash_ == v:
            return True
        else:
            log_msg = ('Image verification failed. Location: {0};'
                       'algorithm: {1}; image hash: {2};'
                       'verification hash: {3}')
            LOG.warning(log_msg.format(image_location, k, hash_, v))
    return False
+
+
def _validate_image_info(image_info=None, **kwargs):
    """Validate the image_info payload passed to standby commands.

    Required fields: 'id', 'urls' (non-empty list), 'hashes' (non-empty
    dict). Extra keyword arguments are accepted and ignored so this can
    be used as an async_command validator.

    :raises: InvalidCommandParamsError if a field is missing or malformed.
    """
    image_info = image_info or {}

    for field in ['id', 'urls', 'hashes']:
        if field not in image_info:
            msg = 'Image is missing \'{0}\' field.'.format(field)
            raise errors.InvalidCommandParamsError(msg)

    # isinstance() rather than exact type comparison: list/dict subclasses
    # behave identically for our purposes and should be accepted.
    if not isinstance(image_info['urls'], list) or not image_info['urls']:
        raise errors.InvalidCommandParamsError(
            'Image \'urls\' must be a list with at least one element.')

    if not isinstance(image_info['hashes'], dict) or not image_info['hashes']:
        raise errors.InvalidCommandParamsError(
            'Image \'hashes\' must be a dictionary with at least one '
            'element.')
+
+
class StandbyMode(base.BaseAgentMode):
    """Agent mode for standby machines: cache, prepare, and boot images."""

    def __init__(self):
        super(StandbyMode, self).__init__('STANDBY')
        self.command_map['cache_image'] = self.cache_image
        self.command_map['prepare_image'] = self.prepare_image
        self.command_map['run_image'] = self.run_image

        # ID of the image most recently written to disk, or None.
        self.cached_image_id = None

    @decorators.async_command(_validate_image_info)
    def cache_image(self, command_name, image_info=None, force=False):
        """Download an image and write it to the OS install device.

        Skipped when the requested image is already cached, unless
        ``force`` is true.
        """
        device = hardware.get_manager().get_os_install_device()

        if self.cached_image_id != image_info['id'] or force:
            _download_image(image_info)
            _write_image(image_info, device)
            self.cached_image_id = image_info['id']

    @decorators.async_command(_validate_image_info)
    def prepare_image(self,
                      command_name,
                      image_info=None,
                      metadata=None,
                      files=None):
        """Write the image (if not cached) and a configdrive to disk."""
        location = _configdrive_location()
        device = hardware.get_manager().get_os_install_device()

        # don't write image again if already cached
        if self.cached_image_id != image_info['id']:
            _download_image(image_info)
            _write_image(image_info, device)
            self.cached_image_id = image_info['id']

        LOG.debug('Writing configdrive to {0}'.format(location))
        configdrive.write_configdrive(location, metadata, files)
        _copy_configdrive_to_disk(location, device)

    @decorators.async_command()
    def run_image(self, command_name):
        """Reboot the machine into the freshly written image."""
        script = _path_to_script('shell/reboot.sh')
        LOG.info('Rebooting system')
        command = ['/bin/bash', script]
        # this should never return if successful
        exit_code = subprocess.call(command)
        if exit_code != 0:
            raise errors.SystemRebootError(exit_code)
diff --git a/ironic_python_agent/tests/__init__.py b/ironic_python_agent/tests/__init__.py
new file mode 100644
index 00000000..13e76de9
--- /dev/null
+++ b/ironic_python_agent/tests/__init__.py
@@ -0,0 +1,15 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
diff --git a/ironic_python_agent/tests/agent.py b/ironic_python_agent/tests/agent.py
new file mode 100644
index 00000000..8f90e7a6
--- /dev/null
+++ b/ironic_python_agent/tests/agent.py
@@ -0,0 +1,223 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import json
+import time
+import unittest
+
+import mock
+import pkg_resources
+from wsgiref import simple_server
+
+
+from teeth_agent import agent
+from teeth_agent import base
+from teeth_agent import encoding
+from teeth_agent import errors
+from teeth_agent import hardware
+
EXPECTED_ERROR = RuntimeError('command execution failed')


def foo_execute(*args, **kwargs):
    """Fake executor: raise EXPECTED_ERROR when 'fail' is truthy."""
    if kwargs['fail']:
        raise EXPECTED_ERROR
    return 'command execution succeeded'
+
+
class FakeMode(base.BaseAgentMode):
    """Minimal agent mode used to exercise command dispatch in tests."""

    def __init__(self):
        super(FakeMode, self).__init__('FAKE')
+
+
class TestHeartbeater(unittest.TestCase):
    """Exercise the heartbeat loop's timing and error-backoff behavior."""

    def setUp(self):
        self.mock_agent = mock.Mock()
        self.heartbeater = agent.TeethAgentHeartbeater(self.mock_agent)
        self.heartbeater.api = mock.Mock()
        self.heartbeater.hardware = mock.create_autospec(
            hardware.HardwareManager)
        self.heartbeater.stop_event = mock.Mock()

    @mock.patch('teeth_agent.agent._time')
    @mock.patch('random.uniform')
    def test_heartbeat(self, mocked_uniform, mocked_time):
        """Walk run() through three heartbeats (one failing), then stop.

        Each "RUN" below scripts one loop iteration: the stop_event wait
        interval expected, the API's heartbeat reply, the random jitter
        multiplier, and the fake clock reading.
        """
        time_responses = []
        uniform_responses = []
        heartbeat_responses = []
        wait_responses = []
        expected_stop_event_calls = []

        # FIRST RUN:
        # initial delay is 0
        expected_stop_event_calls.append(mock.call(0))
        wait_responses.append(False)
        # next heartbeat due at t=100
        heartbeat_responses.append(100)
        # random interval multiplier is 0.5
        uniform_responses.append(0.5)
        # time is now 50
        time_responses.append(50)

        # SECOND RUN:
        # (100 - 50) * .5 = 25 (t becomes ~75)
        expected_stop_event_calls.append(mock.call(25.0))
        wait_responses.append(False)
        # next heartbeat due at t=180
        heartbeat_responses.append(180)
        # random interval multiplier is 0.4
        uniform_responses.append(0.4)
        # time is now 80
        time_responses.append(80)

        # THIRD RUN:
        # (180 - 80) * .4 = 40 (t becomes ~120)
        expected_stop_event_calls.append(mock.call(40.0))
        wait_responses.append(False)
        # this heartbeat attempt fails
        heartbeat_responses.append(Exception('uh oh!'))
        # we check the time to generate a fake deadline, now t=125
        time_responses.append(125)
        # random interval multiplier is 0.5
        uniform_responses.append(0.5)
        # time is now 125.5
        time_responses.append(125.5)

        # FOURTH RUN:
        # (125.5 - 125.0) * .5 = 0.25
        expected_stop_event_calls.append(mock.call(0.25))
        # Stop now
        wait_responses.append(True)

        # Hook it up and run it
        mocked_time.side_effect = time_responses
        mocked_uniform.side_effect = uniform_responses
        self.heartbeater.api.heartbeat.side_effect = heartbeat_responses
        self.heartbeater.stop_event.wait.side_effect = wait_responses
        self.heartbeater.run()

        # Validate expectations
        self.assertEqual(self.heartbeater.stop_event.wait.call_args_list,
                         expected_stop_event_calls)
        # NOTE(review): 2.7 is presumably the error backoff grown after the
        # single failed heartbeat — confirm against the heartbeater's
        # backoff constants in agent.py.
        self.assertEqual(self.heartbeater.error_delay, 2.7)
+
+
class TestBaseAgent(unittest.TestCase):
    """Tests for the agent object: status, command dispatch, run loop,
    and async command result serialization."""

    def setUp(self):
        self.encoder = encoding.RESTJSONEncoder(indent=4)
        self.agent = agent.TeethAgent('https://fake_api.example.org:8081/',
                                      ('203.0.113.1', 9990),
                                      ('192.0.2.1', 9999))

    def assertEqualEncoded(self, a, b):
        # Evidently JSONEncoder.default() can't handle None (??) so we have to
        # use encode() to generate JSON, then json.loads() to get back a python
        # object.
        a_encoded = self.encoder.encode(a)
        b_encoded = self.encoder.encode(b)
        self.assertEqual(json.loads(a_encoded), json.loads(b_encoded))

    def test_get_status(self):
        started_at = time.time()
        self.agent.started_at = started_at

        status = self.agent.get_status()
        self.assertTrue(isinstance(status, agent.TeethAgentStatus))
        self.assertEqual(status.started_at, started_at)
        self.assertEqual(status.version,
                         pkg_resources.get_distribution('teeth-agent').version)

    def test_execute_command(self):
        do_something_impl = mock.Mock()
        self.agent.mode_implementation = FakeMode()
        command_map = self.agent.mode_implementation.command_map
        command_map['do_something'] = do_something_impl

        self.agent.execute_command('fake.do_something', foo='bar')
        do_something_impl.assert_called_once_with('do_something', foo='bar')

    def test_execute_invalid_command(self):
        self.assertRaises(errors.InvalidCommandError,
                          self.agent.execute_command,
                          'do_something',
                          foo='bar')

    @mock.patch('wsgiref.simple_server.make_server', autospec=True)
    def test_run(self, wsgi_server_cls):
        wsgi_server = wsgi_server_cls.return_value
        # NOTE(review): wsgiref servers expose serve_forever(), not start();
        # confirm which method TeethAgent.run() actually invokes to make
        # sure this KeyboardInterrupt ever fires.
        wsgi_server.start.side_effect = KeyboardInterrupt()

        self.agent.heartbeater = mock.Mock()
        self.agent.api_client.lookup_node = mock.Mock()
        self.agent.run()

        listen_addr = ('192.0.2.1', 9999)
        wsgi_server_cls.assert_called_once_with(
            listen_addr[0],
            listen_addr[1],
            self.agent.api,
            server_class=simple_server.WSGIServer)
        # assert_called_once() is not a real mock assertion on older mock
        # releases (it silently auto-creates a child mock and passes);
        # assert_called_once_with() actually verifies the call.
        wsgi_server.serve_forever.assert_called_once_with()

        self.agent.heartbeater.start.assert_called_once_with()

    def test_async_command_success(self):
        result = base.AsyncCommandResult('foo_command', {'fail': False},
                                         foo_execute)
        expected_result = {
            'id': result.id,
            'command_name': 'foo_command',
            'command_params': {
                'fail': False,
            },
            'command_status': 'RUNNING',
            'command_result': None,
            'command_error': None,
        }
        self.assertEqualEncoded(result, expected_result)

        result.start()
        result.join()

        expected_result['command_status'] = 'SUCCEEDED'
        expected_result['command_result'] = 'command execution succeeded'

        self.assertEqualEncoded(result, expected_result)

    def test_async_command_failure(self):
        result = base.AsyncCommandResult('foo_command', {'fail': True},
                                         foo_execute)
        expected_result = {
            'id': result.id,
            'command_name': 'foo_command',
            'command_params': {
                'fail': True,
            },
            'command_status': 'RUNNING',
            'command_result': None,
            'command_error': None,
        }
        self.assertEqualEncoded(result, expected_result)

        result.start()
        result.join()

        expected_result['command_status'] = 'FAILED'
        expected_result['command_error'] = errors.CommandExecutionError(
            str(EXPECTED_ERROR))

        self.assertEqualEncoded(result, expected_result)
diff --git a/ironic_python_agent/tests/api.py b/ironic_python_agent/tests/api.py
new file mode 100644
index 00000000..c1fc622e
--- /dev/null
+++ b/ironic_python_agent/tests/api.py
@@ -0,0 +1,257 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import mock
+import time
+import unittest
+
+import pecan
+import pecan.testing
+
+from teeth_agent import agent
+from teeth_agent import base
+
+
+PATH_PREFIX = '/v1'
+
+
class TestTeethAPI(unittest.TestCase):
    """Functional tests for the agent's Pecan-based REST API."""

    def setUp(self):
        super(TestTeethAPI, self).setUp()
        self.mock_agent = mock.MagicMock()
        self.app = self._make_app(self.mock_agent)

    def tearDown(self):
        # Pecan configuration is process-global; reset it between tests.
        pecan.set_config({}, overwrite=True)

    def _make_app(self, agent, enable_acl=False):
        """Build a Pecan test app wired to the given (mock) agent.

        The agent argument used to be swallowed by an unused ``enable_acl``
        parameter; it is now passed through to the app as intended.
        """
        self.config = {
            'app': {
                'root': 'teeth_agent.api.controllers.root.RootController',
                'modules': ['teeth_agent.api'],
                'static_root': '',
                'debug': True,
            },
        }

        return pecan.testing.load_test_app(config=self.config,
                                           agent=agent)

    def _request_json(self, path, params, expect_errors=False, headers=None,
                      method="post", extra_environ=None, status=None,
                      path_prefix=PATH_PREFIX):
        """Sends simulated HTTP request to Pecan test app.

        :param path: url path of target service
        :param params: content for wsgi.input of request
        :param expect_errors: Boolean value; whether an error is expected based
                              on request
        :param headers: a dictionary of headers to send along with the request
        :param method: Request method type. Appropriate method function call
                       should be used rather than passing attribute in.
        :param extra_environ: a dictionary of environ variables to send along
                              with the request
        :param status: expected status code of response
        :param path_prefix: prefix of the url path
        """
        full_path = path_prefix + path
        print('%s: %s %s' % (method.upper(), full_path, params))
        response = getattr(self.app, "%s_json" % method)(
            str(full_path),
            params=params,
            headers=headers,
            status=status,
            extra_environ=extra_environ,
            expect_errors=expect_errors
        )
        print('GOT:%s' % response)
        return response

    def put_json(self, path, params, expect_errors=False, headers=None,
                 extra_environ=None, status=None):
        """Sends simulated HTTP PUT request to Pecan test app.

        :param path: url path of target service
        :param params: content for wsgi.input of request
        :param expect_errors: Boolean value; whether an error is expected based
                              on request
        :param headers: a dictionary of headers to send along with the request
        :param extra_environ: a dictionary of environ variables to send along
                              with the request
        :param status: expected status code of response
        """
        return self._request_json(path=path, params=params,
                                  expect_errors=expect_errors,
                                  headers=headers, extra_environ=extra_environ,
                                  status=status, method="put")

    def post_json(self, path, params, expect_errors=False, headers=None,
                  extra_environ=None, status=None):
        """Sends simulated HTTP POST request to Pecan test app.

        :param path: url path of target service
        :param params: content for wsgi.input of request
        :param expect_errors: Boolean value; whether an error is expected based
                              on request
        :param headers: a dictionary of headers to send along with the request
        :param extra_environ: a dictionary of environ variables to send along
                              with the request
        :param status: expected status code of response
        """
        return self._request_json(path=path, params=params,
                                  expect_errors=expect_errors,
                                  headers=headers, extra_environ=extra_environ,
                                  status=status, method="post")

    def get_json(self, path, expect_errors=False, headers=None,
                 extra_environ=None, q=None, path_prefix=PATH_PREFIX,
                 **params):
        """Sends simulated HTTP GET request to Pecan test app.

        :param path: url path of target service
        :param expect_errors: Boolean value;whether an error is expected based
                              on request
        :param headers: a dictionary of headers to send along with the request
        :param extra_environ: a dictionary of environ variables to send along
                              with the request
        :param q: list of queries consisting of: field, value, op, and type
                  keys
        :param path_prefix: prefix of the url path
        :param params: content for wsgi.input of request
        """
        # None (not a mutable [] default) so the default list is never
        # shared between calls.
        q = q or []
        full_path = path_prefix + path
        query_params = {'q.field': [],
                        'q.value': [],
                        'q.op': [],
                        }
        for query in q:
            for name in ['field', 'op', 'value']:
                query_params['q.%s' % name].append(query.get(name, ''))
        all_params = {}
        all_params.update(params)
        if q:
            all_params.update(query_params)
        print('GET: %s %r' % (full_path, all_params))
        response = self.app.get(full_path,
                                params=all_params,
                                headers=headers,
                                extra_environ=extra_environ,
                                expect_errors=expect_errors)
        print('GOT:%s' % response)
        return response

    def test_root(self):
        response = self.get_json('/', path_prefix='')
        data = response.json
        self.assertEqual(data['name'], 'OpenStack Ironic Python Agent API')

    def test_v1_root(self):
        response = self.get_json('/v1', path_prefix='')
        data = response.json
        self.assertTrue('status' in data.keys())
        self.assertTrue('commands' in data.keys())

    def test_get_agent_status(self):
        status = agent.TeethAgentStatus('TEST_MODE', time.time(), 'v72ac9')
        self.mock_agent.get_status.return_value = status

        response = self.get_json('/status')
        self.mock_agent.get_status.assert_called_once_with()

        self.assertEqual(response.status_code, 200)
        data = response.json
        self.assertEqual(data['mode'], status.mode)
        self.assertEqual(data['started_at'], status.started_at)
        self.assertEqual(data['version'], status.version)

    def test_execute_agent_command_success(self):
        command = {
            'name': 'do_things',
            'params': {'key': 'value'},
        }

        result = base.SyncCommandResult(command['name'],
                                        command['params'],
                                        True,
                                        {'test': 'result'})

        self.mock_agent.execute_command.return_value = result

        response = self.post_json('/commands', command)
        self.assertEqual(response.status_code, 200)

        self.assertEqual(self.mock_agent.execute_command.call_count, 1)
        args, kwargs = self.mock_agent.execute_command.call_args
        self.assertEqual(args, ('do_things',))
        self.assertEqual(kwargs, {'key': 'value'})
        expected_result = result.serialize()
        data = response.json
        self.assertEqual(data, expected_result)

    def test_execute_agent_command_validation(self):
        invalid_command = {}
        response = self.post_json('/commands',
                                  invalid_command,
                                  expect_errors=True)
        self.assertEqual(response.status_code, 400)
        data = response.json
        msg = 'Invalid input for field/attribute name.'
        self.assertTrue(msg in data['faultstring'])
        msg = 'Mandatory field missing'
        self.assertTrue(msg in data['faultstring'])

    def test_execute_agent_command_params_validation(self):
        invalid_command = {'name': 'do_things', 'params': []}
        response = self.post_json('/commands',
                                  invalid_command,
                                  expect_errors=True)
        self.assertEqual(response.status_code, 400)
        data = response.json
        # this message is actually much longer, but I'm ok with this
        msg = 'Invalid input for field/attribute params.'
        self.assertTrue(msg in data['faultstring'])

    def test_list_command_results(self):
        cmd_result = base.SyncCommandResult(u'do_things',
                                            {u'key': u'value'},
                                            True,
                                            {u'test': u'result'})

        self.mock_agent.list_command_results.return_value = [
            cmd_result,
        ]

        response = self.get_json('/commands')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json, {
            u'commands': [
                cmd_result.serialize(),
            ],
        })

    def test_get_command_result(self):
        cmd_result = base.SyncCommandResult('do_things',
                                            {'key': 'value'},
                                            True,
                                            {'test': 'result'})
        serialized_cmd_result = cmd_result.serialize()

        self.mock_agent.get_command_result.return_value = cmd_result

        response = self.get_json('/commands/abc123')
        self.assertEqual(response.status_code, 200)
        data = response.json
        self.assertEqual(data, serialized_cmd_result)
diff --git a/ironic_python_agent/tests/configdrive.py b/ironic_python_agent/tests/configdrive.py
new file mode 100644
index 00000000..915d939a
--- /dev/null
+++ b/ironic_python_agent/tests/configdrive.py
@@ -0,0 +1,165 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+from __future__ import unicode_literals
+
+import base64
+import json
+import mock
+import unittest
+
+from teeth_agent import configdrive
+from teeth_agent import utils
+
+
class ConfigDriveWriterTestCase(unittest.TestCase):
    """Tests for ConfigDriveWriter: metadata/file registration and the
    on-disk layout produced by write()/write_configdrive().

    NOTE(review): the '__builtin__.open' patch target and dict.iteritems()
    are Python 2 only.
    """

    def setUp(self):
        self.writer = configdrive.ConfigDriveWriter()
        self.maxDiff = None

    def test_add_metadata(self):
        self.writer.add_metadata('admin_pass', 'password')
        metadata = self.writer.metadata
        self.assertEqual(metadata, {'admin_pass': 'password'})

    def test_add_file(self):
        self.writer.add_file('/etc/filename', 'contents')
        files = self.writer.files
        self.assertEqual(files, {'/etc/filename': 'contents'})

    @mock.patch('os.makedirs', autospec=True)
    @mock.patch('__builtin__.open', autospec=True)
    def test_write_no_files(self, open_mock, makedirs_mock):
        """With no files registered, only meta_data.json is written."""
        metadata = {'admin_pass': 'password', 'hostname': 'test'}
        json_metadata = json.dumps(metadata)
        metadata_path = '/lol/teeth/latest/meta_data.json'
        for k, v in metadata.iteritems():
            self.writer.add_metadata(k, v)

        # Make the mocked open() usable as a context manager.
        open_mock.return_value.__enter__ = lambda s: s
        open_mock.return_value.__exit__ = mock.Mock()
        write_mock = open_mock.return_value.write

        self.writer.write('/lol', prefix='teeth', version='latest')
        open_mock.assert_called_once_with(metadata_path, 'wb')
        write_mock.assert_called_once_with(json_metadata)
        makedirs_calls = [
            mock.call('/lol/teeth/latest'),
            mock.call('/lol/teeth/content')
        ]
        self.assertEqual(makedirs_calls, makedirs_mock.call_args_list)

    @mock.patch('os.makedirs', autospec=True)
    @mock.patch('__builtin__.open', autospec=True)
    def test_write_with_files(self, open_mock, makedirs_mock):
        """Registered files are written to content/NNNN and referenced
        from the 'files' list inside meta_data.json."""
        metadata = {'admin_pass': 'password', 'hostname': 'test'}
        for k, v in metadata.iteritems():
            self.writer.add_metadata(k, v)
        # Ordered dict so the content/0000, content/0001 numbering is stable.
        files = utils.get_ordereddict([
            ('/etc/conf0', 'contents0'),
            ('/etc/conf1', 'contents1'),
        ])
        for path, contents in files.iteritems():
            self.writer.add_file(path, contents)

        open_mock.return_value.__enter__ = lambda s: s
        open_mock.return_value.__exit__ = mock.Mock()
        write_mock = open_mock.return_value.write

        metadata = self.writer.metadata
        metadata['files'] = [
            {'content_path': '/content/0000', 'path': '/etc/conf0'},
            {'content_path': '/content/0001', 'path': '/etc/conf1'},
        ]

        self.writer.write('/lol', prefix='openstack', version='latest')

        # have to pull out the JSON passed to write and parse it
        # because arbitrary dictionary ordering, etc
        calls = write_mock.mock_calls
        json_data = calls[-1][1][0]
        data = json.loads(json_data)
        self.assertEqual(data, metadata)

        open_calls = [
            mock.call('/lol/openstack/content/0000', 'wb'),
            mock.call().write('contents0'),
            mock.call().__exit__(None, None, None),
            mock.call('/lol/openstack/content/0001', 'wb'),
            mock.call().write('contents1'),
            mock.call().__exit__(None, None, None),
            mock.call('/lol/openstack/latest/meta_data.json', 'wb'),
            # already checked
            mock.call().write(mock.ANY),
            mock.call().__exit__(None, None, None),
        ]
        self.assertEqual(open_mock.mock_calls, open_calls)

        makedirs_calls = [
            mock.call('/lol/openstack/latest'),
            mock.call('/lol/openstack/content')
        ]
        self.assertEqual(makedirs_calls, makedirs_mock.call_args_list)

    @mock.patch('os.makedirs', autospec=True)
    @mock.patch('__builtin__.open', autospec=True)
    def test_write_configdrive(self, open_mock, makedirs_mock):
        """The module-level helper decodes base64 file contents before
        writing them out (inputs are b64, written bytes are plain)."""
        metadata = {'admin_pass': 'password', 'hostname': 'test'}
        files = utils.get_ordereddict([
            ('/etc/conf0', base64.b64encode('contents0')),
            ('/etc/conf1', base64.b64encode('contents1')),
        ])
        metadata['files'] = [
            {'content_path': '/content/0000', 'path': '/etc/conf0'},
            {'content_path': '/content/0001', 'path': '/etc/conf1'},
        ]

        open_mock.return_value.__enter__ = lambda s: s
        open_mock.return_value.__exit__ = mock.Mock()
        write_mock = open_mock.return_value.write

        configdrive.write_configdrive('/lol',
                                      metadata,
                                      files,
                                      prefix='openstack',
                                      version='latest')

        # have to pull out the JSON passed to write and parse it
        # because arbitrary dictionary ordering, etc
        calls = write_mock.mock_calls
        json_data = calls[-1][1][0]
        data = json.loads(json_data)
        self.assertEqual(data, metadata)

        open_calls = [
            mock.call('/lol/openstack/content/0000', 'wb'),
            mock.call().write('contents0'),
            mock.call().__exit__(None, None, None),
            mock.call('/lol/openstack/content/0001', 'wb'),
            mock.call().write('contents1'),
            mock.call().__exit__(None, None, None),
            mock.call('/lol/openstack/latest/meta_data.json', 'wb'),
            # already checked
            mock.call().write(mock.ANY),
            mock.call().__exit__(None, None, None),
        ]
        self.assertEqual(open_mock.mock_calls, open_calls)

        makedirs_calls = [
            mock.call('/lol/openstack/latest'),
            mock.call('/lol/openstack/content')
        ]
        self.assertEqual(makedirs_calls, makedirs_mock.call_args_list)
diff --git a/ironic_python_agent/tests/decom.py b/ironic_python_agent/tests/decom.py
new file mode 100644
index 00000000..920f2b97
--- /dev/null
+++ b/ironic_python_agent/tests/decom.py
@@ -0,0 +1,27 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import unittest
+
+from teeth_agent import decom
+
+
class TestDecomMode(unittest.TestCase):
    """Sanity checks for the DECOM agent mode."""

    def setUp(self):
        self.agent_mode = decom.DecomMode()

    def test_decom_mode(self):
        # DecomMode reports the fixed mode name 'DECOM'.
        self.assertEqual(self.agent_mode.name, 'DECOM')
diff --git a/ironic_python_agent/tests/hardware.py b/ironic_python_agent/tests/hardware.py
new file mode 100644
index 00000000..8e4a75d6
--- /dev/null
+++ b/ironic_python_agent/tests/hardware.py
@@ -0,0 +1,67 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import mock
+import unittest
+
+from teeth_agent import hardware
+
+
+class TestGenericHardwareManager(unittest.TestCase):
+ def setUp(self):
+ self.hardware = hardware.GenericHardwareManager()
+
+ @mock.patch('os.listdir')
+ @mock.patch('os.path.exists')
+ @mock.patch('__builtin__.open')
+ def test_list_network_interfaces(self,
+ mocked_open,
+ mocked_exists,
+ mocked_listdir):
+ mocked_listdir.return_value = ['lo', 'eth0']
+ mocked_exists.side_effect = [False, True]
+ mocked_open.return_value.read.return_value = '00:0c:29:8c:11:b1\n'
+ interfaces = self.hardware.list_network_interfaces()
+ self.assertEqual(len(interfaces), 1)
+ self.assertEqual(interfaces[0].name, 'eth0')
+ self.assertEqual(interfaces[0].mac_address, '00:0c:29:8c:11:b1')
+
+ def test_get_os_install_device(self):
+ self.hardware._cmd = mock.Mock()
+ self.hardware._cmd.return_value = (
+ 'RO RA SSZ BSZ StartSec Size Device\n'
+ 'rw 256 512 4096 0 249578283616 /dev/sda\n'
+ 'rw 256 512 4096 2048 8587837440 /dev/sda1\n'
+ 'rw 256 512 4096 124967424 15728640 /dev/sda2\n'
+ 'rw 256 512 4096 0 31016853504 /dev/sdb\n'
+ 'rw 256 512 4096 0 249578283616 /dev/sdc\n', '')
+
+ self.assertEqual(self.hardware.get_os_install_device(), '/dev/sdb')
+ self.hardware._cmd.assert_called_once_with(['blockdev', '--report'])
+
+    def test_list_hardware_info(self):
+ self.hardware.list_network_interfaces = mock.Mock()
+ self.hardware.list_network_interfaces.return_value = [
+ hardware.NetworkInterface('eth0', '00:0c:29:8c:11:b1'),
+ hardware.NetworkInterface('eth1', '00:0c:29:8c:11:b2'),
+ ]
+
+ hardware_info = self.hardware.list_hardware_info()
+ self.assertEqual(len(hardware_info), 2)
+ self.assertEqual(hardware_info[0].type, 'mac_address')
+ self.assertEqual(hardware_info[1].type, 'mac_address')
+ self.assertEqual(hardware_info[0].id, '00:0c:29:8c:11:b1')
+ self.assertEqual(hardware_info[1].id, '00:0c:29:8c:11:b2')
diff --git a/ironic_python_agent/tests/overlord_agent_api.py b/ironic_python_agent/tests/overlord_agent_api.py
new file mode 100644
index 00000000..20f92ad7
--- /dev/null
+++ b/ironic_python_agent/tests/overlord_agent_api.py
@@ -0,0 +1,171 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import httmock
+import json
+import mock
+import time
+import unittest
+
+from teeth_agent import errors
+from teeth_agent import hardware
+from teeth_agent import overlord_agent_api
+
+API_URL = 'http://agent-api.overlord.example.org/'
+
+
+class TestBaseTeethAgent(unittest.TestCase):
+ def setUp(self):
+ self.api_client = overlord_agent_api.APIClient(API_URL)
+ self.hardware_info = [
+ hardware.HardwareInfo(hardware.HardwareType.MAC_ADDRESS,
+ 'aa:bb:cc:dd:ee:ff'),
+ hardware.HardwareInfo(hardware.HardwareType.MAC_ADDRESS,
+ 'ff:ee:dd:cc:bb:aa'),
+ ]
+
+ def test_successful_heartbeat(self):
+ expected_heartbeat_before = time.time() + 120
+ response = httmock.response(status_code=204, headers={
+ 'Heartbeat-Before': expected_heartbeat_before,
+ })
+
+ self.api_client.session.request = mock.Mock()
+ self.api_client.session.request.return_value = response
+
+ heartbeat_before = self.api_client.heartbeat(
+ uuid='deadbeef-dabb-ad00-b105-f00d00bab10c',
+ advertise_address=('192.0.2.1', '9999')
+ )
+
+ self.assertEqual(heartbeat_before, expected_heartbeat_before)
+
+ heartbeat_path = 'v1/nodes/deadbeef-dabb-ad00-b105-f00d00bab10c/' \
+ 'vendor_passthru/heartbeat'
+ request_args = self.api_client.session.request.call_args[0]
+ self.assertEqual(request_args[0], 'POST')
+ self.assertEqual(request_args[1], API_URL + heartbeat_path)
+
+ def test_heartbeat_requests_exception(self):
+ self.api_client.session.request = mock.Mock()
+ self.api_client.session.request.side_effect = Exception('api is down!')
+
+ self.assertRaises(errors.HeartbeatError,
+ self.api_client.heartbeat,
+ uuid='deadbeef-dabb-ad00-b105-f00d00bab10c',
+ advertise_address=('192.0.2.1', '9999'))
+
+ def test_heartbeat_invalid_status_code(self):
+ response = httmock.response(status_code=404)
+ self.api_client.session.request = mock.Mock()
+ self.api_client.session.request.return_value = response
+
+ self.assertRaises(errors.HeartbeatError,
+ self.api_client.heartbeat,
+ uuid='deadbeef-dabb-ad00-b105-f00d00bab10c',
+ advertise_address=('192.0.2.1', '9999'))
+
+ def test_heartbeat_missing_heartbeat_before_header(self):
+ response = httmock.response(status_code=204)
+ self.api_client.session.request = mock.Mock()
+ self.api_client.session.request.return_value = response
+
+ self.assertRaises(errors.HeartbeatError,
+ self.api_client.heartbeat,
+ uuid='deadbeef-dabb-ad00-b105-f00d00bab10c',
+ advertise_address=('192.0.2.1', '9999'))
+
+ def test_heartbeat_invalid_heartbeat_before_header(self):
+ response = httmock.response(status_code=204, headers={
+ 'Heartbeat-Before': 'tomorrow',
+ })
+ self.api_client.session.request = mock.Mock()
+ self.api_client.session.request.return_value = response
+
+ self.assertRaises(errors.HeartbeatError,
+ self.api_client.heartbeat,
+ uuid='deadbeef-dabb-ad00-b105-f00d00bab10c',
+ advertise_address=('192.0.2.1', '9999'))
+
+ def test_lookup_node(self):
+ response = httmock.response(status_code=200, content={
+ 'node': {
+ 'uuid': 'deadbeef-dabb-ad00-b105-f00d00bab10c'
+ }
+ })
+
+ self.api_client.session.request = mock.Mock()
+ self.api_client.session.request.return_value = response
+
+ self.api_client.lookup_node(
+ hardware_info=self.hardware_info,
+ )
+
+ request_args = self.api_client.session.request.call_args[0]
+ self.assertEqual(request_args[0], 'POST')
+ self.assertEqual(request_args[1], API_URL + 'v1/drivers/teeth/lookup')
+
+ data = self.api_client.session.request.call_args[1]['data']
+ content = json.loads(data)
+ self.assertEqual(content['hardware'], [
+ {
+ 'type': 'mac_address',
+ 'id': 'aa:bb:cc:dd:ee:ff',
+ },
+ {
+ 'type': 'mac_address',
+ 'id': 'ff:ee:dd:cc:bb:aa',
+ },
+ ])
+
+ def test_lookup_node_bad_response_code(self):
+ response = httmock.response(status_code=400, content={
+ 'node': {
+ 'uuid': 'deadbeef-dabb-ad00-b105-f00d00bab10c'
+ }
+ })
+
+ self.api_client.session.request = mock.Mock()
+ self.api_client.session.request.return_value = response
+
+ self.assertRaises(errors.LookupNodeError,
+ self.api_client.lookup_node,
+ hardware_info=self.hardware_info,
+ )
+
+ def test_lookup_node_bad_response_data(self):
+ response = httmock.response(status_code=200, content='a')
+
+ self.api_client.session.request = mock.Mock()
+ self.api_client.session.request.return_value = response
+
+ self.assertRaises(errors.LookupNodeError,
+ self.api_client.lookup_node,
+ hardware_info=self.hardware_info
+ )
+
+ def test_lookup_node_bad_response_body(self):
+ response = httmock.response(status_code=200, content={
+ 'node_node': 'also_not_node'
+ })
+
+ self.api_client.session.request = mock.Mock()
+ self.api_client.session.request.return_value = response
+
+ self.assertRaises(errors.LookupNodeError,
+ self.api_client.lookup_node,
+ hardware_info=self.hardware_info,
+ )
diff --git a/ironic_python_agent/tests/standby.py b/ironic_python_agent/tests/standby.py
new file mode 100644
index 00000000..f5f11626
--- /dev/null
+++ b/ironic_python_agent/tests/standby.py
@@ -0,0 +1,312 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import mock
+import unittest
+
+from teeth_agent import errors
+from teeth_agent import standby
+
+
+class TestStandbyMode(unittest.TestCase):
+ def setUp(self):
+ self.agent_mode = standby.StandbyMode()
+
+ def test_standby_mode(self):
+ self.assertEqual(self.agent_mode.name, 'STANDBY')
+
+ def _build_fake_image_info(self):
+ return {
+ 'id': 'fake_id',
+ 'urls': [
+ 'http://example.org',
+ ],
+ 'hashes': {
+ 'md5': 'abc123',
+ },
+ }
+
+ def test_validate_image_info_success(self):
+ standby._validate_image_info(self._build_fake_image_info())
+
+ def test_validate_image_info_missing_field(self):
+ for field in ['id', 'urls', 'hashes']:
+ invalid_info = self._build_fake_image_info()
+ del invalid_info[field]
+
+ self.assertRaises(errors.InvalidCommandParamsError,
+ standby._validate_image_info,
+ invalid_info)
+
+ def test_validate_image_info_invalid_urls(self):
+ invalid_info = self._build_fake_image_info()
+ invalid_info['urls'] = 'this_is_not_a_list'
+
+ self.assertRaises(errors.InvalidCommandParamsError,
+ standby._validate_image_info,
+ invalid_info)
+
+ def test_validate_image_info_empty_urls(self):
+ invalid_info = self._build_fake_image_info()
+ invalid_info['urls'] = []
+
+ self.assertRaises(errors.InvalidCommandParamsError,
+ standby._validate_image_info,
+ invalid_info)
+
+ def test_validate_image_info_invalid_hashes(self):
+ invalid_info = self._build_fake_image_info()
+ invalid_info['hashes'] = 'this_is_not_a_dict'
+
+ self.assertRaises(errors.InvalidCommandParamsError,
+ standby._validate_image_info,
+ invalid_info)
+
+ def test_validate_image_info_empty_hashes(self):
+ invalid_info = self._build_fake_image_info()
+ invalid_info['hashes'] = {}
+
+ self.assertRaises(errors.InvalidCommandParamsError,
+ standby._validate_image_info,
+ invalid_info)
+
+ def test_cache_image_success(self):
+ result = self.agent_mode.cache_image(
+ 'cache_image',
+ image_info=self._build_fake_image_info())
+ result.join()
+
+ def test_cache_image_invalid_image_list(self):
+ self.assertRaises(errors.InvalidCommandParamsError,
+ self.agent_mode.cache_image,
+ 'cache_image',
+ image_info={'foo': 'bar'})
+
+ def test_image_location(self):
+ image_info = self._build_fake_image_info()
+ location = standby._image_location(image_info)
+ self.assertEqual(location, '/tmp/fake_id')
+
+ @mock.patch('__builtin__.open', autospec=True)
+ @mock.patch('subprocess.call', autospec=True)
+ def test_write_image(self, call_mock, open_mock):
+ image_info = self._build_fake_image_info()
+ device = '/dev/sda'
+ location = standby._image_location(image_info)
+ script = standby._path_to_script('shell/write_image.sh')
+ command = ['/bin/bash', script, location, device]
+ call_mock.return_value = 0
+
+ standby._write_image(image_info, device)
+ call_mock.assert_called_once_with(command)
+
+ call_mock.reset_mock()
+ call_mock.return_value = 1
+
+ self.assertRaises(errors.ImageWriteError,
+ standby._write_image,
+ image_info,
+ device)
+
+ call_mock.assert_called_once_with(command)
+
+ @mock.patch('__builtin__.open', autospec=True)
+ @mock.patch('subprocess.call', autospec=True)
+ def test_copy_configdrive_to_disk(self, call_mock, open_mock):
+ device = '/dev/sda'
+ configdrive = 'configdrive'
+ script = standby._path_to_script('shell/copy_configdrive_to_disk.sh')
+ command = ['/bin/bash', script, configdrive, device]
+ call_mock.return_value = 0
+
+ standby._copy_configdrive_to_disk(configdrive, device)
+ call_mock.assert_called_once_with(command)
+
+ call_mock.reset_mock()
+ call_mock.return_value = 1
+
+ self.assertRaises(errors.ConfigDriveWriteError,
+ standby._copy_configdrive_to_disk,
+ configdrive,
+ device)
+
+ call_mock.assert_called_once_with(command)
+
+ @mock.patch('hashlib.md5', autospec=True)
+ @mock.patch('__builtin__.open', autospec=True)
+ @mock.patch('requests.get', autospec=True)
+ def test_download_image(self, requests_mock, open_mock, md5_mock):
+ image_info = self._build_fake_image_info()
+ response = requests_mock.return_value
+ response.status_code = 200
+ response.iter_content.return_value = ['some', 'content']
+ open_mock.return_value.__enter__ = lambda s: s
+ open_mock.return_value.__exit__ = mock.Mock()
+ read_mock = open_mock.return_value.read
+ read_mock.return_value = 'content'
+ hexdigest_mock = md5_mock.return_value.hexdigest
+ hexdigest_mock.return_value = image_info['hashes'].values()[0]
+
+ standby._download_image(image_info)
+ requests_mock.assert_called_once_with(image_info['urls'][0],
+ stream=True)
+ write = open_mock.return_value.write
+ write.assert_any_call('some')
+ write.assert_any_call('content')
+ self.assertEqual(write.call_count, 2)
+
+ @mock.patch('requests.get', autospec=True)
+ def test_download_image_bad_status(self, requests_mock):
+ image_info = self._build_fake_image_info()
+ response = requests_mock.return_value
+ response.status_code = 404
+ self.assertRaises(errors.ImageDownloadError,
+ standby._download_image,
+ image_info)
+
+ @mock.patch('teeth_agent.standby._verify_image', autospec=True)
+ @mock.patch('__builtin__.open', autospec=True)
+ @mock.patch('requests.get', autospec=True)
+ def test_download_image_verify_fails(self, requests_mock, open_mock,
+ verify_mock):
+ image_info = self._build_fake_image_info()
+ response = requests_mock.return_value
+ response.status_code = 200
+ verify_mock.return_value = False
+ self.assertRaises(errors.ImageChecksumError,
+ standby._download_image,
+ image_info)
+
+ @mock.patch('__builtin__.open', autospec=True)
+ @mock.patch('hashlib.sha1', autospec=True)
+ @mock.patch('hashlib.md5', autospec=True)
+ def test_verify_image_success(self, md5_mock, sha1_mock, open_mock):
+ image_info = self._build_fake_image_info()
+ image_info['hashes']['sha1'] = image_info['hashes']['md5']
+ hexdigest_mock = md5_mock.return_value.hexdigest
+ hexdigest_mock.return_value = image_info['hashes']['md5']
+ hexdigest_mock = sha1_mock.return_value.hexdigest
+ hexdigest_mock.return_value = image_info['hashes']['sha1']
+ image_location = '/foo/bar'
+
+ verified = standby._verify_image(image_info, image_location)
+ self.assertTrue(verified)
+ # make sure we only check one hash, even though both are valid
+ self.assertEqual(md5_mock.call_count + sha1_mock.call_count, 1)
+
+ @mock.patch('__builtin__.open', autospec=True)
+ @mock.patch('hashlib.md5', autospec=True)
+ def test_verify_image_failure(self, md5_mock, open_mock):
+ image_info = self._build_fake_image_info()
+ md5_mock.return_value.hexdigest.return_value = 'wrong hash'
+ image_location = '/foo/bar'
+
+ verified = standby._verify_image(image_info, image_location)
+ self.assertFalse(verified)
+ self.assertEqual(md5_mock.call_count, 1)
+
+ @mock.patch('teeth_agent.hardware.get_manager', autospec=True)
+ @mock.patch('teeth_agent.standby._write_image', autospec=True)
+ @mock.patch('teeth_agent.standby._download_image', autospec=True)
+ def test_cache_image(self, download_mock, write_mock, hardware_mock):
+ image_info = self._build_fake_image_info()
+ download_mock.return_value = None
+ write_mock.return_value = None
+ manager_mock = hardware_mock.return_value
+ manager_mock.get_os_install_device.return_value = 'manager'
+ async_result = self.agent_mode.cache_image('cache_image',
+ image_info=image_info)
+ async_result.join()
+ download_mock.assert_called_once_with(image_info)
+ write_mock.assert_called_once_with(image_info, 'manager')
+ self.assertEqual(self.agent_mode.cached_image_id, image_info['id'])
+ self.assertEqual('SUCCEEDED', async_result.command_status)
+ self.assertEqual(None, async_result.command_result)
+
+ @mock.patch('teeth_agent.standby._copy_configdrive_to_disk', autospec=True)
+ @mock.patch('teeth_agent.standby.configdrive.write_configdrive',
+ autospec=True)
+ @mock.patch('teeth_agent.hardware.get_manager', autospec=True)
+ @mock.patch('teeth_agent.standby._write_image', autospec=True)
+ @mock.patch('teeth_agent.standby._download_image', autospec=True)
+ @mock.patch('teeth_agent.standby._configdrive_location', autospec=True)
+ def test_prepare_image(self,
+ location_mock,
+ download_mock,
+ write_mock,
+ hardware_mock,
+ configdrive_mock,
+ configdrive_copy_mock):
+ image_info = self._build_fake_image_info()
+ location_mock.return_value = 'THE CLOUD'
+ download_mock.return_value = None
+ write_mock.return_value = None
+ manager_mock = hardware_mock.return_value
+ manager_mock.get_os_install_device.return_value = 'manager'
+ configdrive_mock.return_value = None
+ configdrive_copy_mock.return_value = None
+
+ async_result = self.agent_mode.prepare_image('prepare_image',
+ image_info=image_info,
+ metadata={},
+ files=[])
+ async_result.join()
+
+ download_mock.assert_called_once_with(image_info)
+ write_mock.assert_called_once_with(image_info, 'manager')
+ configdrive_mock.assert_called_once_with('THE CLOUD', {}, [])
+ configdrive_copy_mock.assert_called_once_with('THE CLOUD', 'manager')
+
+ self.assertEqual('SUCCEEDED', async_result.command_status)
+ self.assertEqual(None, async_result.command_result)
+
+ download_mock.reset_mock()
+ write_mock.reset_mock()
+ configdrive_mock.reset_mock()
+ configdrive_copy_mock.reset_mock()
+ # image is now cached, make sure download/write doesn't happen
+ async_result = self.agent_mode.prepare_image('prepare_image',
+ image_info=image_info,
+ metadata={},
+ files=[])
+ async_result.join()
+
+ self.assertEqual(download_mock.call_count, 0)
+ self.assertEqual(write_mock.call_count, 0)
+ configdrive_mock.assert_called_once_with('THE CLOUD', {}, [])
+ configdrive_copy_mock.assert_called_once_with('THE CLOUD', 'manager')
+
+ self.assertEqual('SUCCEEDED', async_result.command_status)
+ self.assertEqual(None, async_result.command_result)
+
+ @mock.patch('subprocess.call', autospec=True)
+ def test_run_image(self, call_mock):
+ script = standby._path_to_script('shell/reboot.sh')
+ command = ['/bin/bash', script]
+ call_mock.return_value = 0
+
+ success_result = self.agent_mode.run_image('run_image')
+ success_result.join()
+ call_mock.assert_called_once_with(command)
+
+ call_mock.reset_mock()
+ call_mock.return_value = 1
+
+ failed_result = self.agent_mode.run_image('run_image')
+ failed_result.join()
+
+ call_mock.assert_called_once_with(command)
+ self.assertEqual('FAILED', failed_result.command_status)
diff --git a/ironic_python_agent/utils.py b/ironic_python_agent/utils.py
new file mode 100644
index 00000000..bef52d3c
--- /dev/null
+++ b/ironic_python_agent/utils.py
@@ -0,0 +1,25 @@
+"""
+Copyright 2013 Rackspace, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import collections
+import ordereddict
+
+
+def get_ordereddict(*args, **kwargs):
+ """A fix for py26 not having ordereddict."""
+ try:
+ return collections.OrderedDict(*args, **kwargs)
+ except AttributeError:
+ return ordereddict.OrderedDict(*args, **kwargs)