Provide correct connector for evacuate terminate
During evacuation, a connector is built for the local host. This is
the wrong connector to use for cinder terminate_connection, which
needs the connector of the host the volume was originally attached
to. To fix this, store the initial connector with the BDM
connection_info and use the stored connector when this wrong-host
situation is detected.

This fix does not help attachments made prior to this patch, because
those attachments do not have the connector stashed in the
bdm.connection_info. In cases where the original connector was not
saved, the behavior is left as-is.

Change-Id: I793f2996fc0af1c321a240ad9348dc9bce816030
Partial-Bug: #1522496
mark.sturdevant authored and John Garbutt committed Mar 7, 2016
1 parent 38c3f88 commit a686185
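
To summarize the detach-side decision that the change to nova/compute/manager.py (shown below) introduces, here is a condensed, illustrative sketch. The helper name pick_terminate_connector is invented for this example and is not part of the patch; the real code also checks destroy_bdm and emits a warning, error, or debug log in the corresponding branches.

    def pick_terminate_connector(local_connector, connection_info,
                                 instance_host):
        """Choose the connector to pass to cinder terminate_connection."""
        if local_connector.get('host') == instance_host:
            # Normal detach: the volume is attached on this host.
            return local_connector
        stashed = (connection_info or {}).get('connector')
        if stashed and stashed.get('host') == instance_host:
            # Evacuate case: use the connector stashed at attach time.
            return stashed
        # Legacy attachment (nothing stashed) or an unexpected mismatch:
        # keep the pre-patch behavior and use the local connector.
        return local_connector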
Showing 4 changed files with 150 additions and 11 deletions.
42 changes: 41 additions & 1 deletion nova/compute/manager.py
@@ -4753,6 +4753,8 @@ def _driver_detach_volume(self, context, instance, bdm):
                               context=context, instance=instance)
                 self.volume_api.roll_detaching(context, volume_id)
 
+        return connection_info
+
     def _detach_volume(self, context, volume_id, instance, destroy_bdm=True,
                        attachment_id=None):
         """Detach a volume from an instance.
@@ -4797,8 +4799,46 @@ def _detach_volume(self, context, volume_id, instance, destroy_bdm=True,
                 self.notifier.info(context, 'volume.usage',
                                    compute_utils.usage_volume_info(vol_usage))
 
-        self._driver_detach_volume(context, instance, bdm)
+        connection_info = self._driver_detach_volume(context, instance, bdm)
         connector = self.driver.get_volume_connector(instance)
+
+        if connection_info and not destroy_bdm and (
+           connector.get('host') != instance.host):
+            # If the volume is attached to another host (evacuate) then
+            # this connector is for the wrong host. Use the connector that
+            # was stored in connection_info instead (if we have one, and it
+            # is for the expected host).
+            stashed_connector = connection_info.get('connector')
+            if not stashed_connector:
+                # Volume was attached before we began stashing connectors
+                LOG.warning(_LW("Host mismatch detected, but stashed "
+                                "volume connector not found. Instance host is "
+                                "%(ihost)s, but volume connector host is "
+                                "%(chost)s."),
+                            {'ihost': instance.host,
+                             'chost': connector.get('host')})
+            elif stashed_connector.get('host') != instance.host:
+                # Unexpected error. The stashed connector is also not matching
+                # the needed instance host.
+                LOG.error(_LE("Host mismatch detected in stashed volume "
+                              "connector. Will use local volume connector. "
+                              "Instance host is %(ihost)s. Local volume "
+                              "connector host is %(chost)s. Stashed volume "
+                              "connector host is %(schost)s."),
+                          {'ihost': instance.host,
+                           'chost': connector.get('host'),
+                           'schost': stashed_connector.get('host')})
+            else:
+                # Fix found. Use stashed connector.
+                LOG.debug("Host mismatch detected. Found usable stashed "
+                          "volume connector. Instance host is %(ihost)s. "
+                          "Local volume connector host was %(chost)s. "
+                          "Stashed volume connector host is %(schost)s.",
+                          {'ihost': instance.host,
+                           'chost': connector.get('host'),
+                           'schost': stashed_connector.get('host')})
+                connector = stashed_connector
+
         self.volume_api.terminate_connection(context, volume_id, connector)
 
         if destroy_bdm:
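
For illustration, the snippet below round-trips a hypothetical connection_info through the JSON string form stored on the BDM, showing how the detach path above can recover the connector of the original host. The field values are invented for the example; only the 'connector' key and the jsonutils round-trip mirror the patch.

    from oslo_serialization import jsonutils

    # Hypothetical connection_info as returned by initialize_connection()
    # after this patch; the 'data' and 'connector' values are examples only.
    connection_info = {
        'driver_volume_type': 'iscsi',
        'data': {'target_iqn': 'iqn.2016-03.org.example:volume-1'},
        'connector': {'host': 'original-attach-host'},
    }

    # The BDM persists connection_info as a JSON string ...
    stored = jsonutils.dumps(connection_info)

    # ... and _driver_detach_volume() now returns the loaded dict, so
    # _detach_volume() can fall back to the stashed connector when the
    # locally built connector points at a different host.
    stashed = jsonutils.loads(stored).get('connector')
    assert stashed['host'] == 'original-attach-host'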
90 changes: 90 additions & 0 deletions nova/tests/unit/compute/test_compute_mgr.py
@@ -23,6 +23,7 @@
 import netaddr
 from oslo_config import cfg
 import oslo_messaging as messaging
+from oslo_serialization import jsonutils
 from oslo_utils import importutils
 from oslo_utils import timeutils
 from oslo_utils import uuidutils
@@ -2199,6 +2200,8 @@ def _test_detach_volume(self, notify_inst_usage, detach,
         bdm.device_name = 'vdb'
         bdm_get.return_value = bdm
 
+        detach.return_value = {}
+
         with mock.patch.object(self.compute, 'volume_api') as volume_api:
             with mock.patch.object(self.compute, 'driver') as driver:
                 connector_sentinel = mock.sentinel.connector
@@ -2226,6 +2229,93 @@ def _test_detach_volume(self, notify_inst_usage, detach,
         else:
             self.assertFalse(bdm.destroy.called)
 
+    def test_detach_volume_evacuate(self):
+        """For evacuate, terminate_connection is called with original host."""
+        expected_connector = {'host': 'evacuated-host'}
+        conn_info_str = '{"connector": {"host": "evacuated-host"}}'
+        self._test_detach_volume_evacuate(conn_info_str,
+                                          expected=expected_connector)
+
+    def test_detach_volume_evacuate_legacy(self):
+        """Test coverage for evacuate with legacy attachments.
+
+        In this case, legacy means the volume was attached to the instance
+        before nova stashed the connector in connection_info. The connector
+        sent to terminate_connection will still be for the local host in this
+        case because nova does not have the info to get the connector for the
+        original (evacuated) host.
+        """
+        conn_info_str = '{"foo": "bar"}'  # Has no 'connector'.
+        self._test_detach_volume_evacuate(conn_info_str)
+
+    def test_detach_volume_evacuate_mismatch(self):
+        """Test coverage for evacuate with connector mismatch.
+
+        For evacuate, if the stashed connector also has the wrong host,
+        then log it and stay with the local connector.
+        """
+        conn_info_str = '{"connector": {"host": "other-host"}}'
+        self._test_detach_volume_evacuate(conn_info_str)
+
+    @mock.patch('nova.objects.BlockDeviceMapping.get_by_volume_and_instance')
+    @mock.patch('nova.compute.manager.ComputeManager.'
+                '_notify_about_instance_usage')
+    def _test_detach_volume_evacuate(self, conn_info_str, notify_inst_usage,
+                                     bdm_get, expected=None):
+        """Re-usable code for detach volume evacuate test cases.
+
+        :param conn_info_str: String form of the stashed connector.
+        :param expected: Dict of the connector that is expected in the
+                         terminate call (optional). Default is to expect the
+                         local connector to be used.
+        """
+        volume_id = 'vol_id'
+        instance = fake_instance.fake_instance_obj(self.context,
+                                                   host='evacuated-host')
+        bdm = mock.Mock()
+        bdm.connection_info = conn_info_str
+        bdm_get.return_value = bdm
+
+        local_connector = {'host': 'local-connector-host'}
+        expected_connector = local_connector if not expected else expected
+
+        with mock.patch.object(self.compute, 'volume_api') as volume_api:
+            with mock.patch.object(self.compute, 'driver') as driver:
+                driver.get_volume_connector.return_value = local_connector
+
+                self.compute._detach_volume(self.context,
+                                            volume_id,
+                                            instance,
+                                            destroy_bdm=False)
+
+                driver.get_volume_connector.assert_called_once_with(instance)
+                volume_api.terminate_connection.assert_called_once_with(
+                    self.context, volume_id, expected_connector)
+                volume_api.detach.assert_called_once_with(mock.ANY,
+                                                          volume_id,
+                                                          instance.uuid,
+                                                          None)
+                notify_inst_usage.assert_called_once_with(
+                    self.context, instance, "volume.detach",
+                    extra_usage_info={'volume_id': volume_id}
+                )
+
+    def test__driver_detach_volume_return(self):
+        """_driver_detach_volume returns the connection_info from loads()."""
+        with mock.patch.object(jsonutils, 'loads') as loads:
+            conn_info_str = 'test-expected-loads-param'
+            bdm = mock.Mock()
+            bdm.connection_info = conn_info_str
+            loads.return_value = {'test-loads-key': 'test loads return value'}
+            instance = fake_instance.fake_instance_obj(self.context)
+
+            ret = self.compute._driver_detach_volume(self.context,
+                                                     instance,
+                                                     bdm)
+
+            self.assertEqual(loads.return_value, ret)
+            loads.assert_called_once_with(conn_info_str)
+
     def _test_rescue(self, clean_shutdown=True):
         instance = fake_instance.fake_instance_obj(
             self.context, vm_state=vm_states.ACTIVE)
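
To make the intent of the three evacuate test cases above easier to scan, the mapping below (purely illustrative, not part of the change) pairs each stashed connection_info string with the connector the test expects terminate_connection to receive; 'local-connector-host' is what the mocked driver.get_volume_connector() returns in these tests.

    # scenario -> (stashed bdm.connection_info, expected terminate connector)
    evacuate_detach_cases = {
        'stashed connector for the evacuated host':
            ('{"connector": {"host": "evacuated-host"}}',
             {'host': 'evacuated-host'}),
        'legacy attachment, nothing stashed':
            ('{"foo": "bar"}',
             {'host': 'local-connector-host'}),
        'stashed connector for the wrong host':
            ('{"connector": {"host": "other-host"}}',
             {'host': 'local-connector-host'}),
    }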
23 changes: 15 additions & 8 deletions nova/tests/unit/volume/test_cinder.py
@@ -289,15 +289,22 @@ def test_detach(self):
 
         self.api.detach(self.ctx, 'id1', instance_uuid='fake_uuid')
 
-    def test_initialize_connection(self):
-        cinder.cinderclient(self.ctx).AndReturn(self.cinderclient)
-        self.mox.StubOutWithMock(self.cinderclient.volumes,
-                                 'initialize_connection',
-                                 use_mock_anything=True)
-        self.cinderclient.volumes.initialize_connection('id1', 'connector')
-        self.mox.ReplayAll()
+    @mock.patch('nova.volume.cinder.cinderclient')
+    def test_initialize_connection(self, mock_cinderclient):
+        connection_info = {'foo': 'bar'}
+        mock_cinderclient.return_value.volumes. \
+            initialize_connection.return_value = connection_info
+
+        volume_id = 'fake_vid'
+        connector = {'host': 'fakehost1'}
+        actual = self.api.initialize_connection(self.ctx, volume_id, connector)
+
+        expected = connection_info
+        expected['connector'] = connector
+        self.assertEqual(expected, actual)
 
-        self.api.initialize_connection(self.ctx, 'id1', 'connector')
+        mock_cinderclient.return_value.volumes. \
+            initialize_connection.assert_called_once_with(volume_id, connector)
 
     @mock.patch('nova.volume.cinder.cinderclient')
     def test_initialize_connection_rollback(self, mock_cinderclient):
6 changes: 4 additions & 2 deletions nova/volume/cinder.py
@@ -395,8 +395,10 @@ def detach(self, context, volume_id, instance_uuid=None,
     @translate_volume_exception
     def initialize_connection(self, context, volume_id, connector):
         try:
-            return cinderclient(context).volumes.initialize_connection(
-                volume_id, connector)
+            connection_info = cinderclient(
+                context).volumes.initialize_connection(volume_id, connector)
+            connection_info['connector'] = connector
+            return connection_info
         except cinder_exception.ClientException as ex:
             with excutils.save_and_reraise_exception():
                 LOG.error(_LE('Initialize connection failed for volume '
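
The contract change in nova/volume/cinder.py is small but central: initialize_connection() still returns whatever cinder handed back, with the connector used for the call added under a 'connector' key. The standalone helper below is a minimal sketch of that pattern; the helper name and sample values are invented for the example and are not Nova API calls.

    def stash_connector(connection_info, connector):
        # Same idea as the patched API.initialize_connection(): keep every
        # key returned by cinder and add the connector used for the call.
        connection_info['connector'] = connector
        return connection_info

    cinder_reply = {'driver_volume_type': 'iscsi', 'data': {}}
    connector = {'host': 'fakehost1'}

    result = stash_connector(dict(cinder_reply), connector)
    assert result['connector'] == connector          # stashed for detach later
    assert result['driver_volume_type'] == 'iscsi'   # original keys preserved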
