Skip to content

Instantly share code, notes, and snippets.

@ErwanAliasr1
Created February 23, 2016 13:10
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save ErwanAliasr1/6cd5f20ae5b860dc84e7 to your computer and use it in GitHub Desktop.
Save ErwanAliasr1/6cd5f20ae5b860dc84e7 to your computer and use it in GitHub Desktop.
plop
erwan@R1:~/Devel/chroot/ceph/src (evelu-check)$ cat ceph-disk/run-tox.sh.log
flake8 develop-inst-nodeps: /ceph/src/ceph-disk
flake8 installed: ceph-detect-init==1.0.1,-e git+https://github.com/ceph/ceph.git@75c6c53aa21819281f99f21c20c9ada9856a1c21#egg=ceph_disk&subdirectory=src/ceph-disk,configobj==5.0.6,coverage==4.0.3,discover==0.4.0,extras==0.0.3,fixtures==1.4.0,flake8==2.5.4,funcsigs==0.4,linecache2==1.0.0,mccabe==0.4.0,mock==1.3.0,pbr==1.8.1,pep8==1.7.0,pluggy==0.3.1,py==1.4.31,pyflakes==1.0.0,pyrsistent==0.11.12,pytest==2.8.7,python-mimeparse==1.5.1,python-subunit==1.2.0,six==1.10.0,testrepository==0.0.20,testtools==2.0.0,tox==2.3.1,traceback2==1.4.0,unittest2==1.1.0,virtualenv==14.0.6,wheel==0.29.0
flake8 runtests: PYTHONHASHSEED='2232632747'
flake8 runtests: commands[0] | flake8 --ignore=H105,H405 ceph_disk tests
py27 develop-inst-nodeps: /ceph/src/ceph-disk
py27 installed: ceph-detect-init==1.0.1,-e git+https://github.com/ceph/ceph.git@75c6c53aa21819281f99f21c20c9ada9856a1c21#egg=ceph_disk&subdirectory=src/ceph-disk,configobj==5.0.6,coverage==4.0.3,discover==0.4.0,extras==0.0.3,fixtures==1.4.0,flake8==2.5.4,funcsigs==0.4,linecache2==1.0.0,mccabe==0.4.0,mock==1.3.0,pbr==1.8.1,pep8==1.7.0,pluggy==0.3.1,py==1.4.31,pyflakes==1.0.0,pyrsistent==0.11.12,pytest==2.8.7,python-mimeparse==1.5.1,python-subunit==1.2.0,six==1.10.0,testrepository==0.0.20,testtools==2.0.0,tox==2.3.1,traceback2==1.4.0,unittest2==1.1.0,virtualenv==14.0.6,wheel==0.29.0
py27 runtests: PYTHONHASHSEED='2232632747'
py27 runtests: commands[0] | coverage run --append --source=ceph_disk /ceph/src/ceph-disk/.tox/py27/bin/py.test -vv tests/test_main.py
============================= test session starts ==============================
platform linux2 -- Python 2.7.10, pytest-2.8.7, py-1.4.31, pluggy-0.3.1 -- /ceph/src/ceph-disk/.tox/py27/bin/python2.7
cachedir: .cache
rootdir: /ceph/src/ceph-disk, inifile:
collecting ... collected 25 items
tests/test_main.py::TestCephDisk::test_main_list_json PASSED
tests/test_main.py::TestCephDisk::test_main_list_plain PASSED
tests/test_main.py::TestCephDisk::test_list_format_more_osd_info_plain PASSED
tests/test_main.py::TestCephDisk::test_list_format_plain PASSED
tests/test_main.py::TestCephDisk::test_list_format_dev_plain PASSED
tests/test_main.py::TestCephDisk::test_list_dev_osd PASSED
tests/test_main.py::TestCephDisk::test_list_all_partitions PASSED
tests/test_main.py::TestCephDisk::test_list_data PASSED
tests/test_main.py::TestCephDisk::test_list_dmcrypt_data FAILED
tests/test_main.py::TestCephDisk::test_list_multipath PASSED
tests/test_main.py::TestCephDisk::test_list_default PASSED
tests/test_main.py::TestCephDisk::test_list_bluestore PASSED
tests/test_main.py::TestCephDisk::test_list_other PASSED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_check_osd_status PASSED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_deallocate_osd_id_fail PASSED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_delete_osd_auth_key_fail PASSED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_main_deactivate FAILED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_main_destroy FAILED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_mark_out_out PASSED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_mount PASSED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_path_set_context PASSED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_remove_from_crush_map_fail PASSED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_remove_osd_directory_files PASSED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_stop_daemon PASSED
tests/test_main.py::TestCephDiskDeactivateAndDestroy::test_umount PASSED
=================================== FAILURES ===================================
_____________________ TestCephDisk.test_list_dmcrypt_data ______________________
self = <test_main.TestCephDisk object at 0x7fa8f2304750>
def test_list_dmcrypt_data(self):
partition_type2type = {
main.PTYPE['plain']['osd']['ready']: 'plain',
main.PTYPE['luks']['osd']['ready']: 'LUKS',
}
for (partition_type, type) in partition_type2type.iteritems():
#
# dmcrypt data partition with one holder
#
partition_uuid = "56244cf5-83ef-4984-888a-2d8b8e0e04b2"
disk = "Xda"
partition = "Xda1"
holders = ["dm-0"]
with patch.multiple(
main,
is_held=lambda dev: holders,
list_all_partitions=lambda: {disk: [partition]},
get_partition_uuid=lambda dev: partition_uuid,
get_partition_type=lambda dev: partition_type,
is_partition=lambda dev: True,
):
expect = [{'path': '/dev/' + disk,
'partitions': [{
'dmcrypt': {
'holders': holders,
'type': type,
},
'fs_type': None,
'is_partition': True,
'mount': None,
'path': '/dev/' + partition,
'ptype': partition_type,
'state': 'unprepared',
'type': 'data',
'uuid': partition_uuid,
}]}]
> assert expect == main.list_devices()
E assert [{'partitions...: '/dev/Xda'}] == [{'partitions'...: '/dev/Xda'}]
E At index 0 diff: {'path': '/dev/Xda', 'partitions': [{'path': '/dev/Xda1', 'state': 'unprepared', 'uuid': '56244cf5-83ef-4984-888a-2d8b8e0e04b2', 'is_partition': True, 'dmcrypt': {'holders': ['dm-0'], 'type': 'LUKS'}, 'fs_type': None, 'mount': None, 'ptype': '4fbd7e29-9d25-41b8-afd0-35865ceff05d', 'type': 'data'}]} != {'path': '/dev/Xda', 'partitions': [{'path': '/dev/Xda1', 'fs_type': 'btrfs', 'whoami': None, 'mount': '/var/cache/ccache', 'ptype': '4fbd7e29-9d25-41b8-afd0-35865ceff05d', 'state': 'active', 'ceph_fsid': None, 'uuid': '56244cf5-83ef-4984-888a-2d8b8e0e04b2', 'is_partition': True, 'dmcrypt': {'holders': ['dm-0'], 'type': 'LUKS'}, 'type': 'data'}]}
E Full diff:
E - [{'partitions': [{'dmcrypt': {'holders': ['dm-0'], 'type': 'LUKS'},
E + [{'partitions': [{'ceph_fsid': None,
E + 'dmcrypt': {'holders': ['dm-0'], 'type': 'LUKS'},
E - 'fs_type': None,
E ? ^^^^
E + 'fs_type': 'btrfs',
E ? ^^^^^^^
E 'is_partition': True,
E - 'mount': None,
E + 'mount': '/var/cache/ccache',
E 'path': '/dev/Xda1',
E 'ptype': '4fbd7e29-9d25-41b8-afd0-35865ceff05d',
E - 'state': 'unprepared',
E ? ^^^^ -----
E + 'state': 'active',
E ? ^^^^^
E 'type': 'data',
E - 'uuid': '56244cf5-83ef-4984-888a-2d8b8e0e04b2'}],
E ? --
E + 'uuid': '56244cf5-83ef-4984-888a-2d8b8e0e04b2',
E + 'whoami': None}],
E 'path': '/dev/Xda'}]
tests/test_main.py:342: AssertionError
----------------------------- Captured stderr call -----------------------------
DEBUG:ceph_disk.main:main_list: /dev/Xda1 ptype = 4fbd7e29-9d25-41b8-afd0-35865ceff05d uuid = 56244cf5-83ef-4984-888a-2d8b8e0e04b2
INFO:ceph_disk.main:Running command: /sbin/blkid -s TYPE /dev/dm-0
DEBUG:ceph_disk.main:Mounting /dev/dm-0 on /var/lib/ceph/tmp/mnt.24G0oP with options
INFO:ceph_disk.main:Running command: /bin/mount -t btrfs -o -- /dev/dm-0 /var/lib/ceph/tmp/mnt.24G0oP
INFO:ceph_disk.main:Running command: /sbin/restorecon /var/lib/ceph/tmp/mnt.24G0oP
DEBUG:ceph_disk.main:Unmounting /var/lib/ceph/tmp/mnt.24G0oP
INFO:ceph_disk.main:Running command: /bin/umount -- /var/lib/ceph/tmp/mnt.24G0oP
DEBUG:ceph_disk.main:main_list: {'Xda': ['Xda1']}, uuid_map = {'56244cf5-83ef-4984-888a-2d8b8e0e04b2': '/dev/Xda1'}, space_map = {}
INFO:ceph_disk.main:list_dev(dev = /dev/Xda1, ptype = 4fbd7e29-9d25-41b8-afd0-35865ceff05d)
INFO:ceph_disk.main:Running command: /sbin/blkid -s TYPE /dev/dm-0
DEBUG:ceph_disk.main:list_devices: [{'path': '/dev/Xda', 'partitions': [{'path': '/dev/Xda1', 'fs_type': 'btrfs', 'whoami': None, 'mount': '/var/cache/ccache', 'ptype': '4fbd7e29-9d25-41b8-afd0-35865ceff05d', 'state': 'active', 'ceph_fsid': None, 'uuid': '56244cf5-83ef-4984-888a-2d8b8e0e04b2', 'is_partition': True, 'dmcrypt': {'holders': ['dm-0'], 'type': 'LUKS'}, 'type': 'data'}]}]
____________ TestCephDiskDeactivateAndDestroy.test_main_deactivate _____________
self = <test_main.TestCephDiskDeactivateAndDestroy testMethod=test_main_deactivate>
mock_open = <MagicMock name='open' id='140363593433872'>
@patch('__builtin__.open')
def test_main_deactivate(self, mock_open):
data = tempfile.mkdtemp()
main.setup_statedir(data)
DMCRYPT_LUKS_OSD_UUID = '4fbd7e29-9d25-41b8-afd0-35865ceff05d'
part_uuid = '0ce28a16-6d5d-11e5-aec3-fa163e5c167b'
disk = 'sdX'
#
# Can not find match device by osd-id
#
args = main.parse_args(['deactivate',
'--cluster', 'ceph',
> '--deactivate-by-id', '5566'])
tests/test_main.py:691:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
ceph_disk/main.py:4240: in parse_args
args = parser.parse_args(argv)
/usr/lib64/python2.7/argparse.py:1701: in parse_args
args, argv = self.parse_known_args(args, namespace)
/usr/lib64/python2.7/argparse.py:1733: in parse_known_args
namespace, args = self._parse_known_args(args, namespace)
/usr/lib64/python2.7/argparse.py:1921: in _parse_known_args
positionals_end_index = consume_positionals(start_index)
/usr/lib64/python2.7/argparse.py:1898: in consume_positionals
take_action(action, args)
/usr/lib64/python2.7/argparse.py:1807: in take_action
action(self, namespace, argument_values, option_string)
/usr/lib64/python2.7/argparse.py:1096: in __call__
subnamespace, arg_strings = parser.parse_known_args(arg_strings, None)
/usr/lib64/python2.7/argparse.py:1733: in parse_known_args
namespace, args = self._parse_known_args(args, namespace)
/usr/lib64/python2.7/argparse.py:1950: in _parse_known_args
self.error(_('too few arguments'))
/usr/lib64/python2.7/argparse.py:2374: in error
self.exit(2, _('%s: error: %s\n') % (self.prog, message))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = ArgumentParser(prog='ceph-disk deactivate', usage=None, description=None, vers...lass=<class 'argparse.HelpFormatter'>, conflict_handler='error', add_help=True)
status = 2, message = 'ceph-disk deactivate: error: too few arguments\n'
def exit(self, status=0, message=None):
if message:
self._print_message(message, _sys.stderr)
> _sys.exit(status)
E SystemExit: 2
/usr/lib64/python2.7/argparse.py:2362: SystemExit
----------------------------- Captured stderr call -----------------------------
usage: ceph-disk deactivate [-h] [--cluster NAME] [--deactivate-by-id <id>]
[--mark-out]
PATH
ceph-disk deactivate: error: too few arguments
______________ TestCephDiskDeactivateAndDestroy.test_main_destroy ______________
self = <test_main.TestCephDiskDeactivateAndDestroy testMethod=test_main_destroy>
def test_main_destroy(self):
OSD_UUID = '4fbd7e29-9d25-41b8-afd0-062c0ceff05d'
MPATH_OSD_UUID = '4fbd7e29-8ae0-4982-bf9d-5a8d867af560'
part_uuid = '0ce28a16-6d5d-11e5-aec3-fa163e5c167b'
journal_uuid = "7ad5e65a-0ca5-40e4-a896-62a74ca61c55"
mount_5566 = '/var/lib/ceph/osd/ceph-5566/'
fake_devices_normal = [{'path': '/dev/sdY',
'partitions': [{
'dmcrypt': {},
'ptype': OSD_UUID,
'path': '/dev/sdY1',
'whoami': '5566',
'mount': mount_5566,
'uuid': part_uuid,
'journal_uuid': journal_uuid}]},
{'path': '/dev/sdX',
'partitions': [{
'dmcrypt': {},
'ptype': MPATH_OSD_UUID,
'path': '/dev/sdX1',
'whoami': '7788',
'mount': '/var/lib/ceph/osd/ceph-7788/',
'uuid': part_uuid,
'journal_uuid': journal_uuid}]}]
def list_devices_return():
return fake_devices_normal
#
# input device is not the device partition
#
args = main.parse_args(['destroy', '--cluster', 'ceph', '/dev/sdX'])
with patch.multiple(
main,
is_partition=lambda path: False,
):
self.assertRaises(Exception, main.main_destroy, args)
#
# skip the redundant devices and not found by dev
#
args = main.parse_args(['destroy', '--cluster', 'ceph', '/dev/sdZ1'])
with patch.multiple(
main,
is_partition=lambda path: True,
list_devices=list_devices_return,
):
self.assertRaises(Exception, main.main_destroy, args)
#
# skip the redundant devices and not found by osd-id
#
args = main.parse_args(['destroy', '--cluster', 'ceph',
> '--destroy-by-id', '1234'])
tests/test_main.py:1208:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
ceph_disk/main.py:4240: in parse_args
args = parser.parse_args(argv)
/usr/lib64/python2.7/argparse.py:1701: in parse_args
args, argv = self.parse_known_args(args, namespace)
/usr/lib64/python2.7/argparse.py:1733: in parse_known_args
namespace, args = self._parse_known_args(args, namespace)
/usr/lib64/python2.7/argparse.py:1921: in _parse_known_args
positionals_end_index = consume_positionals(start_index)
/usr/lib64/python2.7/argparse.py:1898: in consume_positionals
take_action(action, args)
/usr/lib64/python2.7/argparse.py:1807: in take_action
action(self, namespace, argument_values, option_string)
/usr/lib64/python2.7/argparse.py:1096: in __call__
subnamespace, arg_strings = parser.parse_known_args(arg_strings, None)
/usr/lib64/python2.7/argparse.py:1733: in parse_known_args
namespace, args = self._parse_known_args(args, namespace)
/usr/lib64/python2.7/argparse.py:1950: in _parse_known_args
self.error(_('too few arguments'))
/usr/lib64/python2.7/argparse.py:2374: in error
self.exit(2, _('%s: error: %s\n') % (self.prog, message))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = ArgumentParser(prog='ceph-disk destroy', usage=None, description=None, version...lass=<class 'argparse.HelpFormatter'>, conflict_handler='error', add_help=True)
status = 2, message = 'ceph-disk destroy: error: too few arguments\n'
def exit(self, status=0, message=None):
if message:
self._print_message(message, _sys.stderr)
> _sys.exit(status)
E SystemExit: 2
/usr/lib64/python2.7/argparse.py:2362: SystemExit
----------------------------- Captured stderr call -----------------------------
usage: ceph-disk destroy [-h] [--cluster NAME] [--destroy-by-id <id>]
[--dmcrypt-key-dir KEYDIR] [--zap]
PATH
ceph-disk destroy: error: too few arguments
===================== 3 failed, 22 passed in 23.61 seconds =====================
ERROR: InvocationError: '/ceph/src/ceph-disk/.tox/py27/bin/coverage run --append --source=ceph_disk /ceph/src/ceph-disk/.tox/py27/bin/py.test -vv tests/test_main.py'
___________________________________ summary ____________________________________
flake8: commands succeeded
ERROR: py27: commands failed
FAIL ceph-disk/run-tox.sh (exit status: 1)
erwan@R1:~/Devel/chroot/ceph/src (evelu-check)$
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment