- config
- code workflow
- log
-
-
Save zhanghui9700/96fa3f2eeb93adffd01db94a62014690 to your computer and use it in GitHub Desktop.
openstack baremetal as a service Ironic
node-1,node-3 controller+nova-compute(ironic)
node-2,node-4 nova-compute(kvm)
node-5 ironic-conductor
overview
make sure nova-scheduler can schedule baremetal requests to the ironic-compute nodes
- grep "scheduler_default_filters" /etc/nova/nova.conf | grep ComputeCapabilitiesFilter
- flavor extra_specs capabilities=boot_option:local
- node-update add properties/cpu_arch=x86_64 properties/capabilities=boot_option:local
node-3 /etc/nova/nova.conf
scheduler_host_manager=nova.scheduler.ironic_host_manager.IronicHostManager
enroll baremetal
# Register the baremetal node with Ironic using the fuel_ipmitool driver.
#   -d  driver name
#   -i  driver_info key=value (IPMI credentials plus the Glance image UUIDs
#       for the deploy kernel/ramdisk/squashfs, taken from the env vars)
#   -n  human-readable node name
ironic node-create \
-d fuel_ipmitool \
-i ipmi_address="${IPMI_ADDRESS}" \
-i ipmi_username=ADMIN \
-i ipmi_password=ADMIN \
-i deploy_ramdisk="${INITRAMFS_IMAGE}" \
-i deploy_kernel="${KERNEL_IMAGE}" \
-i deploy_squashfs="${KERNEL_SQUASHFS}" \
-n baremetal-test
# Enable the serial-over-LAN console by setting the IPMI terminal port
# (623 is the standard IPMI UDP port).
ironic node-update <baremetal-test-UUID> add \
driver_info/ipmi_terminal_port=623
# Create the port that maps the node's NIC MAC address; Neutron/Nova use
# this to wire the node to the provisioning network.
ironic port-create \
-n <baremetal-test-UUID> \
-a ${NODE_MACADDRESS}
# Publish hardware properties so nova-scheduler can match a flavor.
# capabilities=boot_option:local must also appear in the flavor's
# extra_specs (see the ComputeCapabilitiesFilter note above).
ironic node-update <baremetal-test-UUID> add \
properties/cpus=40 \
properties/memory_mb=125000 \
properties/local_gb=120 \
properties/capabilities=boot_option:local
# Verify every driver interface (power/deploy/management/...) is usable.
ironic node-validate <baremetal-test-UUID>
check
root@node-3:/# ironic node-validate 37561c05-5e7c-4983-9ed5-714edc94216c
+------------+--------+---------------+
| Interface | Result | Reason |
+------------+--------+---------------+
| boot | None | not supported |
| console | True | |
| deploy | True | |
| inspect | None | not supported |
| management | True | |
| power | True | |
| raid | None | not supported |
+------------+--------+---------------+
launch instance
root@node-5:~# ll /usr/lib/python2.7/dist-packages/ironic
ironic/ ironicclient/ ironic_fa_deploy-9.0.0.egg-info/ ironic_lib-1.1.0.egg-info/
ironic-5.1.0.egg-info/ ironic_fa_deploy/ ironic_lib/ ironic_tempest_plugin/
root@node-5:~# cat /etc/ironic/ironic.conf
[DEFAULT]
auth_strategy=keystone
enabled_drivers = pxe_ipmitool,fuel_ipmitool
verbose=True
log_dir=/var/log/ironic
rpc_backend=rabbit
control_exchange=ironic
root@node-5:~# cat /etc/ironic/ironic.conf | grep -v "^#\|^$" | grep -A 10 "\[glance\]"
[glance]
swift_temp_url_key=pPQHG3LNFyP0aMZa2OrwuyVx
swift_endpoint_url=http://192.168.3.2:8080
temp_url_endpoint_type=radosgw
glance_api_servers=172.16.11.2:9292
glance_api_insecure=False
glance_num_retries=0
root@bootstrap-ironic:~# cat /etc/fuel-agent/fuel-agent.conf(inside the baremetal)
[DEFAULT]
use_stderr=false
logging_debug_format_suffix=
log_file=/var/log/fuel-agent.log
use_syslog=true
use_syslog_rfc_format=true
prepare_configdrive=false
**fix_udev_net_rules**=false
root@node-5:~# cat /var/log/ironic/ironic-conductor.log | grep "Loaded the following drivers"
2017-02-28 02:14:08.741 16176 INFO ironic.common.driver_factory [req-caccb840-80a1-4c9c-bf0a-1621ce5fc14b - - - - -] Loaded the following drivers: ['fake', 'fuel_ipmitool', 'fuel_ssh', 'fuel_libvirt']
root@node-5:/usr/lib/python2.7/dist-packages# cat ironic_fa_deploy-9.0.0.egg-info/entry_points.txt
[ironic.drivers]
fuel_ipmitool = ironic_fa_deploy.fuel:FuelAndIPMIToolDriver
fuel_libvirt = ironic_fa_deploy.fuel:FuelAndLibvirtDriver
fuel_ssh = ironic_fa_deploy.fuel:FuelAndSSHDriver
ironic_fa_deploy属于fuel-agent项目 (ironic_fa_deploy belongs to the fuel-agent project)
class FuelAndIPMIToolDriver(base.BaseDriver):
    """Fuel + IPMITool driver.

    This driver implements the `core` functionality, combining
    :class:`ironic.drivers.modules.ipmitool.IPMIPower` (for power on/off and
    reboot) with :class:`ironic.drivers.modules.fuel_agent.FuelAgentDeploy`
    (for image deployment).

    Implementations are in those respective classes; this class is merely the
    glue between them.
    """

    def __init__(self):
        # Power interface: on/off/reboot via IPMI.
        self.power = ipmitool.IPMIPower()
        # Deploy interface: image provisioning handled by fuel-agent.
        self.deploy = fuel_agent.FuelAgentDeploy()
        # Management interface via IPMI (e.g. node-validate shows it True).
        self.management = ipmitool.IPMIManagement()
        # Serial console exposed through shellinabox over IPMI SOL.
        self.console = ipmitool.IPMIShellinaboxConsole()
        # Vendor passthru methods supplied by fuel-agent.
        self.vendor = fuel_agent.FuelAgentVendor()
root@node-3:~# ironic node-list
+--------------------------------------+------------+---------------+-------------+--------------------+-------------+
| UUID | Name | Instance UUID | Power State | Provisioning State | Maintenance |
+--------------------------------------+------------+---------------+-------------+--------------------+-------------+
| 37561c05-5e7c-4983-9ed5-714edc94216c | instance-1 | None | power off | available | False |
+--------------------------------------+------------+---------------+-------------+--------------------+-------------+
root@node-3:~# ironic node-show instance-1
+------------------------+---------------------------------------------------------------------+
| Property | Value |
+------------------------+---------------------------------------------------------------------+
| chassis_uuid | |
| clean_step | {} |
| console_enabled | False |
| created_at | 2017-02-27T09:57:55+00:00 |
| driver | fuel_ipmitool |
| driver_info | {u'ipmi_terminal_port': 623, u'ipmi_username': u'ADMIN', |
| | u'deploy_kernel': u'9a0044b2-d1c3-4605-82e3-55c4babe26c5', |
| | u'ipmi_address': u'172.16.10.2', u'deploy_ramdisk': u'1e6b438c-67e4 |
| | -4ddd-a6ec-299689d0550b', u'ipmi_password': u'******', |
| | u'deploy_squashfs': u'674a23e8-f13f-4fe1-a87d-4e1b987cb0fb'} |
| driver_internal_info | {u'clean_steps': None, u'is_whole_disk_image': False} |
| extra | {} |
| inspection_finished_at | None |
| inspection_started_at | None |
| instance_info | {} |
| instance_uuid | None |
| last_error | None |
| maintenance | False |
| maintenance_reason | None |
| name | instance-1 |
| power_state | power off |
| properties | {u'memory_mb': 125000, u'cpu_arch': u'x86_64', u'local_gb': 120, |
| | u'cpus': 40, u'capabilities': u'boot_option:local'} |
| provision_state | available |
| provision_updated_at | 2017-02-28T02:17:41+00:00 |
| raid_config | |
| reservation | None |
| target_power_state | None |
| target_provision_state | None |
| target_raid_config | |
| updated_at | 2017-02-28T02:17:43+00:00 |
| uuid | 37561c05-5e7c-4983-9ed5-714edc94216c |
+------------------------+---------------------------------------------------------------------+
LOG
fuel-agent workflow inside the baremetal node (/var/log/fuel-agent.log)
if provisioning succeeds, the log looks like this: