[ovirt-users] Cannot install latest upstream master

knarra knarra at redhat.com
Wed Nov 16 10:13:27 UTC 2016


On 11/16/2016 03:37 PM, Simone Tiraboschi wrote:
>
>
> On Wed, Nov 16, 2016 at 10:56 AM, knarra <knarra at redhat.com> wrote:
>
>     On 11/16/2016 03:07 PM, Martin Perina wrote:
>>
>>
>>     On Wed, Nov 16, 2016 at 9:48 AM, knarra <knarra at redhat.com> wrote:
>>
>>         On 11/16/2016 01:34 PM, Martin Perina wrote:
>>>         Hi,
>>>
>>>         could you please share log from engine-setup execution?
>>>
>>>         But I fear this is caused by [1], as we haven't made any
>>>         changes in the aaa-jdbc extension for quite a long time.
>>>         Sandro, is it possible to remove or fix the faulty slf4j package
>>>         from repo [2] as suggested in [1]?
>>>
>>>         Thanks
>>>
>>>         Martin
>>>
>>>         [1] https://bugzilla.redhat.com/show_bug.cgi?id=1394656
>>>         [2] http://cbs.centos.org/repos/virt7-ovirt-common-candidate/x86_64/os/Packages/
>>>
>>         Hi Martin / Simone,
>>
>>             Below is the link to the log file.
>>
>>         http://rhsqe-repo.lab.eng.blr.redhat.com/sosreports/HC/upstream/
>>
>>
>>     This is the hosted-engine setup log, but we need to get the engine-setup
>>     log from the engine VM, which is located at
>>     /var/log/ovirt-engine/setup/ovirt-engine-setup-20161115085421-ee7ksg.log,
>>     to find out the real issue.
>>     Thanks
>>     Martin
>     Hi Martin,
>
>     I see that the hosted engine VM is down, and the log you are asking
>     for would be present in the engine VM, right? Is there a way I can
>     bring it up?
>
> hosted-engine --vm-start
Hi Simone,

    I tried this, but it does not seem to be working.
[root at zod ~]# hosted-engine --vm-start
VM exists and is down, destroying it
Machine destroyed

0428ddce-73cd-4f39-93ac-89906b71cffa
        Status = WaitForLaunch
        nicModel = rtl8139,pv
        statusTime = 4823851490
        emulatedMachine = rhel6.5.0
        pid = 0
        clientIp =
        devices = [{'index': '2', 'iface': 'ide', 'specParams': {}, 'readonly': 'true', 'deviceId': 'c6cf7784-10a9-4b47-99ed-fa73e0083a3f', 'address': {'bus': '1', 'controller': '0', 'type': 'drive', 'target': '0', 'unit': '0'}, 'device': 'cdrom', 'shared': 'false', 'path': '', 'type': 'disk'}, {'index': '0', 'iface': 'virtio', 'format': 'raw', 'bootOrder': '1', 'poolID': '00000000-0000-0000-0000-000000000000', 'volumeID': '1a2b391a-1e26-47e8-b4c0-7fcdce61fd11', 'imageID': '7ec3bffe-2549-4832-a6ca-2fbd609b02c2', 'specParams': {}, 'readonly': 'false', 'domainID': 'ef9cafbf-b740-4ac3-aa95-5f5ed24d21d3', 'optional': 'false', 'deviceId': '7ec3bffe-2549-4832-a6ca-2fbd609b02c2', 'address': {'slot': '0x06', 'bus': '0x00', 'domain': '0x0000', 'type': 'pci', 'function': '0x0'}, 'device': 'disk', 'shared': 'exclusive', 'propagateErrors': 'off', 'type': 'disk'}, {'device': 'scsi', 'model': 'virtio-scsi', 'type': 'controller'}, {'nicModel': 'pv', 'macAddr': '00:45:55:21:48:08', 'linkActive': 'true', 'network': 'ovirtmgmt', 'filter': 'vdsm-no-mac-spoofing', 'specParams': {}, 'deviceId': 'bc34f5f4-d9dd-40fb-ab4c-0e47542c1652', 'address': {'slot': '0x03', 'bus': '0x00', 'domain': '0x0000', 'type': 'pci', 'function': '0x0'}, 'device': 'bridge', 'type': 'interface'}, {'device': 'console', 'specParams': {}, 'type': 'console', 'deviceId': '95da1064-ef94-4eae-bb4e-5cd05ae6e783', 'alias': 'console0'}, {'device': 'vga', 'alias': 'video0', 'type': 'video'}, {'device': 'vnc', 'type': 'graphics'}, {'device': 'virtio', 'specParams': {'source': 'random'}, 'model': 'virtio', 'type': 'rng'}]
        guestDiskMapping = {}
        vmType = kvm
        memSize = 16384
        cpuType = Haswell-noTSX
        spiceSecureChannels = smain,sdisplay,sinputs,scursor,splayback,srecord,ssmartcard,susbredir
        smp = 4
        vmName = HostedEngine
        display = vnc
        maxVCpus = 12

[root at zod ~]# hosted-engine --vm-status
Failed to connect to broker, the number of errors has exceeded the limit (1)
Cannot connect to the HA daemon, please check the logs.
Traceback (most recent call last):
  File "/usr/lib64/python2.7/runpy.py", line 162, in _run_module_as_main
    "__main__", fname, loader, pkg_name)
  File "/usr/lib64/python2.7/runpy.py", line 72, in _run_code
    exec code in run_globals
  File "/usr/lib/python2.7/site-packages/ovirt_hosted_engine_setup/vm_status.py", line 173, in <module>
    if not status_checker.print_status():
  File "/usr/lib/python2.7/site-packages/ovirt_hosted_engine_setup/vm_status.py", line 104, in print_status
    cluster_stats = self._get_cluster_stats()
  File "/usr/lib/python2.7/site-packages/ovirt_hosted_engine_setup/vm_status.py", line 89, in _get_cluster_stats
    cluster_stats = ha_cli.get_all_stats(client.HAClient.
  File "/usr/lib/python2.7/site-packages/ovirt_hosted_engine_ha/client/client.py", line 102, in get_all_stats
    with broker.connection(self._retries, self._wait):
  File "/usr/lib64/python2.7/contextlib.py", line 17, in __enter__
    return self.gen.next()
  File "/usr/lib/python2.7/site-packages/ovirt_hosted_engine_ha/lib/brokerlink.py", line 99, in connection
    self.connect(retries, wait)
  File "/usr/lib/python2.7/site-packages/ovirt_hosted_engine_ha/lib/brokerlink.py", line 78, in connect
    raise BrokerConnectionError(error_msg)
ovirt_hosted_engine_ha.lib.exceptions.BrokerConnectionError: Failed to connect to broker, the number of errors has exceeded the limit (1)

[root at zod ~]# vdsClient -s 0 list table
0428ddce-73cd-4f39-93ac-89906b71cffa      0  HostedEngine         Down
[root at zod ~]# vdsClient -s 0 list table
0428ddce-73cd-4f39-93ac-89906b71cffa      0  HostedEngine         Down
[root at zod ~]# vdsClient -s 0 list table
0428ddce-73cd-4f39-93ac-89906b71cffa      0  HostedEngine         Down
[root at zod ~]# vdsClient -s 0 list table
0428ddce-73cd-4f39-93ac-89906b71cffa      0  HostedEngine         Down
[root at zod ~]# vdsClient -s 0 list table
0428ddce-73cd-4f39-93ac-89906b71cffa      0  HostedEngine         Down
[root at zod ~]# vdsClient -s 0 list table
0428ddce-73cd-4f39-93ac-89906b71cffa      0  HostedEngine         Down
[root at zod ~]# vdsClient -s 0 list table
0428ddce-73cd-4f39-93ac-89906b71cffa      0  HostedEngine         Down
[root at zod ~]# vdsClient -s 0 list table
0428ddce-73cd-4f39-93ac-89906b71cffa      0  HostedEngine         Down
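
Since even hosted-engine --vm-status cannot reach the broker, it may be worth checking whether the HA services are running on the host at all. A minimal set of checks, assuming the usual ovirt-hosted-engine-ha service names and log locations (an assumption on my side, not something confirmed in this thread):

    # check whether the HA broker and agent services are running on the host
    systemctl status ovirt-ha-broker ovirt-ha-agent

    # if they are stopped, restart them before retrying --vm-start
    systemctl restart ovirt-ha-broker ovirt-ha-agent

    # the broker/agent logs usually say why connections are refused
    tail -n 50 /var/log/ovirt-hosted-engine-ha/broker.log
    tail -n 50 /var/log/ovirt-hosted-engine-ha/agent.log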
>
>     Thanks kasturi.
>>
>>         Thanks kasturi
>>>         On Wed, Nov 16, 2016 at 8:03 AM, knarra <knarra at redhat.com> wrote:
>>>
>>>             Hi,
>>>
>>>                 I was installing the latest upstream master and I am
>>>             hitting the issue below. Can someone please let me know
>>>             if this is a bug? If yes, is this going to be fixed in the
>>>             next nightly?
>>>
>>>             [WARNING] OVF does not contain a valid image description, using default.
>>>             [ INFO  ] Detecting host timezone.
>>>                       Enter ssh public key for the root user that will be used for the engine appliance (leave it empty to skip): //root//.ssh/id_rsa.pub
>>>             [ ERROR ] The ssh key is not valid.
>>>                       Enter ssh public key for the root user that will be used for the engine appliance (leave it empty to skip):
>>>             [WARNING] Skipping appliance root ssh public key
>>>                       Do you want to enable ssh access for the root user (yes, no, without-password) [yes]: yes
>>>
>>>             ERROR SNIPPET:
>>>             ============================================================
>>>                       |- [ ERROR ] Failed to execute stage 'Misc configuration': Command '/usr/bin/ovirt-aaa-jdbc-tool' failed to execute
>>>                       |- [ INFO  ] Rolling back database schema
>>>                       |- [ INFO  ] Clearing Engine database engine
>>>                       |- [ INFO  ] Rolling back DWH database schema
>>>                       |- [ INFO  ] Clearing DWH database ovirt_engine_history
>>>                       |- [ INFO  ] Stage: Clean up
>>>                       |-           Log file is located at /var/log/ovirt-engine/setup/ovirt-engine-setup-20161115140627-er36oa.log
>>>                       |- [ INFO  ] Generating answer file '/var/lib/ovirt-engine/setup/answers/20161115140829-setup.conf'
>>>                       |- [ INFO  ] Stage: Pre-termination
>>>                       |- [ INFO  ] Stage: Termination
>>>                       |- [ ERROR ] Execution of setup failed
>>>                       |- HE_APPLIANCE_ENGINE_SETUP_FAIL
>>>             [ ERROR ] Engine setup failed on the appliance
>>>             [ ERROR ] Failed to execute stage 'Closing up': Engine setup failed on the appliance. Please check its log on the appliance.
>>>             [ INFO  ] Stage: Clean up
>>>             [ INFO  ] Generating answer file '/var/lib/ovirt-hosted-engine-setup/answers/answers-20161115193834.conf'
>>>             [ INFO  ] Stage: Pre-termination
>>>             [ INFO  ] Stage: Termination
>>>             [ ERROR ] Hosted Engine deployment failed: this system is not reliable, please check the issue, fix and redeploy
>>>                       Log file is located at /var/log/ovirt-hosted-engine-setup/ovirt-hosted-engine-setup-20161115191145-hr3nat.log
>>>             [root at rhsqa-grafton4 ~]#
>>>
>>>             Thanks
>>>
>>>             kasturi.
>>>
>>>
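
Regarding the faulty slf4j package suspected in [1]: a quick way to check what actually landed on the engine appliance, assuming the package there is simply named slf4j (an assumption on my part), and to reproduce the failing tool outside of engine-setup:

    # on the engine VM, show which slf4j build is installed
    rpm -q slf4j

    # the tool that engine-setup failed on can be run by hand;
    # with a broken slf4j it may fail immediately with a Java error
    /usr/bin/ovirt-aaa-jdbc-tool --help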