Details

Type: Bug
Status: Open
Priority: Major
Resolution: Unresolved
Affects Version/s: 2.2.0
Fix Version/s: None
Environment: Red Hat 7.2 on Azure Cloud D14 VM
Description
Traceback (most recent call last):
File "/var/lib/ambari-agent/cache/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py", line 80, in <module>
HbaseClient().execute()
File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 219, in execute
method(env)
File "/var/lib/ambari-agent/cache/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py", line 35, in install
self.configure(env)
File "/var/lib/ambari-agent/cache/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py", line 40, in configure
hbase(name='client')
File "/usr/lib/python2.6/site-packages/ambari_commons/os_family_impl.py", line 89, in thunk
return fn(*args, **kwargs)
File "/var/lib/ambari-agent/cache/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py", line 56, in hbase
recursive = True
File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 154, in _init_
self.env.run()
File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 160, in run
self.run_action(resource, action)
File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 124, in run_action
provider_action()
File "/usr/lib/python2.6/site-packages/resource_management/core/providers/system.py", line 144, in action_create
raise Fail("Applying %s failed, looped symbolic links found while resolving %s" % (self.resource, path))
resource_management.core.exceptions.Fail: Applying Directory['/usr/hdp/current/hbase-client/conf'] failed, looped symbolic links found while resolving /usr/hdp/current/hbase-client/conf
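The Fail is raised by the Directory provider in resource_management/core/providers/system.py when it cannot resolve /usr/hdp/current/hbase-client/conf because the symlink chain circles back on itself. A minimal sketch of the failure mode follows (the two-link cycle and the resolve_links helper are illustrative assumptions for reproduction, not the agent's actual code):

{code:python}
import os
import tempfile

# Build a two-link cycle standing in for the suspected loop between
# /usr/hdp/current/hbase-client/conf and /etc/hbase/conf.
workdir = tempfile.mkdtemp()
link_a = os.path.join(workdir, 'hbase-client-conf')
link_b = os.path.join(workdir, 'etc-hbase-conf')
os.symlink(link_b, link_a)
os.symlink(link_a, link_b)

def resolve_links(path):
    # Follow symlinks one hop at a time, failing once a path repeats.
    seen = set()
    while os.path.islink(path):
        if path in seen:
            raise Exception('looped symbolic links found while resolving %s' % path)
        seen.add(path)
        target = os.readlink(path)
        path = target if os.path.isabs(target) else os.path.join(os.path.dirname(path), target)
    return path

resolve_links(link_a)  # raises, mirroring the Fail above
{code}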
stdout: /var/lib/ambari-agent/data/output-1669.txt
2016-10-14 17:03:52,597 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-10-14 17:03:52,598 - Group['hadoop'] {}
2016-10-14 17:03:52,599 - Group['users'] {}
2016-10-14 17:03:52,599 - User['hive']
2016-10-14 17:03:52,600 - User['zookeeper'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-10-14 17:03:52,601 - User['ams'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-10-14 17:03:52,601 - User['oozie'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2016-10-14 17:03:52,602 - User['ambari-qa'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2016-10-14 17:03:52,602 - User['tez'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'users']}
2016-10-14 17:03:52,603 - User['hdfs'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-10-14 17:03:52,604 - User['sqoop'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-10-14 17:03:52,604 - User['yarn'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-10-14 17:03:52,605 - User['hcat'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-10-14 17:03:52,606 - User['mapred'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-10-14 17:03:52,606 - User['hbase'] {'gid': 'hadoop', 'fetch_nonlocal_groups': True, 'groups': [u'hadoop']}
2016-10-14 17:03:52,607 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2016-10-14 17:03:52,608 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] {'not_if': '(test $(id -u ambari-qa) -gt 1000) || (false)'}
2016-10-14 17:03:52,613 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa'] due to not_if
2016-10-14 17:03:52,613 - Directory['/tmp/hbase-hbase']
2016-10-14 17:03:52,614 - File['/var/lib/ambari-agent/tmp/changeUid.sh'] {'content': StaticFile('changeToSecureUid.sh'), 'mode': 0555}
2016-10-14 17:03:52,615 - Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] {'not_if': '(test $(id -u hbase) -gt 1000) || (false)'}
2016-10-14 17:03:52,619 - Skipping Execute['/var/lib/ambari-agent/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/tmp/hbase-hbase'] due to not_if
2016-10-14 17:03:52,619 - Group['hdfs'] {}
2016-10-14 17:03:52,620 - User['hdfs']
2016-10-14 17:03:52,620 - FS Type:
2016-10-14 17:03:52,620 - Directory['/etc/hadoop']
2016-10-14 17:03:52,632 - File['/usr/hdp/current/hadoop-client/conf/hadoop-env.sh'] {'content': InlineTemplate(...), 'owner': 'hdfs', 'group': 'hadoop'}
2016-10-14 17:03:52,633 - Directory['/var/lib/ambari-agent/tmp/hadoop_java_io_tmpdir'] {'owner': 'hdfs', 'group': 'hadoop', 'mode': 0777}
2016-10-14 17:03:52,643 - Repository['HDP-2.4'] {'base_url': 'http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.4.3.0', 'action': ['create'], 'components': [u'HDP', 'main'], 'repo_template': '[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0', 'repo_file_name': 'HDP', 'mirror_list': None}
2016-10-14 17:03:52,650 - File['/etc/yum.repos.d/HDP.repo'] {'content': '[HDP-2.4]\nname=HDP-2.4\nbaseurl=http://public-repo-1.hortonworks.com/HDP/centos7/2.x/updates/2.4.3.0\n\npath=/\nenabled=1\ngpgcheck=0'}
2016-10-14 17:03:52,650 - Repository['HDP-UTILS-1.1.0.20'] {'base_url': 'http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos7', 'action': ['create'], 'components': [u'HDP-UTILS', 'main'], 'repo_template': '[{{repo_id}}]\nname={{repo_id}}\n{% if mirror_list %}mirrorlist={{mirror_list}}{% else %}baseurl={{base_url}}{% endif %}\n\npath=/\nenabled=1\ngpgcheck=0', 'repo_file_name': 'HDP-UTILS', 'mirror_list': None}
2016-10-14 17:03:52,653 - File['/etc/yum.repos.d/HDP-UTILS.repo']
2016-10-14 17:03:52,653 - Package['unzip'] {'retry_on_repo_unavailability': False, 'retry_count': 5}
2016-10-14 17:03:52,752 - Skipping installation of existing package unzip
2016-10-14 17:03:52,752 - Package['curl']
2016-10-14 17:03:52,763 - Skipping installation of existing package curl
2016-10-14 17:03:52,764 - Package['hdp-select']
2016-10-14 17:03:52,774 - Skipping installation of existing package hdp-select
2016-10-14 17:03:52,945 - Using hadoop conf dir: /usr/hdp/current/hadoop-client/conf
2016-10-14 17:03:52,952 - Package['hbase_2_4_*']
2016-10-14 17:03:53,050 - Skipping installation of existing package hbase_2_4_*
2016-10-14 17:03:53,050 - Package['phoenix_2_4_*']
2016-10-14 17:03:53,060 - Skipping installation of existing package phoenix_2_4_*
2016-10-14 17:03:53,061 - Directory['/etc/hbase']
2016-10-14 17:03:53,062 - Directory['/usr/hdp/current/hbase-client/conf'] {'owner': 'hbase', 'group': 'hadoop', 'recursive': True}
2016-10-14 17:03:53,062 - Creating directory Directory['/usr/hdp/current/hbase-client/conf'] since it doesn't exist.
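The final log lines show the agent trying to create Directory['/usr/hdp/current/hbase-client/conf'] because the path appears not to exist; a looped symlink at that location would look exactly like this, since resolving it fails. To confirm on an affected host, a small sketch that prints each hop of the chain (the suspected loop between /usr/hdp/current/hbase-client/conf and /etc/hbase/conf is an assumption inferred from the error; verify against the actual links):

{code:python}
import os

def print_link_chain(path, max_hops=10):
    # Print every hop of a symlink chain so a cycle becomes visible.
    for _ in range(max_hops):
        if not os.path.islink(path):
            print('%s (not a symlink, chain ends here)' % path)
            return
        target = os.readlink(path)
        print('%s -> %s' % (path, target))
        path = target if os.path.isabs(target) else os.path.join(os.path.dirname(path), target)
    print('gave up after %d hops: probable symlink loop' % max_hops)

print_link_chain('/usr/hdp/current/hbase-client/conf')
{code}

Once the cycle is visible, one possible workaround is to break it by repointing /etc/hbase/conf at a real, versioned configuration directory (the target conf-select normally manages for the installed HDP build) before retrying the install; the correct target depends on the HDP version on the host.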