@@ -394,7 +394,8 @@ def cm_server_not_dead(primary_node):
     _configure_for_streamsets_after_start(deployment=deployment,
                                           cluster_name=DEFAULT_CLUSTER_NAME,
                                           cluster=cluster, quiet=not args.verbose,
-                                          kerberos_enabled=args.kerberos)
+                                          kerberos_enabled=args.kerberos,
+                                          kerberos_principals=args.kerberos_principals)


 def _configure_kdc(cluster, kerberos_principals, kerberos_ticket_lifetime, quiet):
@@ -1219,7 +1220,8 @@ def _configure_yarn(deployment, cluster, cluster_name):
                                      configs)


-def _configure_for_streamsets_after_start(deployment, cluster_name, cluster, quiet, kerberos_enabled):
+def _configure_for_streamsets_after_start(deployment, cluster_name, cluster, quiet,
+                                          kerberos_enabled, kerberos_principals):
     # Following is needed for Kerberos and Kafka to work correctly.
     logger.info('Copying streamsets keytab to a fixed location which is shared on all clustered nodes ...')
     commands = [('cp "$(find /var/run/cloudera-scm-agent/process/*streamsets-DATACOLLECTOR -maxdepth 0 -mindepth 0 | '
@@ -1232,6 +1234,14 @@ def _configure_for_streamsets_after_start(deployment, cluster_name, cluster, qui
                              for service
                              in deployment.get_cluster_services(cluster_name=DEFAULT_CLUSTER_NAME)}

+    if 'HDFS' in cluster_service_types and kerberos_principals:
+        if 'sdctest' in kerberos_principals.split(','):
+            # Following is needed as tests use HDFS directory /tmp/out.
+            commands = ['hadoop fs -mkdir /tmp/out',
+                        'hadoop fs -chown sdctest /tmp/out',
+                        'hadoop fs -chmod 1777 /tmp/out']
+            _execute_commands_against_kerberized_hdfs(cluster, commands, quiet)
+
     if 'SOLR' in cluster_service_types:
         SOLR_CONFIG_FILE_PATH = '/root/sample_collection_solr_configs/conf/solrconfig.xml'
0 commit comments