Skip to content

Commit

Permalink
KYLIN-3500 fix duplicated tablename at createSqoopToFlatHiveStep whe…
Browse files Browse the repository at this point in the history
…n using jdbc datasource
  • Loading branch information
microbearz authored and shaofengshi committed Aug 20, 2018
1 parent b4f2155 commit 2eba8e2
Showing 1 changed file with 4 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -184,15 +184,11 @@ private AbstractExecutable createSqoopToFlatHiveStep(String jobWorkingDir, Strin
}
}

//related to "kylin.engine.mr.config-override.mapreduce.job.queuename"
String queueName = getSqoopJobQueueName(config);
String cmd = String.format("%s/sqoop import -Dorg.apache.sqoop.splitter.allow_text_splitter=true "
+ generateSqoopConfigArgString()
String cmd = String.format("%s/sqoop import" + generateSqoopConfigArgString()
+ "--connect \"%s\" --driver %s --username %s --password %s --query \"%s AND \\$CONDITIONS\" "
+ "--target-dir %s/%s --split-by %s.%s --boundary-query \"%s\" --null-string '' "
+ "--target-dir %s/%s --split-by %s --boundary-query \"%s\" --null-string '' "
+ "--fields-terminated-by '%s' --num-mappers %d", sqoopHome, connectionUrl, driverClass, jdbcUser,
jdbcPass, selectSql, jobWorkingDir, hiveTable, splitTable, splitColumn, bquery, filedDelimiter,
mapperNum);
jdbcPass, selectSql, jobWorkingDir, hiveTable, splitColumn, bquery, filedDelimiter, mapperNum);
logger.debug(String.format("sqoop cmd:%s", cmd));
CmdStep step = new CmdStep();
step.setCmd(cmd);
Expand All @@ -212,7 +208,7 @@ protected String generateSqoopConfigArgString() {
config.putAll(SourceConfigurationUtil.loadSqoopConfiguration());
config.putAll(kylinConfig.getSqoopConfigOverride());

StringBuilder args = new StringBuilder();
StringBuilder args = new StringBuilder(" -Dorg.apache.sqoop.splitter.allow_text_splitter=true ");
for (Map.Entry<String, String> entry : config.entrySet()) {
args.append(" -D" + entry.getKey() + "=" + entry.getValue() + " ");
}
Expand Down

0 comments on commit 2eba8e2

Please sign in to comment.