Skip to content

Commit

Permalink
[Enhancement](hdfs) Support loading hdfs config from hdfs-site.xml
Browse files Browse the repository at this point in the history
  • Loading branch information
caiconghui authored Aug 8, 2022
1 parent 37d1180 commit 411254c
Show file tree
Hide file tree
Showing 9 changed files with 38 additions and 8 deletions.
2 changes: 1 addition & 1 deletion bin/start_fe.sh
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ echo $final_java_opt >> $LOG_DIR/fe.out
for f in $DORIS_HOME/lib/*.jar; do
CLASSPATH=$f:${CLASSPATH}
done
export CLASSPATH=${CLASSPATH}:${DORIS_HOME}/lib
export CLASSPATH=${CLASSPATH}:${DORIS_HOME}/lib:${DORIS_HOME}/conf

pidfile=$PID_DIR/fe.pid

Expand Down
1 change: 1 addition & 0 deletions build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -384,6 +384,7 @@ if [ ${BUILD_FE} -eq 1 ]; then

cp -r -p ${DORIS_HOME}/bin/*_fe.sh ${DORIS_OUTPUT}/fe/bin/
cp -r -p ${DORIS_HOME}/conf/fe.conf ${DORIS_OUTPUT}/fe/conf/
cp -r -p ${DORIS_HOME}/conf/*.xml ${DORIS_OUTPUT}/fe/conf/
rm -rf ${DORIS_OUTPUT}/fe/lib/*
cp -r -p ${DORIS_HOME}/fe/fe-core/target/lib/* ${DORIS_OUTPUT}/fe/lib/
rm -f ${DORIS_OUTPUT}/fe/lib/palo-fe.jar
Expand Down
23 changes: 23 additions & 0 deletions conf/hdfs-site.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>

<!-- Put site-specific property overrides in this file. -->

<configuration>
<!--
  Empty by default. Properties added here (as <property><name>…</name>
  <value>…</value></property> entries) are picked up automatically because
  this conf directory is on the FE CLASSPATH and the code constructs
  HdfsConfiguration, which loads hdfs-site.xml from the classpath.
-->
</configuration>
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
Expand Down Expand Up @@ -86,7 +87,7 @@ private FileSystem getFileSystem(String remotePath) throws UserException {
checkHDFS(caseInsensitiveProperties);
String hdfsFsName = caseInsensitiveProperties.get(BrokerUtil.HADOOP_FS_NAME).toString();
String username = caseInsensitiveProperties.get(BrokerUtil.HADOOP_USER_NAME).toString();
Configuration conf = new Configuration();
Configuration conf = new HdfsConfiguration();
boolean isSecurityEnabled = false;
for (Map.Entry<String, String> propEntry : caseInsensitiveProperties.entrySet()) {
conf.set(propEntry.getKey(), propEntry.getValue());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
Expand Down Expand Up @@ -206,7 +207,7 @@ public static String getHiveDataFiles(HiveTable hiveTable, ExprNodeGenericFuncDe

// create Configuration for the given properties
private static Configuration getConfiguration(Map<String, String> properties, boolean onS3) {
Configuration configuration = new Configuration(false);
Configuration configuration = new HdfsConfiguration();
for (Map.Entry<String, String> entry : properties.entrySet()) {
if (!entry.getKey().equals(HiveTable.HIVE_METASTORE_URIS)) {
configuration.set(entry.getKey(), entry.getValue());
Expand Down Expand Up @@ -347,7 +348,7 @@ public static List<Partition> getHivePartitions(String metaStoreUris, Table remo
null, (short) -1, hivePartitions);
} catch (TException e) {
LOG.warn("Hive metastore thrift exception: {}", e.getMessage());
throw new DdlException("Connect hive metastore failed.");
throw new DdlException("Connect hive metastore failed: " + e.getMessage());
} finally {
client.close();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
Expand Down Expand Up @@ -185,7 +186,7 @@ public static void parseFile(String path, BrokerDesc brokerDesc, List<TBrokerFil
}
String fsName = brokerDesc.getProperties().get(HADOOP_FS_NAME);
String userName = brokerDesc.getProperties().get(HADOOP_USER_NAME);
Configuration conf = new Configuration();
Configuration conf = new HdfsConfiguration();
boolean isSecurityEnabled = false;
for (Map.Entry<String, String> propEntry : brokerDesc.getProperties().entrySet()) {
conf.set(propEntry.getKey(), propEntry.getValue());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import org.apache.doris.catalog.IcebergProperty;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
Expand All @@ -45,7 +46,7 @@ public HiveCatalog() {
@Override
public void initialize(IcebergProperty icebergProperty) {
// set hadoop conf
Configuration conf = new Configuration();
Configuration conf = new HdfsConfiguration();
hiveCatalog.setConf(conf);
// initialize hive catalog
Map<String, String> catalogProperties = new HashMap<>();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
Expand Down Expand Up @@ -235,7 +236,7 @@ private InputSplit[] getSplits() throws UserException, IOException {
}


Configuration configuration = new Configuration();
Configuration configuration = new HdfsConfiguration();
InputFormat<?, ?> inputFormat = HiveUtil.getInputFormat(configuration, inputFormatName, false);
// alway get fileSplits from inputformat,
// because all hoodie input format have UseFileSplitsFromInputFormat annotation
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
Expand Down Expand Up @@ -154,7 +155,7 @@ private List<InputSplit> getSplitsByPath(InputFormat<?, ?> inputFormat, Configur


protected Configuration setConfiguration() {
Configuration conf = new Configuration();
Configuration conf = new HdfsConfiguration();
Map<String, String> dfsProperties = hmsTable.getDfsProperties();
for (Map.Entry<String, String> entry : dfsProperties.entrySet()) {
conf.set(entry.getKey(), entry.getValue());
Expand Down

0 comments on commit 411254c

Please sign in to comment.