Allow running product tests against S3
sopel39 committed Mar 26, 2018
1 parent 45913d5 commit bb8b6cb
Showing 3 changed files with 167 additions and 0 deletions.
39 changes: 39 additions & 0 deletions presto-hive-hadoop2/bin/run_hive_s3_tests.sh
@@ -0,0 +1,39 @@
#!/bin/bash

set -euo pipefail -x

# http://stackoverflow.com/questions/3572030/bash-script-absolute-path-with-osx
function absolutepath() {
    [[ $1 = /* ]] && echo "$1" || echo "$PWD/${1#./}"
}

SCRIPT_DIR=$(dirname $(absolutepath "$0"))
. ${SCRIPT_DIR}/common.sh

start_docker_containers

# insert AWS credentials
exec_in_hadoop_master_container cp /etc/hadoop/conf/core-site.xml.s3-template /etc/hadoop/conf/core-site.xml
exec_in_hadoop_master_container sed -i -e "s|%AWS_ACCESS_KEY%|${AWS_ACCESS_KEY_ID}|g" -e "s|%AWS_SECRET_KEY%|${AWS_SECRET_ACCESS_KEY}|g" -e "s|%S3_BUCKET_ENDPOINT%|${S3_BUCKET_ENDPOINT}|g" \
    /etc/hadoop/conf/core-site.xml

# create test table
exec_in_hadoop_master_container /usr/bin/hive -e "CREATE EXTERNAL TABLE presto_test_s3(t_bigint bigint) LOCATION 's3a://${S3_BUCKET}/presto_test_s3/'"

stop_unnecessary_hadoop_services

# run product tests
pushd $PROJECT_ROOT
set +e
./mvnw -pl presto-hive-hadoop2 test -P test-hive-hadoop2-s3 \
    -DHADOOP_USER_NAME=hive \
    -Dhive.hadoop2.s3.awsAccessKey=${AWS_ACCESS_KEY_ID} \
    -Dhive.hadoop2.s3.awsSecretKey=${AWS_SECRET_ACCESS_KEY} \
    -Dhive.hadoop2.s3.writableBucket=${S3_BUCKET}
EXIT_CODE=$?
set -e
popd

cleanup_docker_containers

exit ${EXIT_CODE}
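
For reference, a minimal sketch of how this script might be invoked from a local checkout. The script and common.sh read the S3 settings from the environment; the access key, secret, bucket name, and endpoint below are hypothetical placeholders, not values from the repository.

# Hedged usage sketch: export the variables the script references, then run it
# from the repository root. All values shown are illustrative only.
export AWS_ACCESS_KEY_ID="AKIAEXAMPLEKEY"                # hypothetical access key
export AWS_SECRET_ACCESS_KEY="exampleSecretKey"          # hypothetical secret key
export S3_BUCKET="my-writable-test-bucket"               # hypothetical writable bucket
export S3_BUCKET_ENDPOINT="s3.us-east-1.amazonaws.com"   # hypothetical endpoint

presto-hive-hadoop2/bin/run_hive_s3_tests.sh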
1 change: 1 addition & 0 deletions presto-hive-hadoop2/conf/docker-compose.yml
@@ -16,6 +16,7 @@ services:
    volumes:
      - ../../presto-hive/src/test/sql:/files/sql:ro
      - ./files/words:/usr/share/dict/words:ro
      - ./files/core-site.xml.s3-template:/etc/hadoop/conf/core-site.xml.s3-template:ro
  dnsmasq:
    hostname: dnsmasq
    image: 'prestodb/dns:1'
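
The new volume entry mounts the S3 template read-only into the Hadoop master container, where run_hive_s3_tests.sh later copies it over core-site.xml. A hedged sketch of bringing the containers up by hand with this compose file; the product-test scripts normally do this through start_docker_containers from common.sh, so the direct invocation below is an assumption, not the documented workflow.

# Hedged sketch: start the test containers manually from the repository root.
docker-compose -f presto-hive-hadoop2/conf/docker-compose.yml up -d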
127 changes: 127 additions & 0 deletions presto-hive-hadoop2/conf/files/core-site.xml.s3-template
@@ -0,0 +1,127 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<configuration>

    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://hadoop-master:8020</value>
    </property>

    <property>
        <name>fs.default.name</name>
        <value>hdfs://hadoop-master:9000</value>
    </property>

    <!-- OOZIE proxy user setting -->
    <property>
        <name>hadoop.proxyuser.oozie.hosts</name>
        <value>*</value>
    </property>
    <property>
        <name>hadoop.proxyuser.oozie.groups</name>
        <value>*</value>
    </property>

    <!-- HTTPFS proxy user setting -->
    <property>
        <name>hadoop.proxyuser.httpfs.hosts</name>
        <value>*</value>
    </property>
    <property>
        <name>hadoop.proxyuser.httpfs.groups</name>
        <value>*</value>
    </property>

    <!-- Llama proxy user setting -->
    <property>
        <name>hadoop.proxyuser.llama.hosts</name>
        <value>*</value>
    </property>
    <property>
        <name>hadoop.proxyuser.llama.groups</name>
        <value>*</value>
    </property>

    <!-- Hue proxy user setting -->
    <property>
        <name>hadoop.proxyuser.hue.hosts</name>
        <value>*</value>
    </property>
    <property>
        <name>hadoop.proxyuser.hue.groups</name>
        <value>*</value>
    </property>

    <!-- Mapred proxy user setting -->
    <property>
        <name>hadoop.proxyuser.mapred.hosts</name>
        <value>*</value>
    </property>
    <property>
        <name>hadoop.proxyuser.mapred.groups</name>
        <value>*</value>
    </property>

    <!-- Hive impersonation -->
    <property>
        <name>hadoop.proxyuser.hive.hosts</name>
        <value>*</value>
    </property>

    <property>
        <name>hadoop.proxyuser.hive.groups</name>
        <value>*</value>
    </property>

    <!-- Hdfs impersonation -->
    <property>
        <name>hadoop.proxyuser.hdfs.groups</name>
        <value>*</value>
    </property>

    <property>
        <name>hadoop.proxyuser.hdfs.hosts</name>
        <value>*</value>
    </property>

    <property>
        <name>fs.s3.awsAccessKeyId</name>
        <value>%AWS_ACCESS_KEY%</value>
    </property>

    <property>
        <name>fs.s3.awsSecretAccessKey</name>
        <value>%AWS_SECRET_KEY%</value>
    </property>

    <property>
        <name>fs.s3a.access.key</name>
        <value>%AWS_ACCESS_KEY%</value>
    </property>

    <property>
        <name>fs.s3a.secret.key</name>
        <value>%AWS_SECRET_KEY%</value>
    </property>

    <property>
        <name>fs.s3a.endpoint</name>
        <value>%S3_BUCKET_ENDPOINT%</value>
    </property>

</configuration>
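
Once run_hive_s3_tests.sh has substituted real values for the %AWS_ACCESS_KEY%, %AWS_SECRET_KEY%, and %S3_BUCKET_ENDPOINT% placeholders, the fs.s3a.* settings can be sanity-checked from inside the Hadoop master container. A hedged sketch, assuming the S3A filesystem classes are on the Hadoop classpath in that image and that exec_in_hadoop_master_container from common.sh is available:

# List the configured test bucket through the freshly written core-site.xml;
# S3_BUCKET is the same environment variable the test script uses.
exec_in_hadoop_master_container hadoop fs -ls "s3a://${S3_BUCKET}/"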
