Skip to content

Commit

Permalink
HIVE-24092: Implement jdbc methods invoked by Calcite (Kishen Das, reviewed by Jesus Camacho Rodriguez)
Browse files Browse the repository at this point in the history

Closes apache#1443
  • Loading branch information
kishendas authored Sep 10, 2020
1 parent 4079fc6 commit 0c040fb
Show file tree
Hide file tree
Showing 6 changed files with 202 additions and 17 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hive.jdbc;

import org.apache.hive.jdbc.HiveConnection;
import org.apache.hive.jdbc.Utils;
import org.apache.hive.jdbc.Utils.JdbcConnectionParams;

import java.util.LinkedHashMap;
import java.util.Properties;
import java.util.Map;
import java.util.HashMap;
import java.sql.SQLException;

import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Unit tests for {@link HiveDatabaseMetaData} null-ordering metadata methods
 * and for {@link HiveConnection#updateServerHiveConf}.
 *
 * The connection-level HiveConf is backed by {@code map}, so individual tests
 * can toggle HIVE_DEFAULT_NULLS_LAST_KEY directly before invoking metadata calls.
 */
public class TestHiveDatabaseMetaData {

  private final Map<String, String> map = new LinkedHashMap<>();
  private final JdbcConnectionParams jdbcConnectionParams = new JdbcConnectionParams();
  private HiveDatabaseMetaData hiveDatabaseMetaData;
  private final HiveConnection connection = new HiveConnection();

  @Before
  public void setup() throws Exception {
    // Wire the shared conf map into the connection so tests can mutate it in place.
    jdbcConnectionParams.setHiveConfs(map);
    connection.setConnParams(jdbcConnectionParams);
    hiveDatabaseMetaData = new HiveDatabaseMetaData(connection, null, null);
  }

  @Test
  public void testGetHiveDefaultNullsLastNullConfig() {
    // Without HIVE_DEFAULT_NULLS_LAST in the connection conf, null-ordering
    // metadata cannot be answered and must raise SQLException.
    map.remove(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY);
    try {
      hiveDatabaseMetaData.nullsAreSortedLow();
      fail("SQLException is expected");
    } catch (SQLException e) {
      assertTrue(e.getMessage().contains("HIVE_DEFAULT_NULLS_LAST is not available"));
    }
  }

  @Test
  public void testGetHiveDefaultNullsLast() throws SQLException {
    // The helper should simply reflect the boolean value stored in the conf map.
    map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "true");
    assertTrue(hiveDatabaseMetaData.getHiveDefaultNullsLast(map));

    map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "false");
    assertFalse(hiveDatabaseMetaData.getHiveDefaultNullsLast(map));
  }

  @Test
  public void testNullsAreSortedHigh() throws SQLException {
    // nullsAreSortedHigh() mirrors HIVE_DEFAULT_NULLS_LAST directly.
    map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "false");
    assertFalse(hiveDatabaseMetaData.nullsAreSortedHigh());
    map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "true");
    assertTrue(hiveDatabaseMetaData.nullsAreSortedHigh());
  }

  @Test
  public void testNullsAreSortedLow() throws SQLException {
    // nullsAreSortedLow() is the negation of HIVE_DEFAULT_NULLS_LAST.
    map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "false");
    assertTrue(hiveDatabaseMetaData.nullsAreSortedLow());
    map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "true");
    assertFalse(hiveDatabaseMetaData.nullsAreSortedLow());
  }

  @Test
  public void testHiveConnectionUpdateServerHiveConf() {
    Map<String, String> serverHiveConf = new HashMap<>();
    serverHiveConf.put("hive.server2.thrift.resultset.default.fetch.size", Integer.toString(87));
    serverHiveConf.put("hive.default.nulls.last", "false");

    jdbcConnectionParams.getHiveConfs().put(Utils.JdbcConnectionParams.HIVE_CONF_PREFIX
        + "hive.server2.thrift.resultset.default.fetch.size", "1534");
    connection.updateServerHiveConf(serverHiveConf, jdbcConnectionParams);

    // Client configuration should not be overridden by the server configuration.
    assertEquals("1534", jdbcConnectionParams.getHiveConfs().get(Utils.JdbcConnectionParams.HIVE_CONF_PREFIX
        + "hive.server2.thrift.resultset.default.fetch.size"));

    // Server configuration should be applied, since it's not provided by the client.
    assertEquals("false", jdbcConnectionParams.getHiveConfs()
        .get(Utils.JdbcConnectionParams.HIVE_CONF_PREFIX + "hive.default.nulls.last"));
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,8 @@ public void setUp() throws Exception {
hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NOSASL.toString());
hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, "binary");
hiveConf.setIntVar(ConfVars.HIVE_SERVER2_WEBUI_PORT, webuiPort);
hiveConf.setBoolVar(ConfVars.HIVE_DEFAULT_NULLS_LAST, true);

// Enable showing operation drilldown link
hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_SHOW_OPERATION_DRILLDOWN_LINK, true);
hiveServer2 = new HiveServer2();
Expand All @@ -95,6 +97,11 @@ public void testExecuteReturnWithInfoMessage() throws Exception {
TOpenSessionReq openReq = new TOpenSessionReq();
openReq.setClient_protocol(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V11);
TOpenSessionResp sessionResp = client.OpenSession(openReq);

Map<String, String> serverHiveConf = sessionResp.getConfiguration();
assertNotNull(serverHiveConf);
assertTrue(Boolean.parseBoolean(serverHiveConf.get(ConfVars.HIVE_DEFAULT_NULLS_LAST.varname)));

TSessionHandle sessHandle = sessionResp.getSessionHandle();
TExecuteStatementReq execReq = new TExecuteStatementReq(sessHandle, "select 1");
execReq.setRunAsync(true);
Expand Down
38 changes: 38 additions & 0 deletions jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

package org.apache.hive.jdbc;

import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
Expand Down Expand Up @@ -121,6 +122,7 @@
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import java.util.stream.Stream;

/**
* HiveConnection.
Expand Down Expand Up @@ -242,6 +244,13 @@ private static String makeDirectJDBCUrlFromConnectionParams(JdbcConnectionParams
return url.toString();
}

@VisibleForTesting
// Test-only constructor: bypasses URL parsing and transport setup so unit
// tests can build a bare connection object. sessConfMap is left null and
// embedded mode is forced; do not use outside tests.
public HiveConnection() {
sessConfMap = null;
isEmbeddedMode = true;
initFetchSize = 0;
}

public HiveConnection(String uri, Properties info) throws SQLException {
setupLoginTimeout();
try {
Expand Down Expand Up @@ -866,6 +875,13 @@ private void openSession() throws SQLException {
try {
TOpenSessionResp openResp = client.OpenSession(openReq);

// Populate a given configuration from HS2 server HiveConf, only if that configuration
// is not already present in Connection parameter HiveConf i.e., client side configuration
// takes precedence over the server side configuration.
Map<String, String> serverHiveConf = openResp.getConfiguration();

updateServerHiveConf(serverHiveConf, connParams);

// validate connection
Utils.verifySuccess(openResp.getStatus());
if (!supportedProtocols.contains(openResp.getServerProtocolVersion())) {
Expand All @@ -891,6 +907,20 @@ private void openSession() throws SQLException {
isClosed = false;
}

/**
 * Merges HS2 server-side HiveConf values (as returned in TOpenSessionResp)
 * into the connection parameters. A value is copied only when the client did
 * not already set it on the connection URL, so client configuration always
 * takes precedence over server configuration.
 *
 * @param serverHiveConf configuration map from the server; may be null (no-op)
 * @param connParams connection parameters to update in place
 */
@VisibleForTesting
public void updateServerHiveConf(Map<String, String> serverHiveConf, JdbcConnectionParams connParams) {
  if (serverHiveConf == null) {
    return;
  }
  // Plain loop over all known configurations; Stream.forEach with side
  // effects is avoided intentionally.
  for (ConfVars conf : ConfVars.values()) {
    String clientKey = JdbcConnectionParams.HIVE_CONF_PREFIX + conf.varname;
    // Only adopt the server value if the client did not set this key.
    if (serverHiveConf.containsKey(conf.varname) && !connParams.getHiveConfs().containsKey(clientKey)) {
      connParams.getHiveConfs().put(clientKey, serverHiveConf.get(conf.varname));
    }
  }
}

/**
* @return username from sessConfMap
*/
Expand Down Expand Up @@ -1714,6 +1744,14 @@ public TProtocolVersion getProtocol() {
return protocol;
}

/**
 * Returns the JDBC connection parameters this connection holds.
 */
public JdbcConnectionParams getConnParams() {
return connParams;
}

/**
 * Replaces this connection's parameters and returns the newly installed value
 * (fluent-style, primarily for tests).
 */
public JdbcConnectionParams setConnParams(JdbcConnectionParams jdbcConnectionParams) {
  connParams = jdbcConnectionParams;
  return connParams;
}

public static TCLIService.Iface newSynchronizedClient(
TCLIService.Iface client) {
return (TCLIService.Iface) Proxy.newProxyInstance(
Expand Down
47 changes: 34 additions & 13 deletions jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,11 @@
import java.util.ArrayList;

import java.util.List;

import jline.internal.Log;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
import org.apache.hive.service.cli.TableSchema;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
Expand All @@ -31,6 +35,7 @@
import java.sql.SQLFeatureNotSupportedException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Map;
import java.util.jar.Attributes;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hive.service.cli.GetInfoType;
Expand Down Expand Up @@ -855,19 +860,19 @@ public boolean nullPlusNonNullIsNull() throws SQLException {
}

public boolean nullsAreSortedAtEnd() throws SQLException {
  // Per JDBC contract: NULLs are NOT sorted at the end regardless of sort order;
  // their position depends on the configured default null ordering.
  return false;
}

public boolean nullsAreSortedAtStart() throws SQLException {
  // Per JDBC contract: NULLs are NOT sorted at the start regardless of sort order;
  // their position depends on the configured default null ordering.
  return false;
}

public boolean nullsAreSortedHigh() throws SQLException {
  // NULLs sort high exactly when the connection's HIVE_DEFAULT_NULLS_LAST
  // setting is true (NULLs last in ascending order == sorted high).
  return getHiveDefaultNullsLast(connection.getConnParams().getHiveConfs());
}

public boolean nullsAreSortedLow() throws SQLException {
  // Logical negation of nullsAreSortedHigh(): NULLs sort low when
  // HIVE_DEFAULT_NULLS_LAST is false.
  return !getHiveDefaultNullsLast(connection.getConnParams().getHiveConfs());
}

public boolean othersDeletesAreVisible(int type) throws SQLException {
Expand Down Expand Up @@ -895,27 +900,27 @@ public boolean ownUpdatesAreVisible(int type) throws SQLException {
}

public boolean storesLowerCaseIdentifiers() throws SQLException {
  // Per JDBC contract, true: unquoted identifiers are case-insensitive and
  // stored in lower case.
  return true;
}

public boolean storesLowerCaseQuotedIdentifiers() throws SQLException {
  // Per JDBC contract, true: quoted identifiers are also treated as
  // case-insensitive and stored in lower case.
  return true;
}

public boolean storesMixedCaseIdentifiers() throws SQLException {
  // False: unquoted identifiers are not stored in mixed case
  // (see storesLowerCaseIdentifiers).
  return false;
}

public boolean storesMixedCaseQuotedIdentifiers() throws SQLException {
  // False: quoted identifiers are not stored in mixed case
  // (see storesLowerCaseQuotedIdentifiers).
  return false;
}

public boolean storesUpperCaseIdentifiers() throws SQLException {
  // False: unquoted identifiers are stored in lower case, not upper case.
  return false;
}

public boolean storesUpperCaseQuotedIdentifiers() throws SQLException {
  // False: quoted identifiers are stored in lower case, not upper case.
  return false;
}

public boolean supportsANSI92EntryLevelSQL() throws SQLException {
Expand Down Expand Up @@ -1040,11 +1045,11 @@ public boolean supportsMinimumSQLGrammar() throws SQLException {
}

public boolean supportsMixedCaseIdentifiers() throws SQLException {
  // False: unquoted identifiers are not treated as case-sensitive.
  return false;
}

public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException {
  // False: quoted identifiers are not treated as case-sensitive.
  return false;
}

public boolean supportsMultipleOpenResults() throws SQLException {
Expand Down Expand Up @@ -1227,4 +1232,20 @@ private TGetInfoResp getServerInfo(TGetInfoType type) throws SQLException {
Utils.verifySuccess(resp.getStatus());
return resp;
}
}

/**
 * Reads the HIVE_DEFAULT_NULLS_LAST setting from a connection-level HiveConf map.
 *
 * @param hiveConfs connection HiveConf map; keys carry the "hiveconf:" prefix
 * @return true if NULL values sort last by default, false otherwise
 * @throws SQLException if {@code hiveConfs} is null or the setting is absent
 */
public static boolean getHiveDefaultNullsLast(Map<String, String> hiveConfs) throws SQLException {
  if (hiveConfs == null) {
    throw new SQLException("hiveConfs is not available");
  }
  // Single lookup; the original fetched the value twice.
  String nullsLast = hiveConfs.get(JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY);
  if (nullsLast == null) {
    throw new SQLException("HIVE_DEFAULT_NULLS_LAST is not available");
  }
  return Boolean.parseBoolean(nullsLast);
}
}
8 changes: 7 additions & 1 deletion jdbc/src/java/org/apache/hive/jdbc/Utils.java
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.rpc.thrift.TStatus;
import org.apache.hive.service.rpc.thrift.TStatusCode;
Expand Down Expand Up @@ -168,7 +170,7 @@ public static class JdbcConnectionParams {
static final String SSL_TRUST_STORE_TYPE = "JKS";

private static final String HIVE_VAR_PREFIX = "hivevar:";
private static final String HIVE_CONF_PREFIX = "hiveconf:";
public static final String HIVE_CONF_PREFIX = "hiveconf:";
private String host = null;
private int port = 0;
private String jdbcUriString;
Expand All @@ -187,6 +189,10 @@ public static class JdbcConnectionParams {
private String currentHostZnodePath;
private final List<String> rejectedHostZnodePaths = new ArrayList<String>();

// HiveConf parameters
public static final String HIVE_DEFAULT_NULLS_LAST_KEY =
HIVE_CONF_PREFIX + HiveConf.ConfVars.HIVE_DEFAULT_NULLS_LAST.varname;

public JdbcConnectionParams() {
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.security.auth.login.LoginException;
Expand Down Expand Up @@ -326,10 +327,12 @@ public TOpenSessionResp OpenSession(TOpenSessionReq req) throws TException {

final int fetchSize = hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE);

Map<String, String> map = new HashMap<>();
map.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE.varname, Integer.toString(fetchSize));
map.put(HiveConf.ConfVars.HIVE_DEFAULT_NULLS_LAST.varname,
String.valueOf(hiveConf.getBoolVar(ConfVars.HIVE_DEFAULT_NULLS_LAST)));
resp.setSessionHandle(sessionHandle.toTSessionHandle());
resp.setConfiguration(Collections
.singletonMap(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE.varname,
Integer.toString(fetchSize)));
resp.setConfiguration(map);
resp.setStatus(OK_STATUS);
ThriftCLIServerContext context =
(ThriftCLIServerContext)currentServerContext.get();
Expand Down

0 comments on commit 0c040fb

Please sign in to comment.