HIVE-25616: HIVE-24741 backport to 2.3 #2730

Merged: 4 commits, Oct 19, 2021

metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -169,6 +169,7 @@
import org.slf4j.LoggerFactory;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

@@ -2565,34 +2566,67 @@ private Collection getPartitionPsQueryResults(String dbName, String tableName,
    return (Collection) query.execute(dbName, tableName, partNameMatcher);
  }

  /**
   * If all the values in partVals are empty strings, it means we are returning
   * all the partitions and hence we can attempt to use a directSQL equivalent API which
   * is considerably faster.
   * @param partVals The partition values used to filter out the partitions.
   * @return true only when partVals is non-empty and contains only empty strings,
   * otherwise false. If the user or groups are valid, the caller skips this path since
   * directSQL doesn't support partition privileges.
   */
  private boolean canTryDirectSQL(List<String> partVals) {
    if (partVals.isEmpty()) {
      return false;
    }
    for (String val : partVals) {
      if (val != null && !val.isEmpty()) {
        return false;
      }
    }
    return true;
  }

  @Override
  public List<Partition> listPartitionsPsWithAuth(String db_name, String tbl_name,
      List<String> part_vals, short max_parts, String userName, List<String> groupNames)
      throws MetaException, InvalidObjectException, NoSuchObjectException {
    List<Partition> partitions = new ArrayList<Partition>();
    List<Partition> partitions = new ArrayList<>();
    boolean success = false;
    QueryWrapper queryWrapper = new QueryWrapper();

    try {
      openTransaction();
      LOG.debug("executing listPartitionNamesPsWithAuth");
      Collection parts = getPartitionPsQueryResults(db_name, tbl_name,
          part_vals, max_parts, null, queryWrapper);

      MTable mtbl = getMTable(db_name, tbl_name);
      if (mtbl == null) {
        throw new NoSuchObjectException(db_name + "." + tbl_name + " table not found");
      }
      boolean getauth = null != userName && null != groupNames &&
          "TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"));
      if (!getauth && canTryDirectSQL(part_vals)) {
        LOG.debug(
            "Redirecting to directSQL enabled API: db: {} tbl: {} partVals: {}",
            db_name, tbl_name, Joiner.on(',').join(part_vals));
        return getPartitions(db_name, tbl_name, -1);
      }
      LOG.debug("executing listPartitionNamesPsWithAuth");
      Collection parts = getPartitionPsQueryResults(db_name, tbl_name, part_vals,
          max_parts, null, queryWrapper);
      for (Object o : parts) {
        Partition part = convertToPart((MPartition) o);
        //set auth privileges
        if (null != userName && null != groupNames &&
            "TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) {
          String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl
              .getPartitionKeys()), part.getValues());
          PrincipalPrivilegeSet partAuth = getPartitionPrivilegeSet(db_name,
              tbl_name, partName, userName, groupNames);
          part.setPrivileges(partAuth);
        }
        // set auth privileges
        String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl
            .getPartitionKeys()), part.getValues());
        PrincipalPrivilegeSet partAuth = getPartitionPrivilegeSet(db_name,
            tbl_name, partName, userName, groupNames);
        part.setPrivileges(partAuth);
        partitions.add(part);
      }
      success = commitTransaction();
    } catch (InvalidObjectException | NoSuchObjectException | MetaException e) {
      throw e;
    } catch (Exception e) {
      throw new MetaException(e.getMessage());
    } finally {
      rollbackAndCleanup(success, queryWrapper);
    }
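For context, a minimal, self-contained sketch of the fast-path decision this hunk introduces. The CanTryDirectSqlSketch class and its main method are illustrative only; the real logic is the private ObjectStore code above. listPartitionsPsWithAuth now short-circuits to the directSQL-backed getPartitions call only when every supplied partition value is an empty string and no partition-level privileges are requested, because the shortcut returns partitions without privilege information attached.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Standalone illustration (hypothetical class, not part of the patch) of the
// predicate added above: the directSQL fast path is only attempted when the
// caller effectively asked for all partitions, i.e. every supplied partition
// value is an empty string.
public class CanTryDirectSqlSketch {

  static boolean canTryDirectSQL(List<String> partVals) {
    if (partVals.isEmpty()) {
      return false;              // empty spec: keep the existing JDO code path
    }
    for (String val : partVals) {
      if (val != null && !val.isEmpty()) {
        return false;            // a concrete value means a real filter: JDO path
      }
    }
    return true;                 // only empty strings: "all partitions", directSQL-friendly
  }

  public static void main(String[] args) {
    // ["", ""] -> true: listPartitionsPsWithAuth short-circuits to getPartitions(db, tbl, -1)
    System.out.println(canTryDirectSQL(Arrays.asList("", "")));
    // ["2008-04-07", ""] -> false: partial filter, answered by the JDO query as before
    System.out.println(canTryDirectSQL(Arrays.asList("2008-04-07", "")));
    // [] -> false: no values supplied, handled as before
    System.out.println(canTryDirectSQL(Collections.<String>emptyList()));
  }
}

The getauth guard matters for the same reason: when PARTITION_LEVEL_PRIVILEGE auth is requested, the method stays on the JDO path so it can attach a PrincipalPrivilegeSet to each partition.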
ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (56 additions, 0 deletions)
@@ -679,6 +679,62 @@ public void testPartition() throws Throwable {
        System.err.println(StringUtils.stringifyException(e));
        assertTrue("Unable to create partition for table: " + tableName, false);
      }
      part_spec.clear();
      part_spec.put("ds", "2008-04-08");
      part_spec.put("hr", "13");
      try {
        hm.createPartition(tbl, part_spec);
      } catch (HiveException e) {
        System.err.println(StringUtils.stringifyException(e));
        assertTrue("Unable to create partition for table: " + tableName, false);
      }
      part_spec.clear();
      part_spec.put("ds", "2008-04-08");
      part_spec.put("hr", "14");
      try {
        hm.createPartition(tbl, part_spec);
      } catch (HiveException e) {
        System.err.println(StringUtils.stringifyException(e));
        assertTrue("Unable to create partition for table: " + tableName, false);
      }
      part_spec.clear();
      part_spec.put("ds", "2008-04-07");
      part_spec.put("hr", "12");
      try {
        hm.createPartition(tbl, part_spec);
      } catch (HiveException e) {
        System.err.println(StringUtils.stringifyException(e));
        assertTrue("Unable to create partition for table: " + tableName, false);
      }
      part_spec.clear();
      part_spec.put("ds", "2008-04-07");
      part_spec.put("hr", "13");
      try {
        hm.createPartition(tbl, part_spec);
      } catch (HiveException e) {
        System.err.println(StringUtils.stringifyException(e));
        assertTrue("Unable to create partition for table: " + tableName, false);
      }

      Map<String, String> partialSpec = new HashMap<>();
      partialSpec.put("ds", "2008-04-07");
      assertEquals(2, hm.getPartitions(tbl, partialSpec).size());

      partialSpec = new HashMap<>();
      partialSpec.put("ds", "2008-04-08");
      assertEquals(3, hm.getPartitions(tbl, partialSpec).size());

      partialSpec = new HashMap<>();
      partialSpec.put("hr", "13");
      assertEquals(2, hm.getPartitions(tbl, partialSpec).size());

      partialSpec = new HashMap<>();
      assertEquals(5, hm.getPartitions(tbl, partialSpec).size());

      partialSpec = new HashMap<>();
      partialSpec.put("hr", "14");
      assertEquals(1, hm.getPartitions(tbl, partialSpec).size());

      hm.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
    } catch (Throwable e) {
      System.err.println(StringUtils.stringifyException(e));
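The new assertions in TestHive#testPartition pin down the expected counts for partial partition specs. Below is a hedged, metastore-free sketch of the same arithmetic; PartialSpecCounts and its matching rule are illustrative assumptions, not Hive API. A partition matches a partial spec when every key named in the spec matches exactly, and the empty spec, the case the directSQL fast path now serves, matches all five partitions. The fifth partition, created earlier in testPartition, is assumed here to be (ds=2008-04-08, hr=12), which is the only combination consistent with the asserted counts.

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Hypothetical helper reproducing the counts asserted above: five (ds, hr)
// partitions exist once the new createPartition calls have run.
public class PartialSpecCounts {

  static Map<String, String> spec(String ds, String hr) {
    Map<String, String> m = new LinkedHashMap<>();
    m.put("ds", ds);
    m.put("hr", hr);
    return m;
  }

  // The (2008-04-08, 12) entry is the partition assumed to be created earlier in the test.
  static final List<Map<String, String>> PARTITIONS = Arrays.asList(
      spec("2008-04-08", "12"), spec("2008-04-08", "13"), spec("2008-04-08", "14"),
      spec("2008-04-07", "12"), spec("2008-04-07", "13"));

  // A partition matches a partial spec when every key present in the spec has
  // exactly the partition's value; keys absent from the spec match anything.
  static long count(Map<String, String> partialSpec) {
    return PARTITIONS.stream()
        .filter(p -> partialSpec.entrySet().stream()
            .allMatch(e -> e.getValue().equals(p.get(e.getKey()))))
        .count();
  }

  public static void main(String[] args) {
    System.out.println(count(Collections.singletonMap("ds", "2008-04-07"))); // 2
    System.out.println(count(Collections.singletonMap("ds", "2008-04-08"))); // 3
    System.out.println(count(Collections.singletonMap("hr", "13")));         // 2
    System.out.println(count(Collections.<String, String>emptyMap()));       // 5, the "all partitions" case
    System.out.println(count(Collections.singletonMap("hr", "14")));         // 1
  }
}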