Commit 888968f

Fix issues in code style

chenghao-intel committed Jul 28, 2014
1 parent 27540ba · commit 888968f

Showing 4 changed files with 21 additions and 14 deletions.
@@ -51,8 +51,10 @@ private[hive] sealed trait TableReader {
  * data warehouse directory.
  */
 private[hive]
-class HadoopTableReader(@transient attributes: Seq[Attribute],
-    @transient relation: MetastoreRelation, @transient sc: HiveContext)
+class HadoopTableReader(
+    @transient attributes: Seq[Attribute],
+    @transient relation: MetastoreRelation,
+    @transient sc: HiveContext)
   extends TableReader {
 
   // Choose the minimum number of splits. If mapred.map.tasks is set, then use that unless
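For readers outside the Spark codebase, the hunk above applies the project's multi-parameter declaration style. A minimal, self-contained sketch of the same convention, with invented names (an illustration, not code from the commit):

```scala
// One parameter per line, indented four spaces, closing parenthesis on the
// last parameter's line, and `extends` indented two spaces on its own line.
class ExampleTableReader(
    @transient val tableName: String,
    @transient val columnNames: Seq[String],
    @transient val minSplits: Int)
  extends Serializable {

  override def toString: String =
    s"ExampleTableReader($tableName, [${columnNames.mkString(", ")}], $minSplits)"
}
```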
@@ -135,7 +137,8 @@ class HadoopTableReader(@transient attributes: Seq[Attribute],
    * subdirectory of each partition being read. If None, then all files are accepted.
    */
   def makeRDDForPartitionedTable(
-      partitionToDeserializer: Map[HivePartition, Class[_ <: Deserializer]],
+      partitionToDeserializer: Map[HivePartition,
+        Class[_ <: Deserializer]],
       filterOpt: Option[PathFilter]): RDD[Row] = {
     val hivePartitionRDDs = partitionToDeserializer.map { case (partition, partDeserializer) =>
       val partDesc = Utilities.getPartitionDesc(partition)
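The doc comment in this hunk says the filter applies to files in each partition's subdirectory and that `None` accepts all files. A small sketch of that contract, assuming only the Hadoop `PathFilter` interface (the helper itself is hypothetical):

```scala
import org.apache.hadoop.fs.{Path, PathFilter}

// Hypothetical helper mirroring the documented contract of `filterOpt`:
// with no filter, every path is accepted; otherwise defer to the filter.
def acceptPath(filterOpt: Option[PathFilter], path: Path): Boolean =
  filterOpt.forall(_.accept(path))
```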
@@ -261,8 +264,11 @@ private[hive] object HadoopTableReader extends HiveInspectors {
    *
    * @return Iterable Row object that transformed from the given iterable input.
    */
-  def fillObject(iter: Iterator[Writable], deserializer: Deserializer,
-      attrs: Seq[(Attribute, Int)], row: GenericMutableRow): Iterator[Row] = {
+  def fillObject(
+      iter: Iterator[Writable],
+      deserializer: Deserializer,
+      attrs: Seq[(Attribute, Int)],
+      row: GenericMutableRow): Iterator[Row] = {
     val soi = deserializer.getObjectInspector().asInstanceOf[StructObjectInspector]
     // get the field references according to the attributes(output of the reader) required
     val fieldRefs = attrs.map { case (attr, idx) => (soi.getStructFieldRef(attr.name), idx) }
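`fillObject` follows a pattern worth calling out: it takes a caller-supplied `GenericMutableRow` and returns an iterator that refills that single buffer per record instead of allocating a fresh row each time. A self-contained sketch of the pattern with plain stand-in types (nothing here is the actual Hive-backed implementation):

```scala
// Stand-ins: a record is a Map from field name to value; the reusable "row"
// is a plain Array. In the real method, field access goes through Hive's
// StructObjectInspector rather than a map lookup.
def fillRows(
    iter: Iterator[Map[String, Any]],
    attrs: Seq[(String, Int)],
    row: Array[Any]): Iterator[Array[Any]] = {
  iter.map { record =>
    attrs.foreach { case (name, idx) => row(idx) = record.getOrElse(name, null) }
    row // the same buffer every time, so no per-record allocation
  }
}
```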
@@ -77,7 +77,7 @@ case class HiveTableScan(
     val columnInternalNames = neededColumnIDs.map(HiveConf.getColumnInternalName(_)).mkString(",")
 
     if (attributes.size == relation.output.size) {
-      // TODO what if duplicated attributes queried?
+      // SQLContext#pruneFilterProject guarantees no duplicated value in `attributes`
       ColumnProjectionUtils.setFullyReadColumns(hiveConf)
     } else {
       ColumnProjectionUtils.appendReadColumnIDs(hiveConf, neededColumnIDs)
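For context on the `columnInternalNames` line in this hunk: Hive addresses pruned columns by positional internal names, which to my understanding take the form `_col<i>`; `HiveConf.getColumnInternalName` produces them. A hypothetical stand-in that mirrors that behavior (the naming scheme is an assumption, and this is not the real helper):

```scala
// e.g. columnInternalNames(Seq(0, 2)) == "_col0,_col2"
def columnInternalNames(neededColumnIDs: Seq[Int]): String =
  neededColumnIDs.map(id => "_col" + id).mkString(",")
```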
@@ -17,9 +17,10 @@
 
 package org.apache.spark.sql.hive.execution
 
+import org.scalatest.{BeforeAndAfterAll, FunSuite}
+
 import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.sql.hive.test.TestHive
-import org.scalatest.{BeforeAndAfterAll, FunSuite}
 
 class HiveTableScanSuite extends HiveComparisonTest {
   // MINOR HACK: You must run a query before calling reset the first time.
@@ -31,17 +32,17 @@ class HiveTableScanSuite extends HiveComparisonTest {
       | 'org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe'
       | STORED AS RCFILE
     """.stripMargin)
-    TestHive.hql("""from src
-      | insert into table part_scan_test PARTITION (ds='2010-01-01')
-      | select 100,100 limit 1
+    TestHive.hql("""FROM src
+      | INSERT INTO TABLE part_scan_test PARTITION (ds='2010-01-01')
+      | SELECT 100,100 LIMIT 1
     """.stripMargin)
-    TestHive.hql("""ALTER TABLE part_scan_test set SERDE
+    TestHive.hql("""ALTER TABLE part_scan_test SET SERDE
       | 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
     """.stripMargin)
-    TestHive.hql("""from src insert into table part_scan_test PARTITION (ds='2010-01-02')
-      | select 200,200 limit 1
+    TestHive.hql("""FROM src INSERT INTO TABLE part_scan_test PARTITION (ds='2010-01-02')
+      | SELECT 200,200 LIMIT 1
     """.stripMargin)
 
     createQueryTest("partition_based_table_scan_with_different_serde",
-      "select * from part_scan_test", false)
+      "SELECT * from part_scan_test", false)
   }
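The multi-line HQL in this test leans on Scala's `stripMargin`, which strips each line's leading whitespace up to and including the `|` margin character. A standalone example of the same idiom, reusing the first statement above:

```scala
val sql =
  """FROM src
    | INSERT INTO TABLE part_scan_test PARTITION (ds='2010-01-01')
    | SELECT 100,100 LIMIT 1
  """.stripMargin

// Each '|' (and everything before it) is removed, so the embedded SQL keeps
// only the single space that follows each margin character.
println(sql)
```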
