
Commit fae7b02

Addressed styling issues mentioned by @marmbrus
1 parent 9265366 commit fae7b02

File tree

20 files changed: 93 additions, 70 deletions

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala

Lines changed: 69 additions & 0 deletions
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql
+package catalyst
+
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.expressions.AttributeReference
+import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
+import org.apache.spark.sql.catalyst.types._
+
+/**
+ * Provides experimental support for generating catalyst schemas for scala objects.
+ */
+object ScalaReflection {
+  import scala.reflect.runtime.universe._
+
+  /** Returns a Sequence of attributes for the given case class type. */
+  def attributesFor[T: TypeTag]: Seq[Attribute] = schemaFor[T] match {
+    case s: StructType =>
+      s.fields.map(f => AttributeReference(f.name, f.dataType, nullable = true)())
+  }
+
+  /** Returns a catalyst DataType for the given Scala Type using reflection. */
+  def schemaFor[T: TypeTag]: DataType = schemaFor(typeOf[T])
+
+  /** Returns a catalyst DataType for the given Scala Type using reflection. */
+  def schemaFor(tpe: `Type`): DataType = tpe match {
+    case t if t <:< typeOf[Product] =>
+      val params = t.member("<init>": TermName).asMethod.paramss
+      StructType(
+        params.head.map(p => StructField(p.name.toString, schemaFor(p.typeSignature), true)))
+    case t if t <:< typeOf[Seq[_]] =>
+      val TypeRef(_, _, Seq(elementType)) = t
+      ArrayType(schemaFor(elementType))
+    case t if t <:< typeOf[String] => StringType
+    case t if t <:< definitions.IntTpe => IntegerType
+    case t if t <:< definitions.LongTpe => LongType
+    case t if t <:< definitions.DoubleTpe => DoubleType
+    case t if t <:< definitions.ShortTpe => ShortType
+    case t if t <:< definitions.ByteTpe => ByteType
+  }
+
+  implicit class CaseClassRelation[A <: Product : TypeTag](data: Seq[A]) {
+
+    /**
+     * Implicitly added to Sequences of case class objects. Returns a catalyst logical relation
+     * for the the data in the sequence.
+     */
+    def asRelation: LocalRelation = {
+      val output = attributesFor[A]
+      LocalRelation(output, data)
+    }
+  }
+}
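For context (not part of this commit): a rough usage sketch of the ScalaReflection API added above. The Person case class is hypothetical and used only for illustration; the calls mirror attributesFor, schemaFor, and the CaseClassRelation implicit exactly as they appear in the diff.

import org.apache.spark.sql.catalyst.ScalaReflection._

// Hypothetical case class, for illustration only.
case class Person(name: String, age: Int, friends: Seq[String])

// schemaFor derives a catalyst DataType via runtime reflection; for Person this
// should yield StructType(Seq(
//   StructField("name", StringType, true),
//   StructField("age", IntegerType, true),
//   StructField("friends", ArrayType(StringType), true)))
val schema = schemaFor[Person]

// attributesFor maps the same StructType to AttributeReferences, and the
// CaseClassRelation implicit turns a Seq of case classes into a LocalRelation.
val attributes = attributesFor[Person]
val relation = Seq(Person("Alice", 29, Seq("Bob"))).asRelation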

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/SqlParser.scala

Lines changed: 0 additions & 1 deletion
@@ -39,7 +39,6 @@ import org.apache.spark.sql.catalyst.types._
  * for a SQL like language should checkout the HiveQL support in the sql/hive sub-project.
  */
 class SqlParser extends StandardTokenParsers {
-
   def apply(input: String): LogicalPlan = {
     phrase(query)(new lexical.Scanner(input)) match {
       case Success(r, x) => r

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala

Lines changed: 1 addition & 2 deletions
@@ -21,8 +21,7 @@ package analysis
 
 import scala.collection.mutable
 
-import org.apache.spark.sql.catalyst.plans.logical.{Subquery, LogicalPlan}
-
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Subquery}
 
 /**
  * An interface for looking up relations by name. Used by an [[Analyzer]].

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package analysis
 
-import org.apache.spark.sql.catalyst.expressions.{Alias, NamedExpression, Expression, Attribute}
+import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, Expression, NamedExpression}
 import org.apache.spark.sql.catalyst.plans.logical.BaseRelation
 import org.apache.spark.sql.catalyst.trees.TreeNode
 

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala

Lines changed: 0 additions & 45 deletions
@@ -26,51 +26,6 @@ import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.plans.{Inner, JoinType}
 import org.apache.spark.sql.catalyst.types._
 
-/**
- * Provides experimental support for generating catalyst schemas for scala objects.
- */
-object ScalaReflection {
-  import scala.reflect.runtime.universe._
-
-  /** Returns a Sequence of attributes for the given case class type. */
-  def attributesFor[T: TypeTag]: Seq[Attribute] = schemaFor[T] match {
-    case s: StructType =>
-      s.fields.map(f => AttributeReference(f.name, f.dataType, nullable = true)())
-  }
-
-  /** Returns a catalyst DataType for the given Scala Type using reflection. */
-  def schemaFor[T: TypeTag]: DataType = schemaFor(typeOf[T])
-
-  /** Returns a catalyst DataType for the given Scala Type using reflection. */
-  def schemaFor(tpe: `Type`): DataType = tpe match {
-    case t if t <:< typeOf[Product] =>
-      val params = t.member("<init>": TermName).asMethod.paramss
-      StructType(
-        params.head.map(p => StructField(p.name.toString, schemaFor(p.typeSignature), true)))
-    case t if t <:< typeOf[Seq[_]] =>
-      val TypeRef(_, _, Seq(elementType)) = t
-      ArrayType(schemaFor(elementType))
-    case t if t <:< typeOf[String] => StringType
-    case t if t <:< definitions.IntTpe => IntegerType
-    case t if t <:< definitions.LongTpe => LongType
-    case t if t <:< definitions.DoubleTpe => DoubleType
-    case t if t <:< definitions.ShortTpe => ShortType
-    case t if t <:< definitions.ByteTpe => ByteType
-  }
-
-  implicit class CaseClassRelation[A <: Product : TypeTag](data: Seq[A]) {
-
-    /**
-     * Implicitly added to Sequences of case class objects. Returns a catalyst logical relation
-     * for the the data in the sequence.
-     */
-    def asRelation: LocalRelation = {
-      val output = attributesFor[A]
-      LocalRelation(output, data)
-    }
-  }
-}
-
 /**
  * A collection of implicit conversions that create a DSL for constructing catalyst data structures.
  *

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/errors/package.scala

Lines changed: 3 additions & 2 deletions
@@ -25,8 +25,9 @@ import org.apache.spark.sql.catalyst.trees.TreeNode
  */
 package object errors {
 
-  class TreeNodeException[TreeType <: TreeNode[_]]
-    (tree: TreeType, msg: String, cause: Throwable) extends Exception(msg, cause) {
+  class TreeNodeException[TreeType <: TreeNode[_]](
+      tree: TreeType, msg: String, cause: Throwable)
+    extends Exception(msg, cause) {
 
     // Yes, this is the same as a default parameter, but... those don't seem to work with SBT
     // external project dependencies for some reason.

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ package catalyst
 package expressions
 
 import org.apache.spark.sql.catalyst.trees.TreeNode
-import org.apache.spark.sql.catalyst.types.{IntegralType, FractionalType, NumericType, DataType}
+import org.apache.spark.sql.catalyst.types.{DataType, FractionalType, IntegralType, NumericType}
 import org.apache.spark.sql.catalyst.errors.TreeNodeException
 
 abstract class Expression extends TreeNode[Expression] {

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Row.scala

Lines changed: 1 addition & 1 deletion
@@ -209,6 +209,6 @@ class RowOrdering(ordering: Seq[SortOrder]) extends Ordering[Row] {
       }
       i += 1
     }
-    0
+    return 0
   }
 }

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala

Lines changed: 0 additions & 1 deletion
@@ -22,7 +22,6 @@ package expressions
 import org.apache.spark.sql.catalyst.analysis.UnresolvedException
 import org.apache.spark.sql.catalyst.types._
 
-
 case class UnaryMinus(child: Expression) extends UnaryExpression {
   type EvaluatedType = Any
 

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@ package org.apache.spark.sql
 package catalyst
 package expressions
 
-import org.apache.spark.sql.catalyst.types.{StringType, BooleanType}
+import org.apache.spark.sql.catalyst.types.{BooleanType, StringType}
 import org.apache.spark.sql.catalyst.analysis.UnresolvedException
 
 trait Predicate extends Expression {

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/Optimizer.scala

Lines changed: 3 additions & 3 deletions
@@ -100,7 +100,7 @@ object BooleanSimplification extends Rule[LogicalPlan] {
  */
 object CombineFilters extends Rule[LogicalPlan] {
   def apply(plan: LogicalPlan): LogicalPlan = plan transform {
-    case ff@Filter(fc, nf@Filter(nc, grandChild)) => Filter(And(nc, fc), grandChild)
+    case ff @ Filter(fc, nf @ Filter(nc, grandChild)) => Filter(And(nc, fc), grandChild)
   }
 }
 
@@ -113,8 +113,8 @@ object CombineFilters extends Rule[LogicalPlan] {
  */
 object PushPredicateThroughProject extends Rule[LogicalPlan] {
   def apply(plan: LogicalPlan): LogicalPlan = plan transform {
-    case filter@Filter(condition, project@Project(fields, grandChild)) =>
-      val sourceAliases = fields.collect { case a@Alias(c, _) =>
+    case filter @ Filter(condition, project @ Project(fields, grandChild)) =>
+      val sourceAliases = fields.collect { case a @ Alias(c, _) =>
         (a.toAttribute: Attribute) -> c
       }.toMap
       project.copy(child = filter.copy(
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ package org.apache.spark.sql
1919
package catalyst
2020
package plans
2121

22-
import org.apache.spark.sql.catalyst.expressions.{Expression, Attribute}
22+
import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
2323
import org.apache.spark.sql.catalyst.trees.TreeNode
2424

2525
abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanType] {

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ package catalyst
 package plans
 package physical
 
-import org.apache.spark.sql.catalyst.expressions.{SortOrder, Expression}
+import org.apache.spark.sql.catalyst.expressions.{Expression, SortOrder}
 import org.apache.spark.sql.catalyst.types.IntegerType
 
 /**

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala

Lines changed: 3 additions & 1 deletion
@@ -21,9 +21,11 @@ package analysis
 
 import org.scalatest.FunSuite
 
-import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.plans.logical._
 
+/* Implicit conversions */
+import org.apache.spark.sql.catalyst.dsl.expressions._
+
 class AnalysisSuite extends FunSuite {
   val analyze = SimpleAnalyzer
 

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/HiveTypeCoercionSuite.scala

Lines changed: 0 additions & 1 deletion
@@ -22,7 +22,6 @@ package analysis
 import org.scalatest.FunSuite
 
 import org.apache.spark.sql.catalyst.types._
-import scala.Some
 
 class HiveTypeCoercionSuite extends FunSuite {
 

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala

Lines changed: 3 additions & 1 deletion
@@ -19,13 +19,15 @@ package org.apache.spark.sql
 package catalyst
 package optimizer
 
-import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.dsl.plans._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan}
 import org.apache.spark.sql.catalyst.rules.RuleExecutor
 import org.apache.spark.sql.catalyst.types.IntegerType
 
+// For implicit conversions
+import org.apache.spark.sql.catalyst.dsl.expressions._
+
 class ConstantFoldingSuite extends OptimizerTest {
 
   object Optimize extends RuleExecutor[LogicalPlan] {

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala

Lines changed: 3 additions & 2 deletions
@@ -5,8 +5,9 @@ package optimizer
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.rules._
 
-import dsl.plans._
-import dsl.expressions._
+/* Implicit conversions */
+import org.apache.spark.sql.catalyst.dsl.plans._
+import org.apache.spark.sql.catalyst.dsl.expressions._
 
 class FilterPushdownSuite extends OptimizerTest {
 

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizerTest.scala

Lines changed: 0 additions & 2 deletions
@@ -8,8 +8,6 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.util._
 
-/* Implicit conversions for creating query plans */
-
 /**
  * Provides helper methods for comparing plans produced by optimization rules with the expected
  * result

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -21,8 +21,8 @@ package trees
 
 import org.scalatest.FunSuite
 
-import org.apache.spark.sql.catalyst.rules.{RuleExecutor, Rule}
-import org.apache.spark.sql.catalyst.expressions.{Literal, IntegerLiteral, Expression}
+import org.apache.spark.sql.catalyst.expressions.{Expression, IntegerLiteral, Literal}
+import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
 
 class RuleExecutorSuite extends FunSuite {
   object DecrementLiterals extends Rule[Expression] {

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala

Lines changed: 0 additions & 1 deletion
@@ -24,7 +24,6 @@ import org.scalatest.FunSuite
 import org.apache.spark.sql.catalyst.expressions._
 
 class TreeNodeSuite extends FunSuite {
-
   test("top node changed") {
     val after = Literal(1) transform { case Literal(1, _) => Literal(2) }
     assert(after === Literal(2))
