[SPARK-10495] [SQL] Read date values in JSON data stored by Spark 1.5.0. #8806
File: JacksonGenerator.scala
@@ -73,6 +73,38 @@ private[sql] object JacksonGenerator {

            valWriter(field.dataType, v)
        }
        gen.writeEndObject()

      // For UDT, udt.serialize will produce SQL types. So, we need the following three cases.
      case (ArrayType(ty, _), v: ArrayData) =>
        gen.writeStartArray()
        v.foreach(ty, (_, value) => valWriter(ty, value))
        gen.writeEndArray()

Review comment (on the v.foreach call): Should probably replace this foreach with a while.

Reply: This foreach is provided by ArrayData.

      case (MapType(kt, vt, _), v: MapData) =>
        gen.writeStartObject()
        v.foreach(kt, vt, { (k, v) =>
          gen.writeFieldName(k.toString)
          valWriter(vt, v)
        })
        gen.writeEndObject()

Review comment (on the v.foreach call): Same as above.

      case (StructType(ty), v: InternalRow) =>
        gen.writeStartObject()
        var i = 0
        while (i < ty.length) {
          val field = ty(i)
          val value = v.get(i, field.dataType)
          if (value != null) {
            gen.writeFieldName(field.name)
            valWriter(field.dataType, value)
          }
          i += 1
        }
        gen.writeEndObject()

      case (dt, v) =>
        sys.error(
          s"Failed to convert value $v (class of ${v.getClass}) with the type of $dt to JSON.")
    }

    valWriter(rowSchema, row)
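The exchange above weighs ArrayData's foreach against a hand-rolled while loop, the same index-based pattern the StructType case in this hunk already uses. Below is a minimal, runnable sketch of that trade-off, with a plain Array[Int] standing in for Spark's internal ArrayData; IterationStyles, sumForeach, and sumWhile are illustrative names, not from the PR.

    object IterationStyles {
      // Closure-based iteration, analogous to v.foreach(ty, ...) in the diff.
      def sumForeach(xs: Array[Int]): Long = {
        var total = 0L
        xs.foreach(total += _) // concise, but each element goes through a function value
        total
      }

      // Index-based iteration, the style the first reviewer suggests.
      def sumWhile(xs: Array[Int]): Long = {
        var total = 0L
        var i = 0
        while (i < xs.length) { // no closure; predictable cost on hot serialization paths
          total += xs(i)
          i += 1
        }
        total
      }

      def main(args: Array[String]): Unit = {
        val xs = Array(1, 2, 3, 4)
        assert(sumForeach(xs) == sumWhile(xs))
        println(sumWhile(xs)) // prints 10
      }
    }

The reply settles the question for this diff: the foreach here is ArrayData's own type-aware iterator rather than a Scala collections method, so the closure-based form stays.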
@@ -133,6 +165,10 @@ private[sql] object JacksonGenerator {

          i += 1
        }
        gen.writeEndObject()

      case (dt, v) =>
        sys.error(
          s"Failed to convert value $v (class of ${v.getClass}) with the type of $dt to JSON.")
    }

    valWriter(rowSchema, row)
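Both hunks grow the same shape of code: a recursive valWriter that pattern-matches on (DataType, value) pairs, writes scalars directly, recurses into containers, and fails loudly on anything unexpected. Here is a self-contained sketch of that shape against Jackson's real JsonGenerator API, with hypothetical SimpleType stand-ins (SString, SInt, SArray, SStruct) replacing Spark's DataType, ArrayData, and InternalRow.

    import java.io.StringWriter
    import com.fasterxml.jackson.core.{JsonFactory, JsonGenerator}

    // Hypothetical, simplified stand-ins for Spark's DataType hierarchy.
    sealed trait SimpleType
    case object SString extends SimpleType
    case object SInt extends SimpleType
    case class SArray(elementType: SimpleType) extends SimpleType
    case class SStruct(fields: Seq[(String, SimpleType)]) extends SimpleType

    object MiniJacksonGenerator {
      // Mirrors the recursive valWriter shape added in the diff: match on
      // (type, value), recurse into containers, fail loudly on anything else.
      def valWriter(gen: JsonGenerator, dataType: SimpleType, value: Any): Unit =
        (dataType, value) match {
          case (_, null) => gen.writeNull()
          case (SString, v: String) => gen.writeString(v)
          case (SInt, v: Int) => gen.writeNumber(v)
          case (SArray(ty), v: Seq[_]) =>
            gen.writeStartArray()
            v.foreach(e => valWriter(gen, ty, e))
            gen.writeEndArray()
          case (SStruct(fields), v: Seq[_]) =>
            gen.writeStartObject()
            fields.zip(v).foreach { case ((name, ty), fv) =>
              if (fv != null) {
                gen.writeFieldName(name)
                valWriter(gen, ty, fv)
              }
            }
            gen.writeEndObject()
          case (dt, v) =>
            sys.error(s"Failed to convert value $v (class of ${v.getClass}) with the type of $dt to JSON.")
        }

      def main(args: Array[String]): Unit = {
        val out = new StringWriter()
        val gen = new JsonFactory().createGenerator(out)
        val schema = SStruct(Seq("name" -> SString, "scores" -> SArray(SInt)))
        valWriter(gen, schema, Seq("alice", Seq(1, 2, 3)))
        gen.close()
        println(out.toString) // {"name":"alice","scores":[1,2,3]}
      }
    }

Keeping the catch-all case at the bottom is what turns a silent mis-serialization into the explicit "Failed to convert value ..." error the diff adds.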
File: JacksonParser.scala
@@ -62,10 +62,23 @@ private[sql] object JacksonParser {

          // guard the non string type
          null

        case (VALUE_STRING, BinaryType) =>
          parser.getBinaryValue

        case (VALUE_STRING, DateType) =>
          val stringValue = parser.getText
          if (stringValue.contains("-")) {
            // The format of this string will probably be "yyyy-mm-dd".
            DateTimeUtils.millisToDays(DateTimeUtils.stringToTime(stringValue).getTime)
          } else {
            // In Spark 1.5.0, we stored the value as the number of days since the epoch,
            // rendered as a string. So, we just convert it to an Int.
            stringValue.toInt
          }

Review comment (on the "yyyy-mm-dd" comment): Not sure whether this is still true if we take different locales into consideration. Maybe use a

        case (VALUE_STRING, TimestampType) =>
          // This one will lose the microseconds part.
          // See https://issues.apache.org/jira/browse/SPARK-10681.
          DateTimeUtils.stringToTime(parser.getText).getTime * 1000L

        case (VALUE_NUMBER_INT, TimestampType) =>
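The DateType branch above is the heart of SPARK-10495: Spark 1.5.0 wrote DateType values to JSON as the raw days-since-epoch count rendered as a string, so the reader now has to accept both that form and a normal date string. Here is a standalone sketch of the compatibility check, assuming java.text.SimpleDateFormat in place of Spark's DateTimeUtils.stringToTime/millisToDays helpers; DateCompatSketch and parseDate are illustrative names.

    import java.text.SimpleDateFormat
    import java.util.TimeZone

    object DateCompatSketch {
      private val MillisPerDay = 24L * 60 * 60 * 1000

      // Returns the DateType value as the number of days since the Unix epoch,
      // accepting both "yyyy-MM-dd" strings and Spark 1.5.0's stringified day counts.
      def parseDate(stringValue: String): Int =
        if (stringValue.contains("-")) {
          // Normal case: a date string such as "2015-09-16".
          val fmt = new SimpleDateFormat("yyyy-MM-dd")
          fmt.setTimeZone(TimeZone.getTimeZone("UTC"))
          (fmt.parse(stringValue).getTime / MillisPerDay).toInt
        } else {
          // Spark 1.5.0 compatibility: the value is already a day count in a string.
          stringValue.toInt
        }

      def main(args: Array[String]): Unit = {
        println(parseDate("1970-01-02")) // 1
        println(parseDate("1"))          // 1 (value as written by Spark 1.5.0)
      }
    }

The contains("-") test is exactly what the review comment questions: it assumes every date string carries a hyphen, which holds for yyyy-MM-dd output but not necessarily for other locales' date formats.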