package com.sparkbyexamples.spark.dataframe

import org.apache.spark.sql.types.{IntegerType, _}

/**
 * Demonstrates the Spark SQL `DataType` API: constructing types from JSON
 * and DDL strings, inspecting the metadata exposed by each type
 * (json / sql / catalogString / defaultSize, ...), and building schemas
 * with `StructType` / `StructField`.
 */
object DataTypeExample extends App {

  // --- DataType.fromJson: parse a JSON type description into a DataType ---
  val typeFromJson = DataType.fromJson(
    """{"type":"array",
      |"elementType":"string","containsNull":false}""".stripMargin)
  println(typeFromJson.getClass) // ArrayType

  // A bare JSON string literal denotes a primitive type.
  val typeFromJson2 = DataType.fromJson("\"string\"")
  println(typeFromJson2.getClass) // StringType

  // --- DataType.fromDDL: parse a SQL DDL column list into a DataType ---
  val ddlSchemaStr = "`fullName` STRUCT<`first`: STRING, `last`: STRING," +
    "`middle`: STRING>,`age` INT,`gender` STRING"
  val ddlSchema = DataType.fromDDL(ddlSchemaStr)
  println(ddlSchema.getClass) // StructType
  //DataType.canWrite()
  //DataType.equalsStructurally()

  // --- StringType: common metadata accessors available on every DataType ---
  val stringType = DataTypes.StringType
  println("json : " + stringType.json)             // JSON representation of the type
  println("prettyJson : " + stringType.prettyJson) // JSON, pretty-printed
  println("simpleString : " + stringType.simpleString)
  println("sql : " + stringType.sql)
  println("typeName : " + stringType.typeName)
  println("catalogString : " + stringType.catalogString)
  println("defaultSize : " + stringType.defaultSize)

  // --- ArrayType: two equivalent construction styles ---
  val arr = ArrayType(IntegerType, false)                    // Scala-style constructor
  val arrayType = DataTypes.createArrayType(StringType, true) // Java-style factory
  println("json() : " + arrayType.json)
  println("prettyJson() : " + arrayType.prettyJson)
  println("simpleString() : " + arrayType.simpleString)
  println("sql() : " + arrayType.sql)
  println("typeName() : " + arrayType.typeName)
  println("catalogString() : " + arrayType.catalogString)
  println("defaultSize() : " + arrayType.defaultSize)

  // ArrayType-specific accessors.
  println("containsNull : " + arrayType.containsNull)
  println("elementType : " + arrayType.elementType)
  println("productElement : " + arrayType.productElement(0))

  // --- MapType: two equivalent construction styles ---
  val mapType1 = MapType(StringType, IntegerType)
  val mapType = DataTypes.createMapType(StringType, IntegerType)
  println("keyType() : " + mapType.keyType)
  println("valueType : " + mapType.valueType)
  println("valueContainsNull : " + mapType.valueContainsNull)
  println("productElement : " + mapType.productElement(1))

  // --- DateType / TimestampType ---
  // FIX: DateType and TimestampType are singleton case objects, not classes;
  // the original `DateType()` / `TimestampType()` calls do not compile.
  val dateType1 = DateType
  val dateType = DataTypes.DateType

  val timestampType1 = TimestampType
  val timestampType = DataTypes.TimestampType

  // --- StructType: three ways to build a schema ---
  // 1) Java-style factory methods.
  val structType = DataTypes.createStructType(
    Array(DataTypes.createStructField("fieldName", StringType, true)))

  // 2) Scala-style constructor with an Array of StructFields.
  val simpleSchema = StructType(Array(
    StructField("name", StringType, true),
    StructField("id", IntegerType, true),
    StructField("gender", StringType, true),
    StructField("salary", DoubleType, true)
  ))

  // 3) Fluent builder via StructType.add, including a nested struct.
  val anotherSchema = new StructType()
    .add("name", new StructType()
      .add("firstname", StringType)
      .add("lastname", StringType))
    .add("id", IntegerType)
    .add("salary", DoubleType)

  // FIX: the original ended with a dangling `anotherSchema.` member access,
  // which is a compile error. Print the schema instead.
  println(anotherSchema.prettyJson)
}