@@ -21,15 +21,14 @@ import java.net.URI
 import java.sql.{Connection, DatabaseMetaData, Driver, DriverManager, ResultSet, ResultSetMetaData,
   SQLException}
 
-import collection.JavaConverters._
+import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
-import org.slf4j.LoggerFactory
-
-import org.apache.hadoop.hive.llap.FieldDesc
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory
 import org.apache.hadoop.hive.serde2.typeinfo._
+import org.slf4j.LoggerFactory
+
 
 import org.apache.spark.sql.types._
 
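A note on the import fix in the hunk above: in Scala 2, "import collection.JavaConverters._" is a relative import that resolves against whatever "collection" package happens to be in scope, so the fully qualified "scala.collection.JavaConverters._" form is safer. A minimal sketch of the conversion this file depends on (illustration only, not part of the patch; the demo object name is made up):

import scala.collection.JavaConverters._

object JavaConvertersDemo extends App {
  // asScala wraps the Java list in a Scala Buffer without copying; this is
  // how the struct-field names and types are traversed with zip/map below.
  val names: java.util.List[String] = java.util.Arrays.asList("id", "name")
  println(names.asScala.mkString(", "))  // prints: id, name
}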
@@ -259,43 +258,48 @@ class JDBCWrapper {
 
   private def getCatalystType(typeInfo: TypeInfo): DataType = {
     typeInfo.getCategory match {
-      case Category.PRIMITIVE => getCatalystType(typeInfo.asInstanceOf[PrimitiveTypeInfo])
-      case Category.LIST => ArrayType(
-        getCatalystType(typeInfo.asInstanceOf[ListTypeInfo].getListElementTypeInfo))
-      case Category.MAP => MapType(
+      case Category.PRIMITIVE =>
+        getCatalystType(typeInfo.asInstanceOf[PrimitiveTypeInfo])
+      case Category.LIST =>
+        ArrayType(getCatalystType(typeInfo.asInstanceOf[ListTypeInfo].getListElementTypeInfo))
+      case Category.MAP =>
+        MapType(
         getCatalystType(typeInfo.asInstanceOf[MapTypeInfo].getMapKeyTypeInfo),
         getCatalystType(typeInfo.asInstanceOf[MapTypeInfo].getMapValueTypeInfo))
-      case Category.STRUCT => StructType(getCatalystStructFields(typeInfo.asInstanceOf[StructTypeInfo]))
-      case _ => throw new SQLException("Unsupported type " + typeInfo)
+      case Category.STRUCT =>
+        StructType(getCatalystStructFields(typeInfo.asInstanceOf[StructTypeInfo]))
+      case _ =>
+        throw new SQLException("Unsupported type " + typeInfo)
     }
   }
 
   private def getCatalystType(primitiveTypeInfo: PrimitiveTypeInfo): DataType = {
     primitiveTypeInfo.getPrimitiveCategory match {
-      case PrimitiveCategory.BOOLEAN   => BooleanType
-      case PrimitiveCategory.BYTE      => ByteType
-      case PrimitiveCategory.SHORT     => ShortType
-      case PrimitiveCategory.INT       => IntegerType
-      case PrimitiveCategory.LONG      => LongType
-      case PrimitiveCategory.FLOAT     => FloatType
-      case PrimitiveCategory.DOUBLE    => DoubleType
-      case PrimitiveCategory.STRING    => StringType
-      case PrimitiveCategory.CHAR      => StringType
-      case PrimitiveCategory.VARCHAR   => StringType
-      case PrimitiveCategory.DATE      => DateType
+      case PrimitiveCategory.BOOLEAN => BooleanType
+      case PrimitiveCategory.BYTE => ByteType
+      case PrimitiveCategory.SHORT => ShortType
+      case PrimitiveCategory.INT => IntegerType
+      case PrimitiveCategory.LONG => LongType
+      case PrimitiveCategory.FLOAT => FloatType
+      case PrimitiveCategory.DOUBLE => DoubleType
+      case PrimitiveCategory.STRING => StringType
+      case PrimitiveCategory.CHAR => StringType
+      case PrimitiveCategory.VARCHAR => StringType
+      case PrimitiveCategory.DATE => DateType
       case PrimitiveCategory.TIMESTAMP => TimestampType
-      case PrimitiveCategory.BINARY    => BinaryType
-      case PrimitiveCategory.DECIMAL   => DecimalType(
+      case PrimitiveCategory.BINARY => BinaryType
+      case PrimitiveCategory.DECIMAL => DecimalType(
         primitiveTypeInfo.asInstanceOf[DecimalTypeInfo].getPrecision,
         primitiveTypeInfo.asInstanceOf[DecimalTypeInfo].getScale)
       case _ => throw new SQLException("Unsupported type " + primitiveTypeInfo)
     }
   }
 
   private def getCatalystStructFields(structTypeInfo: StructTypeInfo): Array[StructField] = {
-    structTypeInfo.getAllStructFieldNames.asScala.zip(structTypeInfo.getAllStructFieldTypeInfos.asScala).map(
+    structTypeInfo.getAllStructFieldNames.asScala
+      .zip(structTypeInfo.getAllStructFieldTypeInfos.asScala).map(
       { case (fieldName, fieldType) => new StructField(fieldName, getCatalystType(fieldType)) }
-      ).toArray
+    ).toArray
   }
 
   private def getCatalystType(typeName: String): DataType = {
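For context, a sketch of how the recursive mapping in this hunk resolves a nested Hive type (assumed usage, not part of the patch; TypeInfoUtils is Hive's standard parser for type strings, and the demo object name is made up):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils

object TypeMappingDemo extends App {
  // Parse a nested Hive type string into a TypeInfo tree.
  val typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("struct<id:int,tags:array<string>>")

  // getCatalystType is private to JDBCWrapper; applied to this TypeInfo it
  // would recurse through STRUCT -> (PRIMITIVE, LIST) and produce:
  //   StructType(Array(
  //     StructField("id", IntegerType),
  //     StructField("tags", ArrayType(StringType))))
  println(typeInfo.getTypeName)  // struct<id:int,tags:array<string>>
}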