public class OrcRelation
extends java.lang.Object
| Constructor and Description |
| --- |
| `OrcRelation()` |
| Modifier and Type | Method and Description |
| --- | --- |
| `static scala.collection.immutable.Map<java.lang.String,java.lang.String>` | `extensionsForCompressionCodecNames()` |
| `static DataType` | `inspectorToDataType(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector inspector)` |
| `static DataType` | `javaClassToDataType(java.lang.Class<?> clz)` |
| `static java.lang.String` | `ORC_COMPRESSION()` |
| `static void` | `setRequiredColumns(org.apache.hadoop.conf.Configuration conf, StructType physicalSchema, StructType requestedSchema)` |
| `static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector` | `toInspector(DataType dataType)` |
| `static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector` | `toInspector(org.apache.spark.sql.catalyst.expressions.Expression expr)` |
| `static org.apache.spark.sql.hive.HiveInspectors.typeInfoConversions` | `typeInfoConversions(DataType dt)` |
| `static java.lang.Object` | `unwrap(java.lang.Object data, org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector oi)` |
| `static scala.collection.Iterator<org.apache.spark.sql.catalyst.InternalRow>` | `unwrapOrcStructs(org.apache.hadoop.conf.Configuration conf, StructType dataSchema, scala.Option<org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector> maybeStructOI, scala.collection.Iterator<org.apache.hadoop.io.Writable> iterator)` |
| `static scala.Function3<java.lang.Object,org.apache.spark.sql.catalyst.expressions.MutableRow,java.lang.Object,scala.runtime.BoxedUnit>` | `unwrapperFor(org.apache.hadoop.hive.serde2.objectinspector.StructField field)` |
| `static java.lang.Object[]` | `wrap(org.apache.spark.sql.catalyst.InternalRow row, scala.collection.Seq<org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector> inspectors, java.lang.Object[] cache, DataType[] dataTypes)` |
| `static java.lang.Object` | `wrap(java.lang.Object a, org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector oi, DataType dataType)` |
| `static java.lang.Object[]` | `wrap(scala.collection.Seq<java.lang.Object> row, scala.collection.Seq<org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector> inspectors, java.lang.Object[] cache, DataType[] dataTypes)` |
| `protected static scala.Function1<java.lang.Object,java.lang.Object>` | `wrapperFor(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector oi, DataType dataType)` |
`public static java.lang.String ORC_COMPRESSION()`

`public static scala.collection.immutable.Map<java.lang.String,java.lang.String> extensionsForCompressionCodecNames()`
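These two accessors go together: ORC_COMPRESSION names the configuration property used to choose the ORC compression codec, and extensionsForCompressionCodecNames maps codec names to the file-name extension written for them. A minimal sketch, assuming the class lives in org.apache.spark.sql.hive.orc as in Spark's spark-hive module and is reachable from calling code (it may be package-private in some releases), e.g. pasted into a spark-shell built with Hive support:

```scala
import org.apache.spark.sql.hive.orc.OrcRelation  // assumed location in the spark-hive module

// Configuration key used to select the ORC compression codec.
val key: String = OrcRelation.ORC_COMPRESSION

// Codec name -> file-name extension produced for that codec.
val extensions: Map[String, String] = OrcRelation.extensionsForCompressionCodecNames

println(s"compression key: $key")
extensions.foreach { case (codec, ext) => println(s"$codec -> $ext") }
```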
`public static scala.collection.Iterator<org.apache.spark.sql.catalyst.InternalRow> unwrapOrcStructs(org.apache.hadoop.conf.Configuration conf, StructType dataSchema, scala.Option<org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector> maybeStructOI, scala.collection.Iterator<org.apache.hadoop.io.Writable> iterator)`
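unwrapOrcStructs turns the Writable records read from an ORC split into Catalyst InternalRows, using the StructObjectInspector that describes them. The sketch below only illustrates the call shape, with no inspector and an empty record iterator; a real caller would pass the inspector obtained from the ORC reader:

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.Writable
import org.apache.spark.sql.hive.orc.OrcRelation  // assumed location
import org.apache.spark.sql.types.{StringType, StructField, StructType}

val conf = new Configuration()
val dataSchema = StructType(Seq(StructField("name", StringType)))

// Empty input, just to exercise the signature; normally `maybeStructOI` holds the
// file's StructObjectInspector and the iterator yields its records.
val noRecords: Iterator[Writable] = Iterator.empty
val rows = OrcRelation.unwrapOrcStructs(conf, dataSchema, None, noRecords)
println(rows.hasNext) // false: nothing to unwrap
```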
`public static void setRequiredColumns(org.apache.hadoop.conf.Configuration conf, StructType physicalSchema, StructType requestedSchema)`
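setRequiredColumns records in the Hadoop Configuration which of the file's physical columns a scan actually needs, so the ORC reader can prune the rest. A sketch under that reading (the exact configuration keys it sets are an internal detail of the ORC input format):

```scala
import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.hive.orc.OrcRelation  // assumed location
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

val conf = new Configuration()

// All columns physically present in the ORC files.
val physicalSchema = StructType(Seq(
  StructField("id", IntegerType),
  StructField("name", StringType),
  StructField("payload", StringType)
))

// Only `id` and `name` are requested by the query.
val requestedSchema = StructType(Seq(
  StructField("id", IntegerType),
  StructField("name", StringType)
))

OrcRelation.setRequiredColumns(conf, physicalSchema, requestedSchema)
```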
`public static DataType javaClassToDataType(java.lang.Class<?> clz)`
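javaClassToDataType maps a plain Java class to the Catalyst DataType that HiveInspectors associates with it. A small sketch that prints the results rather than hard-coding them, since the exact mapping is an implementation detail:

```scala
import org.apache.spark.sql.hive.orc.OrcRelation  // assumed location

// Print the Catalyst type chosen for a few common Java classes.
Seq(classOf[java.lang.Integer], classOf[java.lang.String], classOf[java.sql.Timestamp])
  .foreach { clz =>
    println(s"${clz.getName} -> ${OrcRelation.javaClassToDataType(clz)}")
  }
```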
`public static java.lang.Object unwrap(java.lang.Object data, org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector oi)`
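unwrap converts a value from its Hive representation into the corresponding Catalyst value, guided by the value's ObjectInspector. A minimal sketch using a writable string inspector from Hive's PrimitiveObjectInspectorFactory; for strings the result is expected to be Catalyst's internal string type (UTF8String), though that is an implementation detail rather than something this page documents:

```scala
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
import org.apache.hadoop.io.Text
import org.apache.spark.sql.hive.orc.OrcRelation  // assumed location

// Pair a Hadoop Text value with the inspector that describes it.
val oi = PrimitiveObjectInspectorFactory.writableStringObjectInspector
val catalystValue = OrcRelation.unwrap(new Text("hello"), oi)
println(catalystValue) // expected to print the Catalyst-side string value
```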
`protected static scala.Function1<java.lang.Object,java.lang.Object> wrapperFor(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector oi, DataType dataType)`

`public static scala.Function3<java.lang.Object,org.apache.spark.sql.catalyst.expressions.MutableRow,java.lang.Object,scala.runtime.BoxedUnit> unwrapperFor(org.apache.hadoop.hive.serde2.objectinspector.StructField field)`
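wrapperFor and unwrapperFor build reusable conversion functions so a per-row scan does not have to re-dispatch on the ObjectInspector for every value; wrapperFor is protected and only reachable from subclasses. The sketch below derives a struct inspector via toInspector and asks for an unwrapper for its single field; the cast to StructObjectInspector is an assumption about what toInspector returns for a StructType:

```scala
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector
import org.apache.spark.sql.hive.orc.OrcRelation  // assumed location
import org.apache.spark.sql.types.{StringType, StructField, StructType}

val structType = StructType(Seq(StructField("name", StringType)))
// Assumption: toInspector yields a StructObjectInspector for a StructType.
val structOI = OrcRelation.toInspector(structType).asInstanceOf[StructObjectInspector]

// Function3(hiveStructValue, mutableRow, ordinal): copies the `name` field of a
// Hive struct value into the given MutableRow slot; applied once per row during a scan.
val setName = OrcRelation.unwrapperFor(structOI.getAllStructFieldRefs.get(0))
```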
`public static java.lang.Object wrap(java.lang.Object a, org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector oi, DataType dataType)`

`public static java.lang.Object[] wrap(org.apache.spark.sql.catalyst.InternalRow row, scala.collection.Seq<org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector> inspectors, java.lang.Object[] cache, DataType[] dataTypes)`

`public static java.lang.Object[] wrap(scala.collection.Seq<java.lang.Object> row, scala.collection.Seq<org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector> inspectors, java.lang.Object[] cache, DataType[] dataTypes)`
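The three wrap overloads are the write-side counterparts of unwrap: the single-value form converts one Catalyst value into its Hive representation, and the row forms do the same for every column, reusing the caller-supplied cache array across rows. A sketch of the single-value form with a Java int inspector:

```scala
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
import org.apache.spark.sql.hive.orc.OrcRelation  // assumed location
import org.apache.spark.sql.types.IntegerType

// Wrap a Catalyst integer into the form the given inspector describes.
val oi = PrimitiveObjectInspectorFactory.javaIntObjectInspector
val hiveValue = OrcRelation.wrap(42, oi, IntegerType)
println(hiveValue) // expected to be a boxed value the ORC serde can write
```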
`public static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector toInspector(DataType dataType)`

`public static org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector toInspector(org.apache.spark.sql.catalyst.expressions.Expression expr)`

`public static DataType inspectorToDataType(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector inspector)`
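toInspector builds a Hive ObjectInspector for a Catalyst DataType (the Expression overload presumably derives the inspector from the expression's data type, and for literal expressions may produce a constant inspector), and inspectorToDataType goes the other way. A round-trip sketch; that the round trip is lossless for common types is an assumption about the HiveInspectors implementation, not a documented guarantee:

```scala
import org.apache.spark.sql.hive.orc.OrcRelation  // assumed location
import org.apache.spark.sql.types.{ArrayType, StringType, StructField, StructType}

val original = StructType(Seq(
  StructField("name", StringType),
  StructField("tags", ArrayType(StringType))
))

// DataType -> ObjectInspector -> DataType.
val inspector = OrcRelation.toInspector(original)
val roundTripped = OrcRelation.inspectorToDataType(inspector)
println(roundTripped == original) // expected true for simple schemas like this one
```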
`public static org.apache.spark.sql.hive.HiveInspectors.typeInfoConversions typeInfoConversions(DataType dt)`
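typeInfoConversions wraps a DataType in the small HiveInspectors.typeInfoConversions helper used to obtain Hive's TypeInfo for it. In the sketch below, the helper's toTypeInfo method name is taken from the HiveInspectors source and may differ between Spark releases:

```scala
import org.apache.spark.sql.hive.orc.OrcRelation  // assumed location
import org.apache.spark.sql.types.DecimalType

// Convert a Catalyst decimal type into Hive's TypeInfo representation.
val typeInfo = OrcRelation.typeInfoConversions(DecimalType(10, 2)).toTypeInfo
println(typeInfo)
```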