```
public class HiveShim
extends java.lang.Object
```
Nested Class Summary

| Modifier and Type | Class and Description |
|---|---|
| static class | HiveShim.HiveFunctionWrapper$ |
Constructor Summary

| Constructor and Description |
|---|
| HiveShim() |
Method Summary

| Modifier and Type | Method and Description |
|---|---|
| static void | appendReadColumns(org.apache.hadoop.conf.Configuration conf, scala.collection.Seq<java.lang.Integer> ids, scala.collection.Seq<java.lang.String> names) |
| static java.lang.String | HIVE_GENERIC_UDF_MACRO_CLS() |
| static org.apache.hadoop.io.Writable | prepareWritable(org.apache.hadoop.io.Writable w, scala.collection.Seq<scala.Tuple2<java.lang.String,java.lang.String>> serDeProps) |
| static Decimal | toCatalystDecimal(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector hdoi, java.lang.Object data) |
| static int | UNLIMITED_DECIMAL_PRECISION() |
| static int | UNLIMITED_DECIMAL_SCALE() |
| static org.apache.hadoop.hive.ql.plan.FileSinkDesc | wrapperToFileSinkDesc(org.apache.spark.sql.hive.HiveShim.ShimFileSinkDesc w) |
Method Detail

```
public static int UNLIMITED_DECIMAL_PRECISION()
```
```
public static int UNLIMITED_DECIMAL_SCALE()
```
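UNLIMITED_DECIMAL_PRECISION() and UNLIMITED_DECIMAL_SCALE() expose the precision and scale the shim uses for Hive decimal columns declared without explicit bounds. The sketch below is illustrative only: HiveShim is package-private to Spark's hive module, so it assumes compilation inside the org.apache.spark.sql.hive package, and the concrete values (commonly 38 and 18) can vary by Spark version.

```scala
// Sketch only: HiveShim is package-private to Spark's hive module, so this
// assumes compilation inside the org.apache.spark.sql.hive package.
package org.apache.spark.sql.hive

import org.apache.spark.sql.types.DecimalType

object DecimalDefaultsSketch {
  def main(args: Array[String]): Unit = {
    // Build a Catalyst decimal type from the shim's "unlimited" defaults
    // (commonly DecimalType(38,18), but this depends on the Spark version).
    val fallback = DecimalType(
      HiveShim.UNLIMITED_DECIMAL_PRECISION,
      HiveShim.UNLIMITED_DECIMAL_SCALE)
    println(fallback)
  }
}
```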
```
public static java.lang.String HIVE_GENERIC_UDF_MACRO_CLS()
```
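HIVE_GENERIC_UDF_MACRO_CLS() returns the fully qualified class name the shim uses to recognize Hive macro functions. The following sketch shows a class-name check built on top of it; it assumes the constant names Hive's GenericUDFMacro class and, like the other sketches here, that the code is compiled inside the org.apache.spark.sql.hive package.

```scala
// Sketch only: assumes compilation inside org.apache.spark.sql.hive, since
// HiveShim is package-private to Spark's hive module.
package org.apache.spark.sql.hive

object MacroCheckSketch {
  /** True if the given Hive UDF class name denotes a Hive macro (class-name match). */
  def isHiveMacro(udfClassName: String): Boolean =
    udfClassName == HiveShim.HIVE_GENERIC_UDF_MACRO_CLS

  def main(args: Array[String]): Unit = {
    // Expected true if the constant is GenericUDFMacro's fully qualified name.
    println(isHiveMacro("org.apache.hadoop.hive.ql.udf.generic.GenericUDFMacro"))
    println(isHiveMacro("org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper"))
  }
}
```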
```
public static void appendReadColumns(org.apache.hadoop.conf.Configuration conf,
                                     scala.collection.Seq<java.lang.Integer> ids,
                                     scala.collection.Seq<java.lang.String> names)
```
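appendReadColumns records, in a Hadoop Configuration, which column ids and names a Hive table scan should read (column pruning). The sketch below calls it with a two-column projection; the hive.io.file.readcolumn.* keys printed at the end are Hive's ColumnProjectionUtils keys and are an assumption about where the shim ultimately writes the projection.

```scala
// Sketch only: assumes compilation inside org.apache.spark.sql.hive, since
// HiveShim is package-private to Spark's hive module.
package org.apache.spark.sql.hive

import org.apache.hadoop.conf.Configuration

object AppendReadColumnsSketch {
  def main(args: Array[String]): Unit = {
    val conf = new Configuration()

    // Columns a Hive table scan should read; ids and names line up positionally.
    val ids: Seq[Integer] = Seq(0, 2).map(i => Integer.valueOf(i))
    val names: Seq[String] = Seq("id", "price")

    HiveShim.appendReadColumns(conf, ids, names)

    // Assumption: the shim delegates to Hive's ColumnProjectionUtils, which
    // stores the projection under these configuration keys.
    println(conf.get("hive.io.file.readcolumn.ids"))
    println(conf.get("hive.io.file.readcolumn.names"))
  }
}
```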
```
public static org.apache.hadoop.io.Writable prepareWritable(org.apache.hadoop.io.Writable w,
                                                            scala.collection.Seq<scala.Tuple2<java.lang.String,java.lang.String>> serDeProps)
```
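prepareWritable gives the shim a chance to patch a raw Writable produced by a Hive input format with the table's SerDe properties before deserialization (this mainly matters for Avro-backed rows). The sketch below is hedged: the property keys are illustrative, and the pass-through behaviour assumed for a plain Text writable is not guaranteed.

```scala
// Sketch only: assumes compilation inside org.apache.spark.sql.hive, since
// HiveShim is package-private to Spark's hive module.
package org.apache.spark.sql.hive

import org.apache.hadoop.io.{Text, Writable}

object PrepareWritableSketch {
  def main(args: Array[String]): Unit = {
    // Table-level SerDe properties as (key, value) pairs; keys here are illustrative.
    val serDeProps: Seq[(String, String)] = Seq(
      "serialization.format" -> "1",
      "columns" -> "id,name")

    val raw: Writable = new Text("a raw row")

    // Patch the writable with whatever SerDe context it needs before deserialization;
    // for a plain Text value this is presumably a no-op and the same instance comes back.
    val prepared: Writable = HiveShim.prepareWritable(raw, serDeProps)
    println(prepared eq raw)
  }
}
```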
```
public static Decimal toCatalystDecimal(org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector hdoi,
                                        java.lang.Object data)
```
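toCatalystDecimal reads a decimal value through a Hive HiveDecimalObjectInspector and rebuilds it as Spark's Catalyst Decimal. The sketch below uses Hive's standard javaHiveDecimalObjectInspector from PrimitiveObjectInspectorFactory and a HiveDecimal literal; the resulting precision and scale come from the inspector's defaults, which this sketch does not pin down.

```scala
// Sketch only: assumes compilation inside org.apache.spark.sql.hive, since
// HiveShim is package-private to Spark's hive module.
package org.apache.spark.sql.hive

import java.math.BigDecimal

import org.apache.hadoop.hive.common.`type`.HiveDecimal
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory

import org.apache.spark.sql.types.Decimal

object ToCatalystDecimalSketch {
  def main(args: Array[String]): Unit = {
    // Hive object inspector for decimals represented as plain HiveDecimal objects.
    val hdoi = PrimitiveObjectInspectorFactory.javaHiveDecimalObjectInspector
    val hiveValue = HiveDecimal.create(new BigDecimal("123.45"))

    // The shim reads the value through the inspector (writable or plain Java form)
    // and rebuilds it as a Catalyst Decimal with the inspector's precision/scale.
    val catalystValue: Decimal = HiveShim.toCatalystDecimal(hdoi, hiveValue)
    println(catalystValue)
  }
}
```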
```
public static org.apache.hadoop.hive.ql.plan.FileSinkDesc wrapperToFileSinkDesc(org.apache.spark.sql.hive.HiveShim.ShimFileSinkDesc w)
```
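wrapperToFileSinkDesc converts Spark's serializable HiveShim.ShimFileSinkDesc wrapper back into Hive's own FileSinkDesc before it is handed to Hive's writer code. The sketch below assumes a (dir, tableInfo, compressed) constructor on ShimFileSinkDesc, which may not hold across Spark versions.

```scala
// Sketch only: assumes compilation inside org.apache.spark.sql.hive, since
// HiveShim and ShimFileSinkDesc are package-private to Spark's hive module.
package org.apache.spark.sql.hive

import org.apache.hadoop.hive.ql.plan.{FileSinkDesc, TableDesc}

import org.apache.spark.sql.hive.HiveShim.ShimFileSinkDesc

object FileSinkDescSketch {
  def main(args: Array[String]): Unit = {
    // Assumed constructor shape: (output directory, table descriptor, compressed flag).
    val wrapper = new ShimFileSinkDesc("/tmp/hive-output", new TableDesc(), false)

    // Unwrap into Hive's FileSinkDesc before calling into Hive's writer code.
    val hiveDesc: FileSinkDesc = HiveShim.wrapperToFileSinkDesc(wrapper)
    println(hiveDesc.getDirName)
  }
}
```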