org.apache.hadoop.hive.metastore.IMetaStoreClient.addDynamicPartitions(long,
String, String, List<String>)
in Hive 1.3.0/2.1.0 - will be removed in 2 releases
|
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.addDynamicPartitions(long,
String, String, List<String>) |
org.apache.hadoop.hive.serde2.ColumnProjectionUtils.appendReadColumnIDs(Configuration,
List<Integer>)
for backwards compatibility with <= 0.12, use appendReadColumns
|
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.bucketCols(List<String>,
int) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.collectionItemsTerminatedBy(char) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.comments(String) |
org.apache.hadoop.hive.metastore.IMetaStoreClient.compact(String, String,
String, CompactionType) |
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.compact(String, String,
String, CompactionType) |
org.apache.hadoop.hive.ql.metadata.HiveStorageHandler.configureTableJobProperties(TableDesc,
Map<String, String>) |
org.apache.orc.impl.InStream.create(String,
ByteBuffer[], long[], long, CompressionCodec, int) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.create(String, String,
List<HCatFieldSchema>) |
org.apache.hive.hcatalog.api.HCatAddPartitionDesc.create(String, String,
String, Map<String, String>) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createColumnarStructInspector(List<String>,
List<TypeInfo>, byte[], Text, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyObjectInspector(TypeInfo,
byte[], int, Text, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyObjectInspector(TypeInfo,
byte[], int, Text, boolean, byte, boolean) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyObjectInspector(TypeInfo,
byte[], int, Text, boolean, byte, boolean,
ObjectInspectorFactory.ObjectInspectorOptions) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyObjectInspector(TypeInfo,
byte[], int, Text, boolean, byte,
ObjectInspectorFactory.ObjectInspectorOptions) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyStructInspector(List<String>,
List<TypeInfo>, byte[], Text, boolean, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.LazyFactory.createLazyStructInspector(List<String>,
List<TypeInfo>, byte[], Text, boolean, boolean, byte, boolean) |
org.apache.hadoop.hive.metastore.IMetaStoreClient.dropTable(String,
boolean)
As of release 0.6.0 replaced by IMetaStoreClient.dropTable(String, String,
boolean, boolean). This method will be removed in release
0.7.0.
|
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.dropTable(String,
boolean) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.escapeChar(char) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.fieldsTerminatedBy(char) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.fileFormat(String) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getBucketCols() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getCols() |
org.apache.hadoop.hive.serde2.dynamic_type.SimpleCharStream.getColumn() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getComments() |
org.apache.hadoop.hive.ql.io.RCFile.Writer.getCompressionCodec() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getDatabaseName() |
org.apache.hive.hcatalog.api.HCatAddPartitionDesc.getDatabaseName() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getExternal() |
org.apache.hadoop.hive.ql.exec.Utilities.getFileExtension(JobConf,
boolean)
Use Utilities.getFileExtension(JobConf, boolean,
HiveOutputFormat)
|
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getFileFormat() |
org.apache.hive.hcatalog.common.HCatUtil.getHiveClient(HiveConf) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleListObjectInspector(ObjectInspector,
byte, Text, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleMapObjectInspector(ObjectInspector,
ObjectInspector, byte, byte, Text, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(List<String>,
List<ObjectInspector>, byte, Text, boolean, boolean, byte) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(List<String>,
List<ObjectInspector>, byte, Text, boolean, boolean, byte,
ObjectInspectorFactory.ObjectInspectorOptions) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(List<String>,
List<ObjectInspector>, List<String>, byte, Text, boolean, boolean,
byte) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazySimpleStructObjectInspector(List<String>,
List<ObjectInspector>, List<String>, byte, Text, boolean, boolean, byte,
ObjectInspectorFactory.ObjectInspectorOptions) |
org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory.getLazyUnionObjectInspector(List<ObjectInspector>,
byte, Text, boolean, byte) |
org.apache.hadoop.hive.serde2.dynamic_type.SimpleCharStream.getLine() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getLocation() |
org.apache.hive.hcatalog.api.HCatAddPartitionDesc.getLocation() |
org.apache.hive.hcatalog.data.schema.HCatFieldSchema.getMapKeyType()
as of 0.13, slated for removal with 0.15; use HCatFieldSchema.getMapKeyTypeInfo() instead
|
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getNumBuckets() |
org.apache.hadoop.hive.ql.udf.generic.SimpleGenericUDAFParameterInfo.getParameters() |
org.apache.hadoop.hive.ql.udf.generic.GenericUDAFParameterInfo.getParameters() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getPartitionCols() |
org.apache.hive.hcatalog.api.HCatAddPartitionDesc.getPartitionSpec() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getSerdeParams() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getSortCols() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getStorageHandler() |
org.apache.hadoop.hive.metastore.IMetaStoreClient.getTable(String)
As of release 0.6.0 replaced by IMetaStoreClient.getTable(String,
String). This method will be removed in release 0.7.0.
|
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.getTable(String) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getTableName() |
org.apache.hive.hcatalog.api.HCatAddPartitionDesc.getTableName() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.getTblProps() |
org.apache.hive.hplsql.HplsqlParser.getTokenNames() |
org.apache.hive.hplsql.HplsqlLexer.getTokenNames() |
org.apache.hive.hcatalog.data.schema.HCatFieldSchema.getType()
as of 0.13, slated for removal with 0.15; use HCatFieldSchema.getTypeInfo()
instead
|
org.apache.orc.Reader.getTypes()
use getSchema instead
|
org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(Configuration,
Properties) |
org.apache.hadoop.hive.serde2.AbstractEncodingAwareSerDe.initialize(Configuration,
Properties) |
org.apache.hadoop.hive.ql.udf.generic.GenericUDTF.initialize(ObjectInspector[]) |
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.initSerdeParams(Configuration,
Properties, String) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.isTableExternal(boolean) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.linesTerminatedBy(char) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.location(String) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.mapKeysTerminatedBy(char) |
org.apache.hive.hcatalog.streaming.HiveEndPoint.newConnection(boolean)
As of release 1.3/2.1. Replaced by HiveEndPoint.newConnection(boolean,
String)
|
org.apache.hive.hcatalog.streaming.HiveEndPoint.newConnection(boolean,
HiveConf)
As of release 1.3/2.1. Replaced by HiveEndPoint.newConnection(boolean,
HiveConf, String)
|
org.apache.hive.hcatalog.streaming.HiveEndPoint.newConnection(boolean,
HiveConf, UserGroupInformation)
As of release 1.3/2.1. Replaced by HiveEndPoint.newConnection(boolean,
HiveConf, UserGroupInformation, String)
|
org.apache.hadoop.hive.ql.io.RCFile.Reader.nextColumnsBatch() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.nullDefinedAs(char) |
org.apache.hive.service.cli.CLIService.openSession(TProtocolVersion, String,
String, Map<String, String>)
Use CLIService.openSession(TProtocolVersion,
String, String, String, Map)
|
org.apache.hive.service.cli.CLIService.openSessionWithImpersonation(TProtocolVersion,
String, String, Map<String, String>, String)
Use CLIService.openSessionWithImpersonation(TProtocolVersion, String,
String, String, Map, String)
|
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.partCols(List<HCatFieldSchema>) |
org.apache.hadoop.hive.ql.io.parquet.ProjectionPusher.pushProjectionsAndFilters(JobConf,
Path) |
org.apache.hadoop.hive.ql.io.NonSyncDataInputBuffer.readLine()
Use BufferedReader
|
org.apache.hadoop.hive.ql.hooks.PostExecute.run(SessionState,
Set<ReadEntity>, Set<WriteEntity>, LineageInfo,
UserGroupInformation) |
org.apache.hadoop.hive.ql.hooks.PreExecute.run(SessionState,
Set<ReadEntity>, Set<WriteEntity>, UserGroupInformation) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.serdeParam(String,
String) |
org.apache.hadoop.hive.ql.io.RCFile.ValueBuffer.setColumnValueBuffer(NonSyncDataOutputBuffer,
int) |
org.apache.hive.hcatalog.mapreduce.HCatInputFormat.setFilter(String)
as of 0.13, slated for removal with 0.15. Use HCatInputFormat.setInput(org.apache.hadoop.conf.Configuration, String,
String, String) instead, to specify a partition filter to
directly initialize the input with.
|
org.apache.hadoop.hive.serde2.ColumnProjectionUtils.setFullyReadColumns(Configuration)
for backwards compatibility with <= 0.12, use setReadAllColumns
|
org.apache.hadoop.hive.serde2.ColumnProjectionUtils.setReadColumnIDs(Configuration,
List<Integer>)
for backwards compatibility with <= 0.12, use setReadAllColumns and
appendReadColumns
|
org.apache.hadoop.hive.metastore.IMetaStoreClient.showLocks() |
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.showLocks() |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.sortCols(ArrayList<Order>) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.storageHandler(String) |
org.apache.hadoop.hive.metastore.IMetaStoreClient.tableExists(String)
As of release 0.6.0 replaced by IMetaStoreClient.tableExists(String,
String). This method will be removed in release 0.7.0.
|
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.tableExists(String) |
org.apache.hive.hcatalog.api.HCatCreateTableDesc.Builder.tblProps(Map<String,
String>) |
org.apache.hadoop.hive.metastore.ObjectStore.updateMStorageDescriptorTblPropURI(URI,
URI, String, boolean) |
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.updatePartitionColumnStatistics(ColumnStatistics) |
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.updateTableColumnStatistics(ColumnStatistics) |