public class TableInputFormat
extends org.apache.hadoop.mapreduce.InputFormat<org.ojai.Value,org.ojai.Document>
implements org.apache.hadoop.conf.Configurable
| Modifier and Type | Field and Description |
|---|---|
| protected com.mapr.db.impl.ConditionImpl | cond |
| static java.lang.String | COND_OBJ |
| static java.lang.String | EXCLUDE_EMBEDDEDFAMILY |
| static java.lang.String | FIELD_PATH |
| static java.lang.String | GET_DELETES |
| static java.lang.String | INPUT_TABLE |
| protected com.mapr.db.impl.MapRDBTableImpl | jTable |
| static java.lang.String | READ_ALL_CFS |
| static java.lang.String | START_ROW |
| static java.lang.String | STOP_ROW |
| Constructor and Description |
|---|
| TableInputFormat() |
| Modifier and Type | Method and Description |
|---|---|
| org.apache.hadoop.mapreduce.RecordReader<org.ojai.Value,org.ojai.Document> | createRecordReader(org.apache.hadoop.mapreduce.InputSplit split, org.apache.hadoop.mapreduce.TaskAttemptContext context) Creates a record reader for a given split. |
| org.apache.hadoop.conf.Configuration | getConf() Returns the current configuration. |
| java.util.List<org.apache.hadoop.mapreduce.InputSplit> | getSplits(org.apache.hadoop.mapreduce.JobContext context) Sets up the splits that will be served as inputs to the map tasks. |
| void | setConf(org.apache.hadoop.conf.Configuration arg0) Sets parameters in the configuration. |
protected com.mapr.db.impl.MapRDBTableImpl jTable
protected com.mapr.db.impl.ConditionImpl cond
public static final java.lang.String INPUT_TABLE
public static final java.lang.String FIELD_PATH
public static final java.lang.String COND_OBJ
public static final java.lang.String START_ROW
public static final java.lang.String STOP_ROW
public static final java.lang.String EXCLUDE_EMBEDDEDFAMILY
public static final java.lang.String GET_DELETES
public static final java.lang.String READ_ALL_CFS
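The public static final String fields above follow the usual Hadoop InputFormat pattern of naming configuration keys. Below is a minimal sketch of how a driver might populate a Configuration with them, assuming INPUT_TABLE takes the path of the source table and START_ROW/STOP_ROW bound the scanned key range; the key semantics and example values are assumptions and are not spelled out on this page.

```java
import org.apache.hadoop.conf.Configuration;
// Import for TableInputFormat itself is omitted because its package is not shown on this page.

public class TableInputFormatConf {
    public static Configuration build() {
        Configuration conf = new Configuration();
        // Key names come from the documented constants; the values are illustrative assumptions.
        conf.set(TableInputFormat.INPUT_TABLE, "/apps/my_table"); // path of the table to read (assumed)
        conf.set(TableInputFormat.START_ROW, "user0000");         // first row key of the scan (assumed)
        conf.set(TableInputFormat.STOP_ROW, "user9999");          // row key at which the scan stops (assumed)
        return conf;
    }
}
```

The populated Configuration is what TableInputFormat later receives through setConf(org.apache.hadoop.conf.Configuration).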
public org.apache.hadoop.conf.Configuration getConf()
Returns the current configuration.
Specified by: getConf in interface org.apache.hadoop.conf.Configurable
See Also: Configurable.getConf()

public java.util.List<org.apache.hadoop.mapreduce.InputSplit> getSplits(org.apache.hadoop.mapreduce.JobContext context)
                                                                 throws java.io.IOException,
                                                                        java.lang.InterruptedException
Sets up the splits that will be served as inputs to the map tasks.
Specified by: getSplits in class org.apache.hadoop.mapreduce.InputFormat<org.ojai.Value,org.ojai.Document>
Parameters: context - The current job context.
Throws: java.io.IOException, java.lang.InterruptedException
See Also: InputFormat.getSplits(org.apache.hadoop.mapreduce.JobContext)

public org.apache.hadoop.mapreduce.RecordReader<org.ojai.Value,org.ojai.Document> createRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
                                                                                                      org.apache.hadoop.mapreduce.TaskAttemptContext context)
                                                                                               throws java.io.IOException,
                                                                                                      java.lang.InterruptedException
Creates a record reader for a given split.
Specified by: createRecordReader in class org.apache.hadoop.mapreduce.InputFormat<org.ojai.Value,org.ojai.Document>
Parameters: split - The input split. context - Current job context.
Throws: java.io.IOException, java.lang.InterruptedException
See Also: InputFormat.createRecordReader(org.apache.hadoop.mapreduce.InputSplit, org.apache.hadoop.mapreduce.TaskAttemptContext)

public void setConf(org.apache.hadoop.conf.Configuration arg0)
Sets parameters in the configuration.
Specified by: setConf in interface org.apache.hadoop.conf.Configurable
Parameters: arg0 - Configuration object with parameters for TableInputFormat.
See Also: Configurable.setConf(org.apache.hadoop.conf.Configuration)
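Taken together, setConf(Configuration) supplies the parameters, getSplits(JobContext) carves the table into input splits, and createRecordReader(InputSplit, TaskAttemptContext) hands each split to a mapper as (org.ojai.Value, org.ojai.Document) pairs. The following driver sketch wires the class into a standard MapReduce job; the mapper class, output types, and paths are hypothetical and do not come from this page.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.ojai.Document;
import org.ojai.Value;
// Import for TableInputFormat itself is omitted because its package is not shown on this page.

public class TableScanDriver {

    // Hypothetical mapper: each input record arrives as the (Value, Document)
    // pair produced by TableInputFormat's record reader.
    public static class DocCountMapper extends Mapper<Value, Document, Text, LongWritable> {
        @Override
        protected void map(Value id, Document doc, Context context)
                throws java.io.IOException, InterruptedException {
            // Emit one count per document, keyed by the document id's string form.
            context.write(new Text(id.toString()), new LongWritable(1L));
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Read back by TableInputFormat.setConf(); the table path is an illustrative assumption.
        conf.set(TableInputFormat.INPUT_TABLE, "/apps/my_table");

        Job job = Job.getInstance(conf, "ojai-table-scan");
        job.setJarByClass(TableScanDriver.class);
        job.setInputFormatClass(TableInputFormat.class); // wires getSplits()/createRecordReader() into the job
        job.setMapperClass(DocCountMapper.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        FileOutputFormat.setOutputPath(job, new Path(args[0])); // hypothetical output location
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
```

With no reducer configured, the single default reduce task simply passes the mapper output through; for a pure scan you could also call job.setNumReduceTasks(0).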