public static class SparkPigSplit.FileSparkPigSplit extends org.apache.hadoop.mapreduce.lib.input.FileSplit implements SparkPigSplit
Nested classes/interfaces inherited from interface SparkPigSplit: SparkPigSplit.FileSparkPigSplit, SparkPigSplit.GenericSparkPigSplit

| Constructor and Description |
|---|
| FileSparkPigSplit() The Spark executor's deserializer calls this, and we have to instantiate a default wrapped object (see the sketch after this table). |
| FileSparkPigSplit(PigSplit pigSplit) |
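
The no-argument constructor exists so a deserializer on the executor can first create an empty wrapper and then repopulate it. Below is a minimal sketch of that rebuild-then-populate pattern using the write/readFields methods documented on this page; the import paths and the origin of the PigSplit instance are assumptions, and Spark's actual serializer may take a different route (for example Kryo or Java serialization) while relying on the same idea.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.backend.hadoop.executionengine.spark.SparkPigSplit;

public class FileSparkPigSplitRoundTrip {

    // pigSplit is assumed to come from the job's InputFormat; building one by
    // hand is outside the scope of this sketch.
    static SparkPigSplit.FileSparkPigSplit roundTrip(PigSplit pigSplit) throws IOException {
        SparkPigSplit.FileSparkPigSplit original =
                new SparkPigSplit.FileSparkPigSplit(pigSplit);

        // Driver side: serialize the wrapper with Writable.write(DataOutput).
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // Executor side: the deserializer first needs the no-arg constructor,
        // then restores the wrapped split via readFields(DataInput).
        SparkPigSplit.FileSparkPigSplit copy = new SparkPigSplit.FileSparkPigSplit();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        return copy;
    }
}
```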

| Modifier and Type | Method and Description |
|---|---|
| boolean | disableCounter() |
| org.apache.hadoop.conf.Configuration | getConf() |
| long | getLength() |
| long | getLength(int idx) |
| org.apache.hadoop.mapred.SplitLocationInfo[] | getLocationInfo() |
| String[] | getLocations() |
| int | getNumPaths() |
| org.apache.hadoop.fs.Path | getPath() |
| int | getSplitIndex() |
| PigSplit | getWrappedPigSplit() |
| org.apache.hadoop.mapreduce.InputSplit | getWrappedSplit() |
| org.apache.hadoop.mapreduce.InputSplit | getWrappedSplit(int idx) |
| boolean | isMultiInputs() |
| void | readFields(DataInput is) |
| void | setConf(org.apache.hadoop.conf.Configuration conf) |
| void | setCurrentIdx(int idx) |
| void | setDisableCounter(boolean disableCounter) |
| void | setMultiInputs(boolean b) |
| String | toString() |
| void | write(DataOutput os) |

public FileSparkPigSplit()
The Spark executor's deserializer calls this, and we have to instantiate a default wrapped object.

public FileSparkPigSplit(PigSplit pigSplit)

public org.apache.hadoop.mapred.SplitLocationInfo[] getLocationInfo() throws IOException
Specified by: getLocationInfo in interface SparkPigSplit
Overrides: getLocationInfo in class org.apache.hadoop.mapreduce.lib.input.FileSplit
Throws: IOException

public String toString()
Overrides: toString in class org.apache.hadoop.mapreduce.lib.input.FileSplit

public long getLength()
Overrides: getLength in class org.apache.hadoop.mapreduce.lib.input.FileSplit

public String[] getLocations() throws IOException
Overrides: getLocations in class org.apache.hadoop.mapreduce.lib.input.FileSplit
Throws: IOException

public org.apache.hadoop.mapreduce.InputSplit getWrappedSplit()
Specified by: getWrappedSplit in interface SparkPigSplit

public org.apache.hadoop.mapreduce.InputSplit getWrappedSplit(int idx)
Specified by: getWrappedSplit in interface SparkPigSplit

public long getLength(int idx) throws IOException, InterruptedException
Specified by: getLength in interface SparkPigSplit
Throws: IOException, InterruptedException

public void readFields(DataInput is) throws IOException
Specified by: readFields in interface org.apache.hadoop.io.Writable
Overrides: readFields in class org.apache.hadoop.mapreduce.lib.input.FileSplit
Throws: IOException

public void write(DataOutput os) throws IOException
Specified by: write in interface org.apache.hadoop.io.Writable
Overrides: write in class org.apache.hadoop.mapreduce.lib.input.FileSplit
Throws: IOException

public int getSplitIndex()
Specified by: getSplitIndex in interface SparkPigSplit

public void setMultiInputs(boolean b)
Specified by: setMultiInputs in interface SparkPigSplit

public boolean isMultiInputs()
Specified by: isMultiInputs in interface SparkPigSplit

public org.apache.hadoop.conf.Configuration getConf()
Specified by: getConf in interface org.apache.hadoop.conf.Configurable

public void setConf(org.apache.hadoop.conf.Configuration conf)
Specified by: setConf in interface org.apache.hadoop.conf.Configurable

public int getNumPaths()
Specified by: getNumPaths in interface SparkPigSplit

public void setDisableCounter(boolean disableCounter)
Specified by: setDisableCounter in interface SparkPigSplit

public boolean disableCounter()
Specified by: disableCounter in interface SparkPigSplit

public void setCurrentIdx(int idx)
Specified by: setCurrentIdx in interface SparkPigSplit

public PigSplit getWrappedPigSplit()
Specified by: getWrappedPigSplit in interface SparkPigSplit

public org.apache.hadoop.fs.Path getPath()
Overrides: getPath in class org.apache.hadoop.mapreduce.lib.input.FileSplit
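
For reference, a small usage sketch of the accessors above, iterating over the splits bundled inside the wrapped PigSplit. Variable names are illustrative, and the assumption that the FileSplit-style accessors (getPath(), getLength(), getLocations()) are backed by the wrapped PigSplit is inferred from the wrapper pattern rather than stated on this page.

```java
import java.io.IOException;
import java.util.Arrays;

import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit;
import org.apache.pig.backend.hadoop.executionengine.spark.SparkPigSplit;

public class FileSparkPigSplitInspector {

    // Print the information a FileSparkPigSplit exposes, e.g. when debugging
    // split placement on the executor side.
    static void describe(SparkPigSplit.FileSparkPigSplit split)
            throws IOException, InterruptedException {
        PigSplit wrapped = split.getWrappedPigSplit();

        System.out.println("path         = " + split.getPath());
        System.out.println("total length = " + split.getLength());
        System.out.println("locations    = " + Arrays.toString(split.getLocations()));
        System.out.println("multi-inputs = " + split.isMultiInputs());
        System.out.println("split index  = " + split.getSplitIndex());
        System.out.println("wrapped      = " + wrapped);

        // A PigSplit can bundle several underlying input splits; getLength(int)
        // and getWrappedSplit(int) address them individually.
        for (int i = 0; i < split.getNumPaths(); i++) {
            InputSplit inner = split.getWrappedSplit(i);
            System.out.println("  [" + i + "] " + inner + " (" + split.getLength(i) + " bytes)");
        }
    }
}
```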