public class GiraphHCatInputFormat
extends org.apache.hcatalog.mapreduce.HCatBaseInputFormat
Provides functionality similar to HCatInputFormat,
but allows for different data sources (vertex and edge data).

Modifier and Type | Field and Description |
---|---|
static String |
EDGE_INPUT_JOB_INFO
Edge input job info for HCatalog.
|
static String |
VERTEX_INPUT_JOB_INFO
Vertex input job info for HCatalog.
|
Constructor and Description |
---|
GiraphHCatInputFormat() |
Modifier and Type | Method and Description |
---|---|
org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.WritableComparable,org.apache.hcatalog.data.HCatRecord> |
createEdgeRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
Create a RecordReader for edges. |
org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.WritableComparable,org.apache.hcatalog.data.HCatRecord> |
createVertexRecordReader(org.apache.hadoop.mapreduce.InputSplit split,
org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
Create a RecordReader for vertices. |
List<org.apache.hadoop.mapreduce.InputSplit> |
getEdgeSplits(org.apache.hadoop.mapreduce.JobContext jobContext)
Get edge input splits.
|
static org.apache.hcatalog.data.schema.HCatSchema |
getEdgeTableSchema(org.apache.hadoop.conf.Configuration conf)
Get edge input table schema.
|
List<org.apache.hadoop.mapreduce.InputSplit> |
getVertexSplits(org.apache.hadoop.mapreduce.JobContext jobContext)
Get vertex input splits.
|
static org.apache.hcatalog.data.schema.HCatSchema |
getVertexTableSchema(org.apache.hadoop.conf.Configuration conf)
Get vertex input table schema.
|
static void |
setEdgeInput(org.apache.hadoop.mapreduce.Job job,
org.apache.hcatalog.mapreduce.InputJobInfo inputJobInfo)
Set edge InputJobInfo. |
static void |
setVertexInput(org.apache.hadoop.mapreduce.Job job,
org.apache.hcatalog.mapreduce.InputJobInfo inputJobInfo)
Set vertex InputJobInfo. |
public static final String VERTEX_INPUT_JOB_INFO
public static final String EDGE_INPUT_JOB_INFO
public static void setVertexInput(org.apache.hadoop.mapreduce.Job job, org.apache.hcatalog.mapreduce.InputJobInfo inputJobInfo) throws IOException
Set vertex InputJobInfo.
Parameters:
job - The job
inputJobInfo - Vertex input job info
Throws:
IOException
public static void setEdgeInput(org.apache.hadoop.mapreduce.Job job, org.apache.hcatalog.mapreduce.InputJobInfo inputJobInfo) throws IOException
Set edge InputJobInfo.
Parameters:
job - The job
inputJobInfo - Edge input job info
Throws:
IOException
public static org.apache.hcatalog.data.schema.HCatSchema getVertexTableSchema(org.apache.hadoop.conf.Configuration conf) throws IOException
Get vertex input table schema.
Parameters:
conf - Job configuration
Throws:
IOException
public static org.apache.hcatalog.data.schema.HCatSchema getEdgeTableSchema(org.apache.hadoop.conf.Configuration conf) throws IOException
Get edge input table schema.
Parameters:
conf - Job configuration
Throws:
IOException
public List<org.apache.hadoop.mapreduce.InputSplit> getVertexSplits(org.apache.hadoop.mapreduce.JobContext jobContext) throws IOException, InterruptedException
Get vertex input splits.
Parameters:
jobContext - Job context
Returns:
List of InputSplits
Throws:
IOException
InterruptedException
public List<org.apache.hadoop.mapreduce.InputSplit> getEdgeSplits(org.apache.hadoop.mapreduce.JobContext jobContext) throws IOException, InterruptedException
Get edge input splits.
Parameters:
jobContext - Job context
Returns:
List of InputSplits
Throws:
IOException
InterruptedException
public org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.WritableComparable,org.apache.hcatalog.data.HCatRecord> createVertexRecordReader(org.apache.hadoop.mapreduce.InputSplit split, org.apache.hadoop.mapreduce.TaskAttemptContext taskContext) throws IOException, InterruptedException
Create a RecordReader for vertices.
Parameters:
split - Input split
taskContext - Context
Returns:
RecordReader for vertices
Throws:
IOException
InterruptedException
public org.apache.hadoop.mapreduce.RecordReader<org.apache.hadoop.io.WritableComparable,org.apache.hcatalog.data.HCatRecord> createEdgeRecordReader(org.apache.hadoop.mapreduce.InputSplit split, org.apache.hadoop.mapreduce.TaskAttemptContext taskContext) throws IOException, InterruptedException
Create a RecordReader for edges.
Parameters:
split - Input split
taskContext - Context
Returns:
RecordReader for edges
Throws:
IOException
InterruptedException
Copyright © 2011-2019 The Apache Software Foundation. All Rights Reserved.