public class EsStorage
extends org.apache.pig.LoadFunc
implements org.apache.pig.LoadMetadata, org.apache.pig.LoadPushDown, org.apache.pig.StoreFuncInterface, org.apache.pig.StoreMetadata
Pig load/store function for reading data from and writing data to an ElasticSearch index. Typical usage:

A = LOAD 'twitter/_search?q=kimchy' USING org.elasticsearch.hadoop.pig.ESStorage();
STORE A INTO '<index>/<type>' USING org.elasticsearch.hadoop.pig.ESStorage();

The ElasticSearch host/port can be specified through Hadoop properties (see the package description) or passed to the EsStorage(String...) constructor.
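For example, a minimal sketch of passing the connection settings directly to the constructor instead of through Hadoop properties (the es.nodes and es.port property names and the 'twitter/tweet' target are assumptions used for illustration, not taken from this page):

-- host/port supplied as constructor configuration (assumed property names)
A = LOAD 'twitter/_search?q=kimchy' USING org.elasticsearch.hadoop.pig.EsStorage('es.nodes = localhost', 'es.port = 9200');
STORE A INTO 'twitter/tweet' USING org.elasticsearch.hadoop.pig.EsStorage('es.nodes = localhost', 'es.port = 9200');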
| Constructor and Description |
|---|
| EsStorage() |
| EsStorage(java.lang.String... configuration) |
| Modifier and Type | Method and Description |
|---|---|
| void | checkSchema(org.apache.pig.ResourceSchema s) |
| void | cleanupOnFailure(java.lang.String location, org.apache.hadoop.mapreduce.Job job) |
| void | cleanupOnSuccess(java.lang.String location, org.apache.hadoop.mapreduce.Job job) |
| java.util.List<org.apache.pig.LoadPushDown.OperatorSet> | getFeatures() |
| org.apache.hadoop.mapreduce.InputFormat | getInputFormat() |
| org.apache.pig.data.Tuple | getNext() |
| org.apache.hadoop.mapreduce.OutputFormat<java.lang.Object,java.util.Map<org.apache.hadoop.io.Writable,org.apache.hadoop.io.Writable>> | getOutputFormat() |
| java.lang.String[] | getPartitionKeys(java.lang.String location, org.apache.hadoop.mapreduce.Job job) |
| org.apache.pig.ResourceSchema | getSchema(java.lang.String location, org.apache.hadoop.mapreduce.Job job) |
| org.apache.pig.ResourceStatistics | getStatistics(java.lang.String location, org.apache.hadoop.mapreduce.Job job) |
| void | prepareToRead(org.apache.hadoop.mapreduce.RecordReader reader, org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit split) |
| void | prepareToWrite(org.apache.hadoop.mapreduce.RecordWriter writer) |
| org.apache.pig.LoadPushDown.RequiredFieldResponse | pushProjection(org.apache.pig.LoadPushDown.RequiredFieldList requiredFieldList) |
| void | putNext(org.apache.pig.data.Tuple t) |
| java.lang.String | relativeToAbsolutePath(java.lang.String location, org.apache.hadoop.fs.Path curDir) |
| java.lang.String | relToAbsPathForStoreLocation(java.lang.String location, org.apache.hadoop.fs.Path curDir) |
| void | setLocation(java.lang.String location, org.apache.hadoop.mapreduce.Job job) |
| void | setPartitionFilter(org.apache.pig.Expression partitionFilter) |
| void | setStoreFuncUDFContextSignature(java.lang.String signature) |
| void | setStoreLocation(java.lang.String location, org.apache.hadoop.mapreduce.Job job) |
| void | setUDFContextSignature(java.lang.String signature) |
| void | storeSchema(org.apache.pig.ResourceSchema schema, java.lang.String location, org.apache.hadoop.mapreduce.Job job) |
| void | storeStatistics(org.apache.pig.ResourceStatistics stats, java.lang.String location, org.apache.hadoop.mapreduce.Job job) |
public EsStorage()
public EsStorage(java.lang.String... configuration)
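The varargs constructor accepts elasticsearch-hadoop settings as individual key=value strings, applied on top of any Hadoop properties. A minimal sketch (the es.index.auto.create and es.http.timeout property names and the 'radio/artists' resource are illustrative assumptions, not part of this signature):

-- assumed settings passed directly to the constructor rather than via Hadoop properties
STORE B INTO 'radio/artists' USING org.elasticsearch.hadoop.pig.EsStorage('es.index.auto.create = false', 'es.http.timeout = 5m');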
public java.lang.String relToAbsPathForStoreLocation(java.lang.String location, org.apache.hadoop.fs.Path curDir) throws java.io.IOException
Specified by: relToAbsPathForStoreLocation in interface org.apache.pig.StoreFuncInterface
Throws: java.io.IOException

public void setStoreFuncUDFContextSignature(java.lang.String signature)
Specified by: setStoreFuncUDFContextSignature in interface org.apache.pig.StoreFuncInterface

public void checkSchema(org.apache.pig.ResourceSchema s) throws java.io.IOException
Specified by: checkSchema in interface org.apache.pig.StoreFuncInterface
Throws: java.io.IOException

public void setStoreLocation(java.lang.String location, org.apache.hadoop.mapreduce.Job job) throws java.io.IOException
Specified by: setStoreLocation in interface org.apache.pig.StoreFuncInterface
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.OutputFormat<java.lang.Object,java.util.Map<org.apache.hadoop.io.Writable,org.apache.hadoop.io.Writable>> getOutputFormat() throws java.io.IOException
Specified by: getOutputFormat in interface org.apache.pig.StoreFuncInterface
Throws: java.io.IOException

public void prepareToWrite(org.apache.hadoop.mapreduce.RecordWriter writer) throws java.io.IOException
Specified by: prepareToWrite in interface org.apache.pig.StoreFuncInterface
Throws: java.io.IOException

public void putNext(org.apache.pig.data.Tuple t) throws java.io.IOException
Specified by: putNext in interface org.apache.pig.StoreFuncInterface
Throws: java.io.IOException

public void cleanupOnFailure(java.lang.String location, org.apache.hadoop.mapreduce.Job job) throws java.io.IOException
Specified by: cleanupOnFailure in interface org.apache.pig.StoreFuncInterface
Throws: java.io.IOException

public void cleanupOnSuccess(java.lang.String location, org.apache.hadoop.mapreduce.Job job) throws java.io.IOException
Specified by: cleanupOnSuccess in interface org.apache.pig.StoreFuncInterface
Throws: java.io.IOException

public void storeStatistics(org.apache.pig.ResourceStatistics stats, java.lang.String location, org.apache.hadoop.mapreduce.Job job) throws java.io.IOException
Specified by: storeStatistics in interface org.apache.pig.StoreMetadata
Throws: java.io.IOException

public void storeSchema(org.apache.pig.ResourceSchema schema, java.lang.String location, org.apache.hadoop.mapreduce.Job job) throws java.io.IOException
Specified by: storeSchema in interface org.apache.pig.StoreMetadata
Throws: java.io.IOException
public void setLocation(java.lang.String location, org.apache.hadoop.mapreduce.Job job) throws java.io.IOException
Specified by: setLocation in class org.apache.pig.LoadFunc
Throws: java.io.IOException

public java.lang.String relativeToAbsolutePath(java.lang.String location, org.apache.hadoop.fs.Path curDir) throws java.io.IOException
Overrides: relativeToAbsolutePath in class org.apache.pig.LoadFunc
Throws: java.io.IOException

public org.apache.hadoop.mapreduce.InputFormat getInputFormat() throws java.io.IOException
Specified by: getInputFormat in class org.apache.pig.LoadFunc
Throws: java.io.IOException

public void prepareToRead(org.apache.hadoop.mapreduce.RecordReader reader, org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit split) throws java.io.IOException
Specified by: prepareToRead in class org.apache.pig.LoadFunc
Throws: java.io.IOException

public org.apache.pig.data.Tuple getNext() throws java.io.IOException
Specified by: getNext in class org.apache.pig.LoadFunc
Throws: java.io.IOException
public java.util.List<org.apache.pig.LoadPushDown.OperatorSet> getFeatures()
Specified by: getFeatures in interface org.apache.pig.LoadPushDown

public org.apache.pig.LoadPushDown.RequiredFieldResponse pushProjection(org.apache.pig.LoadPushDown.RequiredFieldList requiredFieldList) throws org.apache.pig.impl.logicalLayer.FrontendException
Specified by: pushProjection in interface org.apache.pig.LoadPushDown
Throws: org.apache.pig.impl.logicalLayer.FrontendException

public org.apache.pig.ResourceSchema getSchema(java.lang.String location, org.apache.hadoop.mapreduce.Job job) throws java.io.IOException
Specified by: getSchema in interface org.apache.pig.LoadMetadata
Throws: java.io.IOException

public org.apache.pig.ResourceStatistics getStatistics(java.lang.String location, org.apache.hadoop.mapreduce.Job job) throws java.io.IOException
Specified by: getStatistics in interface org.apache.pig.LoadMetadata
Throws: java.io.IOException

public java.lang.String[] getPartitionKeys(java.lang.String location, org.apache.hadoop.mapreduce.Job job) throws java.io.IOException
Specified by: getPartitionKeys in interface org.apache.pig.LoadMetadata
Throws: java.io.IOException

public void setPartitionFilter(org.apache.pig.Expression partitionFilter) throws java.io.IOException
Specified by: setPartitionFilter in interface org.apache.pig.LoadMetadata
Throws: java.io.IOException
public void setUDFContextSignature(java.lang.String signature)
Overrides: setUDFContextSignature in class org.apache.pig.LoadFunc