
Example 16 with Mutation

use of org.apache.hadoop.hbase.client.Mutation in project flink by apache.

the class HBaseWriteExample method main.

// *************************************************************************
//     PROGRAM
// *************************************************************************
public static void main(String[] args) throws Exception {
    if (!parseParameters(args)) {
        return;
    }
    // set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    // get input data
    DataSet<String> text = getTextDataSet(env);
    // split up the lines in pairs (2-tuples) containing: (word,1),
    // then group by the word (field 0) and sum the counts (field 1)
    DataSet<Tuple2<String, Integer>> counts = text.flatMap(new Tokenizer()).groupBy(0).sum(1);
    // emit result
    Job job = Job.getInstance();
    job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, outputTableName);
    // TODO is "mapred.output.dir" really useful?
    job.getConfiguration().set("mapred.output.dir", HBaseFlinkTestConstants.TMP_DIR);
    counts.map(new RichMapFunction<Tuple2<String, Integer>, Tuple2<Text, Mutation>>() {

        private transient Tuple2<Text, Mutation> reuse;

        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            reuse = new Tuple2<Text, Mutation>();
        }

        @Override
        public Tuple2<Text, Mutation> map(Tuple2<String, Integer> t) throws Exception {
            reuse.f0 = new Text(t.f0);
            Put put = new Put(t.f0.getBytes(ConfigConstants.DEFAULT_CHARSET));
            put.add(HBaseFlinkTestConstants.CF_SOME, HBaseFlinkTestConstants.Q_SOME, Bytes.toBytes(t.f1));
            reuse.f1 = put;
            return reuse;
        }
    }).output(new HadoopOutputFormat<Text, Mutation>(new TableOutputFormat<Text>(), job));
    // execute program
    env.execute("WordCount (HBase sink) Example");
}
Also used : ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment) Configuration(org.apache.flink.configuration.Configuration) Text(org.apache.hadoop.io.Text) Put(org.apache.hadoop.hbase.client.Put) TableOutputFormat(org.apache.hadoop.hbase.mapreduce.TableOutputFormat) Tuple2(org.apache.flink.api.java.tuple.Tuple2) RichMapFunction(org.apache.flink.api.common.functions.RichMapFunction) Mutation(org.apache.hadoop.hbase.client.Mutation) Job(org.apache.hadoop.mapreduce.Job)
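The snippet relies on a Tokenizer whose body is not shown. A minimal sketch following the standard Flink WordCount tokenizer (an assumption about the actual class, not a copy of it) looks like this, assuming the usual imports for FlatMapFunction (org.apache.flink.api.common.functions.FlatMapFunction) and Collector (org.apache.flink.util.Collector):

public static final class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> {

    @Override
    public void flatMap(String value, Collector<Tuple2<String, Integer>> out) {
        // normalize the line and split it into words
        String[] tokens = value.toLowerCase().split("\\W+");
        // emit a (word, 1) pair for every non-empty token
        for (String token : tokens) {
            if (token.length() > 0) {
                out.collect(new Tuple2<>(token, 1));
            }
        }
    }
}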

Example 17 with Mutation

use of org.apache.hadoop.hbase.client.Mutation in project beam by apache.

the class HBaseMutationCoderTest method testMutationEncoding.

@Test
public void testMutationEncoding() throws Exception {
    Mutation put = new Put("1".getBytes());
    CoderProperties.structuralValueDecodeEncodeEqual(CODER, put);
    Mutation delete = new Delete("1".getBytes());
    CoderProperties.structuralValueDecodeEncodeEqual(CODER, delete);
    Mutation increment = new Increment("1".getBytes());
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("Only Put and Delete are supported");
    CoderProperties.coderDecodeEncodeEqual(CODER, increment);
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Increment(org.apache.hadoop.hbase.client.Increment) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put) Test(org.junit.Test)
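The constraint the test exercises is that only Put and Delete mutations can be encoded; other mutation types such as Increment are rejected with the message shown above. A hypothetical helper that mirrors this check (an illustration only, not the coder's actual implementation):

// Reject any Mutation other than a Put or a Delete before encoding.
static void checkSupported(Mutation mutation) {
    if (!(mutation instanceof Put) && !(mutation instanceof Delete)) {
        throw new IllegalArgumentException("Only Put and Delete are supported");
    }
}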

Example 18 with Mutation

use of org.apache.hadoop.hbase.client.Mutation in project beam by apache.

the class HBaseIOTest method makeBadWrite.

private static KV<byte[], Iterable<Mutation>> makeBadWrite(String key) {
    // The Put carries only a row key and no cells, which is what makes this write invalid.
    Put put = new Put(key.getBytes());
    List<Mutation> mutations = new ArrayList<>();
    mutations.add(put);
    return KV.of(key.getBytes(StandardCharsets.UTF_8), (Iterable<Mutation>) mutations);
}
Also used : ArrayList(java.util.ArrayList) Mutation(org.apache.hadoop.hbase.client.Mutation) Put(org.apache.hadoop.hbase.client.Put)
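For contrast, a well-formed write would attach at least one cell to the Put. The sketch below is a hypothetical counterpart; the makeGoodWrite name and the "info"/"name"/"value" constants are placeholders, not helpers from the actual test:

private static KV<byte[], Iterable<Mutation>> makeGoodWrite(String key) {
    Put put = new Put(key.getBytes(StandardCharsets.UTF_8));
    // attach an actual cell (family, qualifier, value) so the Put is a valid write
    put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("value"));
    List<Mutation> mutations = new ArrayList<>();
    mutations.add(put);
    return KV.of(key.getBytes(StandardCharsets.UTF_8), (Iterable<Mutation>) mutations);
}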

Example 19 with Mutation

use of org.apache.hadoop.hbase.client.Mutation in project beam by apache.

the class HBaseIOTest method writeData.

/**
     * Helper function to write {@code numRows} generated rows to the given table.
     */
private static void writeData(String tableId, int numRows) throws Exception {
    Connection connection = admin.getConnection();
    TableName tableName = TableName.valueOf(tableId);
    BufferedMutator mutator = connection.getBufferedMutator(tableName);
    List<Mutation> mutations = makeTableData(numRows);
    mutator.mutate(mutations);
    mutator.flush();
    mutator.close();
}
Also used : TableName(org.apache.hadoop.hbase.TableName) BufferedMutator(org.apache.hadoop.hbase.client.BufferedMutator) Connection(org.apache.hadoop.hbase.client.Connection) Mutation(org.apache.hadoop.hbase.client.Mutation)
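The makeTableData helper referenced above is not shown. A minimal sketch that produces numRows single-cell Puts could look like the following; the column family and qualifier names are assumptions, not the test's actual constants:

private static List<Mutation> makeTableData(int numRows) {
    List<Mutation> mutations = new ArrayList<>(numRows);
    for (int i = 0; i < numRows; i++) {
        // one Put per row, each carrying a single cell
        Put put = new Put(Bytes.toBytes("row" + i));
        put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("value" + i));
        mutations.add(put);
    }
    return mutations;
}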

Example 20 with Mutation

use of org.apache.hadoop.hbase.client.Mutation in project gora by apache.

the class HBaseTableConnection method flushCommits.

// Drain every buffered mutation through a single BufferedMutator, then flush and close it.
public void flushCommits() throws IOException {
    BufferedMutator bufMutator = connection.getBufferedMutator(this.tableName);
    for (ConcurrentLinkedQueue<Mutation> buffer : bPool) {
        while (!buffer.isEmpty()) {
            Mutation m = buffer.poll();
            bufMutator.mutate(m);
        }
    }
    bufMutator.flush();
    bufMutator.close();
}
Also used : BufferedMutator(org.apache.hadoop.hbase.client.BufferedMutator) Mutation(org.apache.hadoop.hbase.client.Mutation)
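As the loop shows, bPool is an iterable of ConcurrentLinkedQueue<Mutation> buffers that flushCommits() drains through one BufferedMutator. A minimal, self-contained sketch of how such buffers might be filled beforehand; the field setup and class name here are assumed for illustration, not Gora's actual code:

class MutationBufferSketch {

    // one mutation queue per caller, mirroring the bPool iterated above
    private final List<ConcurrentLinkedQueue<Mutation>> bPool = new CopyOnWriteArrayList<>();

    void queueSampleMutations() {
        ConcurrentLinkedQueue<Mutation> buffer = new ConcurrentLinkedQueue<>();
        buffer.add(new Put(Bytes.toBytes("row-1")));
        buffer.add(new Delete(Bytes.toBytes("row-2")));
        // register the buffer so a later flushCommits()-style drain can pick it up
        bPool.add(buffer);
    }
}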

Aggregations

Mutation (org.apache.hadoop.hbase.client.Mutation) 139
Put (org.apache.hadoop.hbase.client.Put) 53
ArrayList (java.util.ArrayList) 46
IOException (java.io.IOException) 35
Delete (org.apache.hadoop.hbase.client.Delete) 32
ImmutableBytesPtr (org.apache.phoenix.hbase.index.util.ImmutableBytesPtr) 31
List (java.util.List) 28
Cell (org.apache.hadoop.hbase.Cell) 25
Pair (org.apache.hadoop.hbase.util.Pair) 23
MetaDataMutationResult (org.apache.phoenix.coprocessor.MetaDataProtocol.MetaDataMutationResult) 23
HashMap (java.util.HashMap) 19
PTable (org.apache.phoenix.schema.PTable) 18
DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException) 17
MetaDataResponse (org.apache.phoenix.coprocessor.generated.MetaDataProtos.MetaDataResponse) 15
Region (org.apache.hadoop.hbase.regionserver.Region) 14
RowLock (org.apache.hadoop.hbase.regionserver.Region.RowLock) 14
Test (org.junit.Test) 14
MutationCode (org.apache.phoenix.coprocessor.MetaDataProtocol.MutationCode) 13
HTableInterface (org.apache.hadoop.hbase.client.HTableInterface) 12
MutationProto (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto) 12