Example usage of org.apache.hadoop.hbase.client.Mutation in the Apache Flink project: class HBaseWriteExample, method main.
// *************************************************************************
// PROGRAM
// *************************************************************************
/**
 * Runs a batch WordCount job and writes the (word, count) results into an
 * HBase table through Hadoop's TableOutputFormat wrapped in Flink's
 * HadoopOutputFormat.
 *
 * @param args command-line arguments, validated by parseParameters
 * @throws Exception if job configuration or execution fails
 */
public static void main(String[] args) throws Exception {
// Bail out early when the CLI arguments are invalid or incomplete.
if (!parseParameters(args)) {
return;
}
// set up the execution environment
final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
// get input data
DataSet<String> text = getTextDataSet(env);
DataSet<Tuple2<String, Integer>> counts = // split up the lines in pairs (2-tuples) containing: (word,1)
text.flatMap(new Tokenizer()).groupBy(0).sum(1);
// emit result: configure a Hadoop Job so TableOutputFormat targets the output table
Job job = Job.getInstance();
job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, outputTableName);
// TODO is "mapred.output.dir" really useful?
job.getConfiguration().set("mapred.output.dir", HBaseFlinkTestConstants.TMP_DIR);
// Map each (word, count) tuple to a (row key, Put) pair consumable by HBase.
counts.map(new RichMapFunction<Tuple2<String, Integer>, Tuple2<Text, Mutation>>() {
// Output tuple reused across map() calls to avoid per-record allocation;
// transient because the function instance is serialized to the cluster.
private transient Tuple2<Text, Mutation> reuse;
@Override
public void open(Configuration parameters) throws Exception {
super.open(parameters);
reuse = new Tuple2<Text, Mutation>();
}
@Override
public Tuple2<Text, Mutation> map(Tuple2<String, Integer> t) throws Exception {
// Row key is the word itself; the count is stored under CF_SOME:Q_SOME.
reuse.f0 = new Text(t.f0);
Put put = new Put(t.f0.getBytes(ConfigConstants.DEFAULT_CHARSET));
put.add(HBaseFlinkTestConstants.CF_SOME, HBaseFlinkTestConstants.Q_SOME, Bytes.toBytes(t.f1));
reuse.f1 = put;
return reuse;
}
}).output(new HadoopOutputFormat<Text, Mutation>(new TableOutputFormat<Text>(), job));
// execute program
env.execute("WordCount (HBase sink) Example");
}
Example usage of org.apache.hadoop.hbase.client.Mutation in the Apache Beam project: class HBaseMutationCoderTest, method testMutationEncoding.
/**
 * Verifies that the coder round-trips Put and Delete mutations, and that
 * encoding an Increment fails with "Only Put and Delete are supported".
 *
 * @throws Exception if a coder round-trip fails unexpectedly
 */
@Test
public void testMutationEncoding() throws Exception {
    // Explicit charset: bare getBytes() depends on the platform default
    // encoding, which makes the test environment-sensitive.
    Mutation put = new Put("1".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    CoderProperties.structuralValueDecodeEncodeEqual(CODER, put);
    Mutation delete = new Delete("1".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    CoderProperties.structuralValueDecodeEncodeEqual(CODER, delete);
    // The coder supports only Put and Delete, so Increment must be rejected.
    Mutation increment = new Increment("1".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("Only Put and Delete are supported");
    CoderProperties.coderDecodeEncodeEqual(CODER, increment);
}
Example usage of org.apache.hadoop.hbase.client.Mutation in the Apache Beam project: class HBaseIOTest, method makeBadWrite.
/**
 * Builds a write element pairing a row key with a single Put that carries no
 * cells — presumably an invalid ("bad") write for negative tests; confirm
 * against the consuming test.
 *
 * @param key row key used for both the Put and the returned KV key
 * @return KV of the UTF-8 key bytes and a one-element mutation list
 */
private static KV<byte[], Iterable<Mutation>> makeBadWrite(String key) {
    // Use one explicit charset for both byte conversions; the original mixed
    // platform-default getBytes() (Put) with UTF_8 (KV key), so the two could
    // differ on non-UTF-8 platforms.
    byte[] keyBytes = key.getBytes(StandardCharsets.UTF_8);
    Put put = new Put(keyBytes);
    List<Mutation> mutations = new ArrayList<>();
    mutations.add(put);
    return KV.of(keyBytes, (Iterable<Mutation>) mutations);
}
Example usage of org.apache.hadoop.hbase.client.Mutation in the Apache Beam project: class HBaseIOTest, method writeData.
/**
 * Helper that generates {@code numRows} rows via makeTableData and writes
 * them into the given table. (Original javadoc claimed it returned the rows;
 * the method is void.)
 *
 * @param tableId name of the target HBase table
 * @param numRows number of rows to generate and write
 * @throws Exception if creating the mutator or writing/flushing fails
 */
private static void writeData(String tableId, int numRows) throws Exception {
    // Shared connection owned by `admin` — intentionally not closed here.
    Connection connection = admin.getConnection();
    TableName tableName = TableName.valueOf(tableId);
    // try-with-resources closes the mutator even if mutate()/flush() throws;
    // the original leaked it on failure. close() also flushes, but the
    // explicit flush() keeps the original call sequence.
    try (BufferedMutator mutator = connection.getBufferedMutator(tableName)) {
        List<Mutation> mutations = makeTableData(numRows);
        mutator.mutate(mutations);
        mutator.flush();
    }
}
Example usage of org.apache.hadoop.hbase.client.Mutation in the Apache Gora project: class HBaseTableConnection, method flushCommits.
/**
 * Drains every queued mutation in {@code bPool} into a fresh BufferedMutator
 * and flushes it, committing all pending mutations for this table.
 *
 * @throws IOException if mutating or flushing fails
 */
public void flushCommits() throws IOException {
    // try-with-resources closes the mutator even when mutate()/flush()
    // throws; the original leaked it on failure.
    try (BufferedMutator bufMutator = connection.getBufferedMutator(this.tableName)) {
        for (ConcurrentLinkedQueue<Mutation> buffer : bPool) {
            // Drain via poll() != null rather than isEmpty()+poll(): with a
            // concurrent queue, another thread could empty it between the two
            // calls and the original would pass null to mutate().
            Mutation m;
            while ((m = buffer.poll()) != null) {
                bufMutator.mutate(m);
            }
        }
        bufMutator.flush();
    }
}
Aggregations