
Example 1 with BufferedMutatorParams

Use of org.apache.hadoop.hbase.client.BufferedMutatorParams in project YCSB by brianfrankcooper.

The class HBaseClient10, method getHTable:

public void getHTable(String table) throws IOException {
    final TableName tName = TableName.valueOf(table);
    synchronized (CONNECTION_LOCK) {
        this.currentTable = connection.getTable(tName);
        if (clientSideBuffering) {
            final BufferedMutatorParams p = new BufferedMutatorParams(tName);
            p.writeBufferSize(writeBufferSize);
            this.bufferedMutator = connection.getBufferedMutator(p);
        }
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) BufferedMutatorParams(org.apache.hadoop.hbase.client.BufferedMutatorParams)
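The snippet above only constructs the BufferedMutator. For context, a minimal self-contained sketch of the full lifecycle (create the connection, buffer a Put, flush, close) is shown below; the table name "usertable", the column family "cf", and the 4 MB buffer size are illustrative assumptions, not values taken from YCSB.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.BufferedMutatorParams;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class BufferedMutatorLifecycle {

    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        TableName tName = TableName.valueOf("usertable"); // hypothetical table name
        // Connection and BufferedMutator are both Closeable; try-with-resources
        // guarantees the buffer is flushed and released even on error.
        try (Connection connection = ConnectionFactory.createConnection(conf)) {
            BufferedMutatorParams params = new BufferedMutatorParams(tName)
                    .writeBufferSize(4 * 1024 * 1024); // 4 MB client-side buffer (illustrative)
            try (BufferedMutator mutator = connection.getBufferedMutator(params)) {
                Put put = new Put(Bytes.toBytes("user1"));
                put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("field0"), Bytes.toBytes("value0"));
                // mutate() only buffers; the write is sent when the buffer fills,
                // on an explicit flush(), or when the mutator is closed.
                mutator.mutate(put);
                mutator.flush();
            }
        }
    }
}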

Example 2 with BufferedMutatorParams

Use of org.apache.hadoop.hbase.client.BufferedMutatorParams in project uavstack by uavorg.

The class HBaseDataStore, method insert:

/**
 * msg includes:
 *
 * @param tablename
 * @param entity:
 *            rowkey->cf:column->value, with additional handling of the _timestamp field
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
protected boolean insert(DataStoreMsg msg) {
    // validate based on the TABLE name
    Map[] maps = (Map[]) adaptor.prepareInsertObj(msg, datasource.getDataStoreConnection());
    Map<byte[], Map> entity = maps[0];
    Map<byte[], Long> entityStamp = maps[1];
    String tableName = (String) msg.get(DataStoreProtocol.HBASE_TABLE_NAME);
    // add write buffer
    BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf(tableName));
    params.writeBufferSize(1024 * 1024 * 2);
    try (BufferedMutator table = datasource.getSourceConnect().getBufferedMutator(params)) {
        // get all column families (cf)
        List<Put> puts = Lists.newArrayList();
        Put put = null;
        for (byte[] rowkey : entity.keySet()) {
            // custom timestamp
            put = entityStamp.containsKey(rowkey) ? new Put(rowkey, entityStamp.get(rowkey)) : new Put(rowkey);
            // get the column and value
            for (Object entry : entity.get(rowkey).keySet()) {
                String[] column = ((String) entry).split(":");
                put.addColumn(Bytes.toBytes(column[0]), Bytes.toBytes(column[1]), Bytes.toBytes((String) entity.get(rowkey).get(entry)));
            }
            puts.add(put);
        }
        // submit the batch
        Object[] results = new Object[puts.size()];
        // table.batch(puts, results);
        table.mutate(puts);
        // flush
        table.flush();
        // act on the insert result and return the outcome
        return adaptor.handleInsertResult(results, msg, datasource.getDataStoreConnection());
    } catch (IOException e) {
        log.err(this, "INSERT HBASE TABLE[" + tableName + "] FAIL:" + msg.toJSONString(), e);
        return false;
    }
}
Also used : BufferedMutator(org.apache.hadoop.hbase.client.BufferedMutator) IOException(java.io.IOException) Put(org.apache.hadoop.hbase.client.Put) BufferedMutatorParams(org.apache.hadoop.hbase.client.BufferedMutatorParams) Map(java.util.Map) NavigableMap(java.util.NavigableMap)
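Note that the results array above stays empty: BufferedMutator.mutate() only buffers the Puts and returns nothing, and flush() forces them out, which is why the table.batch(puts, results) call is left commented out. Where per-operation results are actually needed, a Table-based batch is the usual alternative; a minimal sketch, assuming an existing Connection and Put list (the class and method names here are hypothetical):

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;

public class BatchInsertSketch {

    // Sketch only: Table.batch() fills the results array with one outcome per
    // operation (a Result or an exception), which BufferedMutator.mutate() never does.
    static Object[] batchInsert(Connection connection, String tableName, List<Put> puts)
            throws IOException, InterruptedException {
        Object[] results = new Object[puts.size()];
        try (Table table = connection.getTable(TableName.valueOf(tableName))) {
            table.batch(puts, results);
        }
        return results;
    }
}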

Example 3 with BufferedMutatorParams

Use of org.apache.hadoop.hbase.client.BufferedMutatorParams in project YCSB by brianfrankcooper.

The class HBaseClient1, method getHTable:

public void getHTable(String table) throws IOException {
    final TableName tName = TableName.valueOf(table);
    this.currentTable = connection.getTable(tName);
    if (clientSideBuffering) {
        final BufferedMutatorParams p = new BufferedMutatorParams(tName);
        p.writeBufferSize(writeBufferSize);
        this.bufferedMutator = connection.getBufferedMutator(p);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) BufferedMutatorParams(org.apache.hadoop.hbase.client.BufferedMutatorParams)

Example 4 with BufferedMutatorParams

Use of org.apache.hadoop.hbase.client.BufferedMutatorParams in project YCSB by brianfrankcooper.

The class HBaseClient2, method getHTable:

public void getHTable(String table) throws IOException {
    final TableName tName = TableName.valueOf(table);
    this.currentTable = connection.getTable(tName);
    if (clientSideBuffering) {
        final BufferedMutatorParams p = new BufferedMutatorParams(tName);
        p.writeBufferSize(writeBufferSize);
        this.bufferedMutator = connection.getBufferedMutator(p);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) BufferedMutatorParams(org.apache.hadoop.hbase.client.BufferedMutatorParams)

Example 5 with BufferedMutatorParams

Use of org.apache.hadoop.hbase.client.BufferedMutatorParams in project flink by apache.

The class HBaseSinkFunction, method open:

@Override
public void open(Configuration parameters) throws Exception {
    LOG.info("start open ...");
    org.apache.hadoop.conf.Configuration config = prepareRuntimeConfiguration();
    try {
        this.mutationConverter.open();
        this.numPendingRequests = new AtomicLong(0);
        if (null == connection) {
            this.connection = ConnectionFactory.createConnection(config);
        }
        // create a parameter instance, set the table name and custom listener reference.
        BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf(hTableName)).listener(this);
        if (bufferFlushMaxSizeInBytes > 0) {
            params.writeBufferSize(bufferFlushMaxSizeInBytes);
        }
        this.mutator = connection.getBufferedMutator(params);
        if (bufferFlushIntervalMillis > 0 && bufferFlushMaxMutations != 1) {
            this.executor = Executors.newScheduledThreadPool(1, new ExecutorThreadFactory("hbase-upsert-sink-flusher"));
            this.scheduledFuture = this.executor.scheduleWithFixedDelay(() -> {
                if (closed) {
                    return;
                }
                try {
                    flush();
                } catch (Exception e) {
                    // fail the sink and skip the rest of the items
                    // if the failure handler decides to throw an exception
                    failureThrowable.compareAndSet(null, e);
                }
            }, bufferFlushIntervalMillis, bufferFlushIntervalMillis, TimeUnit.MILLISECONDS);
        }
    } catch (TableNotFoundException tnfe) {
        LOG.error("The table " + hTableName + " not found ", tnfe);
        throw new RuntimeException("HBase table '" + hTableName + "' not found.", tnfe);
    } catch (IOException ioe) {
        LOG.error("Exception while creating connection to HBase.", ioe);
        throw new RuntimeException("Cannot create connection to HBase.", ioe);
    }
    LOG.info("end open.");
}
Also used : ExecutorThreadFactory(org.apache.flink.util.concurrent.ExecutorThreadFactory) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) AtomicLong(java.util.concurrent.atomic.AtomicLong) BufferedMutatorParams(org.apache.hadoop.hbase.client.BufferedMutatorParams) IOException(java.io.IOException) RetriesExhaustedWithDetailsException(org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException)
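Example 5 registers the sink itself as the failure callback via listener(this). For reference, a minimal sketch of the BufferedMutator.ExceptionListener contract it implements; the logging-and-rethrow behavior below is illustrative, not what the Flink sink does.

import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;

// A minimal sketch of the ExceptionListener contract. The callback is invoked
// when buffered mutations fail after all retries have been exhausted.
public class LoggingExceptionListener implements BufferedMutator.ExceptionListener {

    @Override
    public void onException(RetriesExhaustedWithDetailsException exception, BufferedMutator mutator)
            throws RetriesExhaustedWithDetailsException {
        for (int i = 0; i < exception.getNumExceptions(); i++) {
            // getRow(i) identifies the mutation that failed; here we only log it.
            System.err.println("Failed to write row " + exception.getRow(i));
        }
        // Rethrowing propagates the failure to the caller of mutate()/flush().
        throw exception;
    }
}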

Aggregations

BufferedMutatorParams (org.apache.hadoop.hbase.client.BufferedMutatorParams) 7
IOException (java.io.IOException) 3
TableName (org.apache.hadoop.hbase.TableName) 3
BufferedMutator (org.apache.hadoop.hbase.client.BufferedMutator) 2
Put (org.apache.hadoop.hbase.client.Put) 2
RetriesExhaustedWithDetailsException (org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException) 2
ArrayList (java.util.ArrayList) 1
Map (java.util.Map) 1
NavigableMap (java.util.NavigableMap) 1
Callable (java.util.concurrent.Callable) 1
ExecutorService (java.util.concurrent.ExecutorService) 1
Future (java.util.concurrent.Future) 1
AtomicLong (java.util.concurrent.atomic.AtomicLong) 1
ExecutorThreadFactory (org.apache.flink.util.concurrent.ExecutorThreadFactory) 1
TableNotFoundException (org.apache.hadoop.hbase.TableNotFoundException) 1
Connection (org.apache.hadoop.hbase.client.Connection) 1