Example use of org.apache.hadoop.hbase.client.HTablePool in project honeycomb by altamiracorp: class BulkLoadMapper, method setup.
@Override
protected void setup(Context context) throws IOException {
    Configuration conf = context.getConfiguration();
    // Pull the job configuration; the separator defaults to a single space.
    char fieldSeparator = conf.get(SEPARATOR, " ").charAt(0);
    columns = conf.getStrings(SQL_COLUMNS);
    String sqlTableName = conf.get(SQL_TABLE);
    String hbaseTableName = conf.get(ConfigConstants.TABLE_NAME);
    String family = conf.get(ConfigConstants.COLUMN_FAMILY);
    // Fail fast if any required configuration variable is missing.
    checkNotNull(conf.get(HConstants.ZOOKEEPER_QUORUM), HConstants.ZOOKEEPER_QUORUM + NOT_SET_ERROR);
    checkNotNull(sqlTableName, SQL_TABLE + NOT_SET_ERROR);
    checkNotNull(columns, SQL_COLUMNS + NOT_SET_ERROR);
    checkNotNull(hbaseTableName, ConfigConstants.TABLE_NAME + NOT_SET_ERROR);
    checkNotNull(family, ConfigConstants.COLUMN_FAMILY + NOT_SET_ERROR);
    // Record the effective configuration for debugging failed jobs.
    LOG.info("Zookeeper: " + conf.get(HConstants.ZOOKEEPER_QUORUM));
    LOG.info("SQL Table: " + sqlTableName);
    LOG.info("HBase Table: " + hbaseTableName);
    LOG.info("HBase Column Family: " + family);
    LOG.info("Input separator: '" + fieldSeparator + "'");
    // Wire up the metadata/store chain backed by a single pooled HTable.
    // NOTE(review): the pool is never closed here — presumably its lifetime
    // matches the mapper's; confirm cleanup happens elsewhere.
    final HTablePool tablePool = new HTablePool(conf, 1);
    HBaseMetadata hbaseMetadata = new HBaseMetadata(new PoolHTableProvider(hbaseTableName, tablePool));
    hbaseMetadata.setColumnFamily(family);
    HBaseStore store = new HBaseStore(hbaseMetadata, null, new MetadataCache(hbaseMetadata));
    tableId = store.getTableId(sqlTableName);
    schema = store.getSchema(sqlTableName);
    mutationFactory = new MutationFactory(store);
    mutationFactory.setColumnFamily(family);
    rowParser = new RowParser(schema, columns, fieldSeparator);
    // Verify the configured SQL columns line up with the table's schema.
    checkSqlColumnsMatch(sqlTableName);
}
Example use of org.apache.hadoop.hbase.client.HTablePool in project jstorm by alibaba: class AbstractHBaseClient, method initFromStormConf.
public void initFromStormConf(Map stormConf) {
    // Build the HBase configuration from the storm config and open the table pool.
    logger.info("init hbase client.");
    final Configuration hbaseConf = makeConf(stormConf);
    this.hTablePool = new HTablePool(hbaseConf, TABLE_POOL_SIZE);
    logger.info("finished init hbase client.");
}
Example use of org.apache.hadoop.hbase.client.HTablePool in project Solbase by Photobucket: class CSVFileImporter, method main.
/**
 * Imports each line of the given CSV file into HBase via {@code process}.
 *
 * @param args args[0] is the path of the CSV file to import
 */
public static void main(String[] args) {
    if (args.length < 1) {
        System.out.println("Usage: java example.CSVFileImporter <csv filename>");
        // Exit nonzero: a missing argument is an error, not success.
        System.exit(1);
    }
    @SuppressWarnings("deprecation") HBaseConfiguration conf = new HBaseConfiguration();
    conf.set("hbase.zookeeper.quorum", "localhost");
    conf.set("hbase.zookeeper.property.clientPort", "2181");
    conf.setInt("hbase.client.retries.number", 7);
    conf.setInt("ipc.client.connect.max.retries", 3);
    // NOTE(review): the pool is never closed before exit — presumably acceptable
    // for a one-shot CLI tool, but confirm no buffered writes are lost.
    HTablePool hTablePool = new HTablePool(conf, 10);
    // try-with-resources guarantees the reader is closed even if a read fails
    // (the original leaked it when readLine() threw).
    try (BufferedReader in = new BufferedReader(new FileReader(args[0]))) {
        String line;
        while ((line = in.readLine()) != null) {
            process(line, hTablePool);
        }
    } catch (IOException e) {
        // Surface the failure instead of silently swallowing it.
        e.printStackTrace();
        System.exit(1);
    }
}
Aggregations