Example use of org.apache.kudu.client.Insert in the Apache Hive project:
class TestKuduInputFormat, method setUp.
@Before
public void setUp() throws Exception {
// Set the base configuration values.
BASE_CONF.set(KUDU_MASTER_ADDRS_KEY, harness.getMasterAddressesAsString());
BASE_CONF.set(KUDU_TABLE_NAME_KEY, TABLE_NAME);
BASE_CONF.set(FileInputFormat.INPUT_DIR, "dummy");
// Create the test Kudu table.
CreateTableOptions options = new CreateTableOptions().setRangePartitionColumns(ImmutableList.of("key"));
harness.getClient().createTable(TABLE_NAME, SCHEMA, options);
// Insert a test row.
KuduTable table = harness.getClient().openTable(TABLE_NAME);
KuduSession session = harness.getClient().newSession();
Insert insert = table.newInsert();
PartialRow insertRow = insert.getRow();
// Use KuduWritable, to populate the insert row.
new KuduWritable(ROW).populateRow(insertRow);
session.apply(insert);
session.close();
}
Example use of org.apache.kudu.client.Insert in the Apache Hive project:
class TestKuduInputFormat, method testPredicate.
@Test
public void testPredicate() throws Exception {
// Insert a second test row that will be filtered out.
KuduTable table = harness.getClient().openTable(TABLE_NAME);
KuduSession session = harness.getClient().newSession();
Insert insert = table.newInsert();
PartialRow row = insert.getRow();
row.addByte("key", (byte) 2);
row.addShort("int16", (short) 2);
row.addInt("int32", 2);
row.addLong("int64", 2L);
row.addBoolean("bool", false);
row.addFloat("float", 2.2f);
row.addDouble("double", 2.2d);
row.addString("string", "two");
row.addBinary("binary", "two".getBytes(UTF_8));
row.addTimestamp("timestamp", new Timestamp(NOW_MS + 1));
row.addDecimal("decimal", new BigDecimal("2.222"));
row.setNull("null");
// Not setting the "default" column.
session.apply(insert);
session.close();
KuduInputFormat input = new KuduInputFormat();
// Test an equality predicate for each column.
for (ColumnSchema col : SCHEMA.getColumns()) {
// Skip binary columns because binary predicates are not supported. (HIVE-11370)
if (col.getName().equals("null") || col.getName().equals("default") || col.getName().equals("binary")) {
continue;
}
JobConf jobConf = new JobConf(BASE_CONF);
String columnsStr = SCHEMA.getColumns().stream().map(ColumnSchema::getName).collect(Collectors.joining(","));
jobConf.set(serdeConstants.LIST_COLUMNS, columnsStr);
PrimitiveTypeInfo typeInfo = toHiveType(col.getType(), col.getTypeAttributes());
ExprNodeDesc colExpr = new ExprNodeColumnDesc(typeInfo, col.getName(), null, false);
ExprNodeDesc constExpr = new ExprNodeConstantDesc(typeInfo, ROW.getObject(col.getName()));
List<ExprNodeDesc> children = Lists.newArrayList();
children.add(colExpr);
children.add(constExpr);
ExprNodeGenericFuncDesc predicateExpr = new ExprNodeGenericFuncDesc(typeInfo, new GenericUDFOPEqual(), children);
String filterExpr = SerializationUtilities.serializeExpression(predicateExpr);
jobConf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExpr);
InputSplit[] splits = input.getSplits(jobConf, 1);
assertEquals(1, splits.length);
KuduInputSplit split = (KuduInputSplit) splits[0];
KuduRecordReader reader = (KuduRecordReader) input.getRecordReader(split, jobConf, null);
assertTrue(reader.nextKeyValue());
RowResult value = reader.getCurrentValue().getRowResult();
verfiyRow(value);
assertFalse("Extra row on column: " + col.getName(), reader.nextKeyValue());
}
}
Example use of org.apache.kudu.client.Insert in the Apache Apex Malhar project:
class AbstractKuduOutputOperator, method processForInsert.
protected void processForInsert(KuduExecutionContext kuduExecutionContext) {
Insert thisInsert = kuduTable.newInsert();
performCommonProcessing(thisInsert, kuduExecutionContext);
}
Example use of org.apache.kudu.client.Insert in the Presto project:
class SchemaEmulationByTableNameConvention, method createAndFillSchemasTable.
private void createAndFillSchemasTable(KuduClient client) throws KuduException {
List<String> existingSchemaNames = listSchemaNamesFromTablets(client);
ColumnSchema schemaColumnSchema = new ColumnSchema.ColumnSchemaBuilder("schema", Type.STRING).key(true).build();
Schema schema = new Schema(ImmutableList.of(schemaColumnSchema));
CreateTableOptions options = new CreateTableOptions();
options.addHashPartitions(ImmutableList.of(schemaColumnSchema.getName()), 2);
KuduTable schemasTable = client.createTable(rawSchemasTableName, schema, options);
KuduSession session = client.newSession();
try {
session.setFlushMode(SessionConfiguration.FlushMode.AUTO_FLUSH_BACKGROUND);
for (String schemaName : existingSchemaNames) {
Insert insert = schemasTable.newInsert();
insert.getRow().addString(0, schemaName);
session.apply(insert);
}
} finally {
session.close();
}
}
Aggregations