Use of org.apache.kudu.client.CreateTableOptions in project hive by apache.
The class KuduTestSetup, method createAllTypesTable:
public void createAllTypesTable(KuduClient client) throws KuduException {
  dropAllTypesTable(client);
  CreateTableOptions options = new CreateTableOptions()
      .addHashPartitions(Arrays.asList("key"), 4);
  client.createTable(ALL_TYPES_TABLE_NAME, ALL_TYPES_SCHEMA, options);
}
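For context, a self-contained sketch of the same pattern outside the Hive test harness might look like the following. The master address, table name, and column definitions are illustrative assumptions, not part of the test code above.

public class CreateHashPartitionedTable {
  public static void main(String[] args) throws KuduException {
    // Assumed master address and table name, for illustration only.
    KuduClient client = new KuduClient.KuduClientBuilder("localhost:7051").build();
    try {
      Schema schema = new Schema(Arrays.asList(
          new ColumnSchema.ColumnSchemaBuilder("key", Type.INT32).key(true).build(),
          new ColumnSchema.ColumnSchemaBuilder("value", Type.STRING).nullable(true).build()));
      // Hash-partition on the primary key into 4 buckets, mirroring the setup above.
      CreateTableOptions options = new CreateTableOptions()
          .addHashPartitions(Arrays.asList("key"), 4);
      if (!client.tableExists("example_table")) {
        client.createTable("example_table", schema, options);
      }
    } finally {
      client.close();
    }
  }
}

(Imports assumed: java.util.Arrays, org.apache.kudu.ColumnSchema, org.apache.kudu.Schema, org.apache.kudu.Type, and the org.apache.kudu.client classes used above.)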
Use of org.apache.kudu.client.CreateTableOptions in project hive by apache.
The class TestKuduInputFormat, method testMultipleSplits:
@Test
public void testMultipleSplits() throws Exception {
  String tableName = "default.twoPartitionTable";
  Schema schema = new Schema(Arrays.asList(
      new ColumnSchema.ColumnSchemaBuilder("key", Type.INT32).key(true).build(),
      new ColumnSchema.ColumnSchemaBuilder("string", Type.STRING).build()));
  CreateTableOptions options = new CreateTableOptions()
      .addHashPartitions(Collections.singletonList("key"), 2);
  harness.getClient().createTable(tableName, schema, options);

  // Insert multiple test rows.
  KuduTable table = harness.getClient().openTable(tableName);
  KuduSession session = harness.getClient().newSession();
  Insert insert1 = table.newInsert();
  PartialRow row1 = insert1.getRow();
  row1.addInt("key", 1);
  row1.addString("string", "one");
  session.apply(insert1);
  Insert insert2 = table.newInsert();
  PartialRow row2 = insert2.getRow();
  row2.addInt("key", 2);
  row2.addString("string", "two");
  session.apply(insert2);
  session.close();

  KuduInputFormat input = new KuduInputFormat();
  JobConf jobConf = new JobConf(BASE_CONF);
  jobConf.set(KUDU_TABLE_NAME_KEY, tableName);
  jobConf.set(serdeConstants.LIST_COLUMNS, "key");
  InputSplit[] splits = input.getSplits(jobConf, 1);
  assertEquals(2, splits.length);
}
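The test asserts that getSplits returns one split per hash bucket. To check by hand that both inserted rows are readable from the table, a scan along the following lines should work. This is a fragment, not part of the Hive test: it reuses the test's `table` and `harness` variables and assumes additional imports for KuduScanner, RowResult, and RowResultIterator.

// Sketch only: scan back the two rows inserted above.
KuduScanner scanner = harness.getClient().newScannerBuilder(table)
    .setProjectedColumnNames(Arrays.asList("key", "string"))
    .build();
int rowCount = 0;
while (scanner.hasMoreRows()) {
  RowResultIterator results = scanner.nextRows();
  while (results.hasNext()) {
    RowResult result = results.next();
    rowCount++;
  }
}
scanner.close();
assertEquals(2, rowCount);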
Use of org.apache.kudu.client.CreateTableOptions in project hive by apache.
The class TestKuduPredicateHandler, method setUp:
@Before
public void setUp() throws Exception {
  // Set the base configuration values.
  BASE_CONF.set(KUDU_MASTER_ADDRS_KEY, harness.getMasterAddressesAsString());
  BASE_CONF.set(KUDU_TABLE_NAME_KEY, TABLE_NAME);
  BASE_CONF.set(FileInputFormat.INPUT_DIR, "dummy");
  // Create the test Kudu table.
  CreateTableOptions options = new CreateTableOptions()
      .setRangePartitionColumns(ImmutableList.of("key"));
  harness.getClient().createTable(TABLE_NAME, SCHEMA, options);
}
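Note that setRangePartitionColumns on its own leaves the table with a single, unbounded range partition. If explicit range bounds were wanted instead, a sketch along these lines would work; the split value 100, and the assumption that SCHEMA's "key" column is an INT32, are illustrative and not taken from the test.

// Sketch: range-partition "key" into [MIN, 100) and [100, MAX) instead of one unbounded range.
CreateTableOptions options = new CreateTableOptions()
    .setRangePartitionColumns(ImmutableList.of("key"));
PartialRow lower1 = SCHEMA.newPartialRow();
PartialRow upper1 = SCHEMA.newPartialRow();
upper1.addInt("key", 100);
options.addRangePartition(lower1, upper1);   // [MIN, 100)
PartialRow lower2 = SCHEMA.newPartialRow();
lower2.addInt("key", 100);
PartialRow upper2 = SCHEMA.newPartialRow();
options.addRangePartition(lower2, upper2);   // [100, MAX)
harness.getClient().createTable(TABLE_NAME, SCHEMA, options);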
Use of org.apache.kudu.client.CreateTableOptions in project hive by apache.
The class TestKuduOutputFormat, method setUp:
@Before
public void setUp() throws Exception {
  // Set the base configuration values.
  BASE_CONF.set(KUDU_MASTER_ADDRS_KEY, harness.getMasterAddressesAsString());
  TBL_PROPS.setProperty(KUDU_TABLE_NAME_KEY, TABLE_NAME);
  // Create the test Kudu table.
  CreateTableOptions options = new CreateTableOptions()
      .setRangePartitionColumns(ImmutableList.of("key"));
  harness.getClient().createTable(TABLE_NAME, SCHEMA, options);
}
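This setup relies on Kudu's default replication factor. If an explicit factor were needed (for example, 1 on a single-tablet-server cluster), CreateTableOptions also exposes setNumReplicas; a minimal variant of the setup above might read as follows. Whether the test cluster actually needs this is an assumption, not something shown by the test.

// Variant sketch: same range-partitioned table, with an explicit replication factor of 1.
CreateTableOptions options = new CreateTableOptions()
    .setRangePartitionColumns(ImmutableList.of("key"))
    .setNumReplicas(1);
harness.getClient().createTable(TABLE_NAME, SCHEMA, options);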
Use of org.apache.kudu.client.CreateTableOptions in project drill by axbaretto.
The class KuduRecordWriterImpl, method updateSchema:
@Override
public void updateSchema(VectorAccessible batch) throws IOException {
  BatchSchema schema = batch.getSchema();
  int i = 0;
  try {
    if (!checkForTable(name)) {
      List<ColumnSchema> columns = new ArrayList<>();
      for (MaterializedField f : schema) {
        columns.add(new ColumnSchema.ColumnSchemaBuilder(f.getName(), getType(f.getType()))
            .nullable(f.getType().getMode() == DataMode.OPTIONAL)
            .key(i == 0)
            .build());
        i++;
      }
      Schema kuduSchema = new Schema(columns);
      table = client.createTable(name, kuduSchema, new CreateTableOptions());
    }
  } catch (Exception e) {
    throw new IOException(e);
  }
}
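The writer above passes an empty CreateTableOptions, so it relies on Kudu's defaults for partitioning. A variant that hash-partitions the new table on its first (key) column instead might look like the following; the bucket count of 4 is an arbitrary assumption, and this is not Drill's actual implementation.

// Sketch: explicitly hash-partition the new table on its key column.
// Assumes `columns`, `kuduSchema`, `client`, and `name` from the method above,
// plus an import of java.util.Collections.
String keyColumn = columns.get(0).getName();
CreateTableOptions options = new CreateTableOptions()
    .addHashPartitions(Collections.singletonList(keyColumn), 4);
table = client.createTable(name, kuduSchema, options);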