Use of io.cdap.cdap.api.dataset.table.Put in project cdap by caskdata.
The class TimeseriesDataset, method write.
/**
 * Writes the constructed value. This implementation overwrites any existing value.
 * The method can be overridden to apply update logic relevant to a subclass (e.g. incrementing a counter).
 *
 * @param row row key to write to
 * @param columnName column name to write to
 * @param value value passed in with the {@link Entry} to write
 */
void write(byte[] row, byte[] columnName, byte[] value) {
  Put put = new Put(row, columnName, value);
  table.put(put);
}
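As the Javadoc notes, a subclass can swap the overwrite semantics for an update. A minimal sketch of such an override, assuming the underlying Table exposes CDAP's increment(byte[], byte[], long) operation and that values were encoded as 8-byte longs with Bytes.toBytes(long):

// Hypothetical subclass variant: treat each cell as a counter instead of overwriting it.
@Override
void write(byte[] row, byte[] columnName, byte[] value) {
  // Assumes values were encoded via Bytes.toBytes(long); adjust for a different codec.
  table.increment(row, columnName, Bytes.toLong(value));
}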
Use of io.cdap.cdap.api.dataset.table.Put in project cdap by caskdata.
The class IndexedTableTest, method testIndexedRangeLookups.
@Test
public void testIndexedRangeLookups() throws Exception {
  DatasetId indexRangedLookupDs = DatasetFrameworkTestUtil.NAMESPACE_ID.dataset("rangeLookup");
  dsFrameworkUtil.createInstance("indexedTable", indexRangedLookupDs,
      DatasetProperties.builder().add(IndexedTable.INDEX_COLUMNS_CONF_KEY, idxColString).build());
  final IndexedTable iTable = dsFrameworkUtil.getInstance(indexRangedLookupDs);
  TransactionExecutor txnl = dsFrameworkUtil.newTransactionExecutor(iTable);
  try {
    // start a new transaction
    txnl.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        // perform 5 puts, using idx values 1,2,3,4,5
        iTable.put(new Put(keyE).add(idxCol, idx4).add(valCol, valE));
        iTable.put(new Put(keyC).add(idxCol, idx1).add(valCol, valC));
        iTable.put(new Put(keyD).add(idxCol, idx5).add(valCol, valA));
        iTable.put(new Put(keyB).add(idxCol, idx2).add(valCol, valB));
        iTable.put(new Put(keyA).add(idxCol, idx3).add(valCol, valD));
      }
    });
    txnl.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        // do a scan using idx value range [idx2, idx5). Assert that we retrieve idx2, idx3, idx4.
        Scanner scanner = iTable.scanByIndex(idxCol, idx2, idx5);
        Row next = scanner.next();
        Assert.assertNotNull(next);
        Assert.assertTrue(Bytes.equals(keyB, next.getRow()));
        Assert.assertTrue(Bytes.equals(valB, next.get(valCol)));
        next = scanner.next();
        Assert.assertNotNull(next);
        Assert.assertTrue(Bytes.equals(keyA, next.getRow()));
        Assert.assertTrue(Bytes.equals(valD, next.get(valCol)));
        next = scanner.next();
        Assert.assertNotNull(next);
        Assert.assertTrue(Bytes.equals(keyE, next.getRow()));
        Assert.assertTrue(Bytes.equals(valE, next.get(valCol)));
        assertEmpty(scanner);
      }
    });
    txnl.execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() throws Exception {
        // do a scan using idx value range [null (first row), idx3). Assert that we retrieve
        // the values corresponding to idx1, idx2.
        Scanner scanner = iTable.scanByIndex(idxCol, null, idx3);
        Row next = scanner.next();
        Assert.assertNotNull(next);
        Assert.assertTrue(Bytes.equals(keyC, next.getRow()));
        Assert.assertTrue(Bytes.equals(valC, next.get(valCol)));
        next = scanner.next();
        Assert.assertNotNull(next);
        Assert.assertTrue(Bytes.equals(keyB, next.getRow()));
        Assert.assertTrue(Bytes.equals(valB, next.get(valCol)));
        assertEmpty(scanner);
      }
    });
  } finally {
    dsFrameworkUtil.deleteInstance(indexRangedLookupDs);
  }
}
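The scans above depend on scanByIndex treating its bounds as a half-open range: the start value is inclusive, the end value exclusive, and null means unbounded on that side. A minimal sketch of the same write-then-scan pattern outside a test harness, assuming an already-obtained IndexedTable named indexed and the same idxCol/valCol byte[] constants:

// Write a row; IndexedTable maintains the secondary index on idxCol automatically.
indexed.put(new Put(Bytes.toBytes("row1"))
    .add(idxCol, Bytes.toBytes(10L))
    .add(valCol, Bytes.toBytes("payload")));
// Scan the half-open index range [10, 20): rows indexed at values 10..19 are returned.
Scanner scanner = indexed.scanByIndex(idxCol, Bytes.toBytes(10L), Bytes.toBytes(20L));
try {
  for (Row row = scanner.next(); row != null; row = scanner.next()) {
    byte[] value = row.get(valCol);
    // process value...
  }
} finally {
  scanner.close();
}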
Use of io.cdap.cdap.api.dataset.table.Put in project cdap by caskdata.
The class NoSqlStructuredTable, method convertFieldsToBytes.
/**
 * Converts the fields to a {@link Put} to write to the table. All primary key fields must be
 * provided. The method adds the table name as a prefix to the row key.
 *
 * @param fields the fields to write
 * @return a {@link Put} carrying the row key and non-key columns
 * @throws InvalidFieldException if primary keys are missing or a column is not in the schema
 */
private Put convertFieldsToBytes(Collection<Field<?>> fields) throws InvalidFieldException {
  Set<String> fieldNames = fields.stream().map(Field::getName).collect(Collectors.toSet());
  if (!fieldNames.containsAll(schema.getPrimaryKeys())) {
    throw new InvalidFieldException(schema.getTableId(), fields,
        String.format("Given fields %s does not contain all the primary keys %s",
                      fieldNames, schema.getPrimaryKeys()));
  }
  int numColumns = fields.size() - schema.getPrimaryKeys().size();
  // add the table name as the prefix
  MDSKey.Builder key = new MDSKey.Builder(keyPrefix);
  byte[][] columns = new byte[numColumns][];
  byte[][] values = new byte[numColumns][];
  int i = 0;
  for (Field<?> field : fields) {
    fieldValidator.validateField(field);
    if (schema.isPrimaryKeyColumn(field.getName())) {
      addKey(key, field, schema.getType(field.getName()));
    } else {
      if (schema.getType(field.getName()) == null) {
        throw new InvalidFieldException(schema.getTableId(), field.getName());
      }
      columns[i] = Bytes.toBytes(field.getName());
      values[i] = fieldToBytes(field);
      i++;
    }
  }
  Put put = new Put(key.build().getKey());
  for (int index = 0; index < columns.length; index++) {
    put.add(columns[index], values[index]);
  }
  return put;
}
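Conceptually, the method splits the field set in two: primary key fields are packed, in the order they appear in the given collection, into an MDSKey that begins with the table-name prefix, and every remaining field becomes one column of the resulting Put. A hedged sketch of what a caller's input might look like, using CDAP's Fields helpers; the field names and schema here are made up for illustration:

// Hypothetical schema: primary key (namespace, id), one data column "name".
Collection<Field<?>> fields = Arrays.asList(
    Fields.stringField("namespace", "default"),  // primary key part -> packed into the row key
    Fields.longField("id", 42L),                 // primary key part -> packed into the row key
    Fields.stringField("name", "alice"));        // non-key field -> one column on the Put
// convertFieldsToBytes(fields) would yield a Put whose row key is
// keyPrefix + "default" + 42 and whose single column is "name" -> bytes("alice").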
Use of io.cdap.cdap.api.dataset.table.Put in project cdap by caskdata.
The class HiveExploreTableTestRun, method testNonAsciiStrings.
@Test
public void testNonAsciiStrings() throws Exception {
  DatasetId ttId = NAMESPACE_ID.dataset("tt");
  datasetFramework.addInstance(Table.class.getName(), ttId,
      TableProperties.builder()
        .setSchema(Schema.recordOf("record",
                                   Schema.Field.of("a", Schema.of(Schema.Type.STRING)),
                                   Schema.Field.of("b", Schema.of(Schema.Type.STRING))))
        .setRowFieldName("a")
        .setExploreTableName("tt")
        .build());
  try {
    // Accessing dataset instance to perform data operations
    Table tt = datasetFramework.getDataset(ttId, DatasetDefinition.NO_ARGUMENTS, null);
    Assert.assertNotNull(tt);
    Transaction tx = transactionManager.startShort(100);
    ((TransactionAware) tt).startTx(tx);
    tt.put(new Put("a", "b", "c"));
    // row key and column value are non-ASCII
    tt.put(new Put("ä", "b", "ç"));
    ((TransactionAware) tt).commitTx();
    transactionManager.canCommit(tx.getTransactionId(), ((TransactionAware) tt).getTxChanges());
    transactionManager.commit(tx.getTransactionId(), tx.getWritePointer());
    ((TransactionAware) tt).postTxCommit();
    ExploreExecutionResult results = exploreClient.submit(NAMESPACE_ID, "select * from tt").get();
    List<Object> columns = results.next().getColumns();
    Assert.assertEquals(2, columns.size());
    Assert.assertEquals("a", columns.get(0));
    Assert.assertEquals("c", columns.get(1));
    columns = results.next().getColumns();
    Assert.assertEquals(2, columns.size());
    Assert.assertEquals("ä", columns.get(0));
    Assert.assertEquals("ç", columns.get(1));
  } finally {
    datasetFramework.deleteInstance(ttId);
  }
}
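The String-based Put constructor used here is shorthand for byte-encoding each argument, which is why the non-ASCII row key and value survive the round trip through the table and Explore. A small sketch of the equivalence the test relies on, assuming Bytes.toBytes(String) encodes as UTF-8 (as HBase-style Bytes utilities do):

// These two puts write the same bytes:
Put shorthand = new Put("ä", "b", "ç");
Put explicit = new Put(Bytes.toBytes("ä"))
    .add(Bytes.toBytes("b"), Bytes.toBytes("ç"));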
Use of io.cdap.cdap.api.dataset.table.Put in project cdap by caskdata.
The class HiveExploreTableTestRun, method testTableWithDateTimestamp.
@Test
public void testTableWithDateTimestamp() throws Exception {
  TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
  DatasetId dtTsTable = NAMESPACE_ID.dataset("dt_ts_table");
  DatasetId otherDtTsTable = NAMESPACE_ID.dataset("other_dt_ts_table");
  Schema schema = Schema.recordOf(
      "recordWithDateTimestamp",
      Schema.Field.of("int_field", Schema.of(Schema.Type.INT)),
      Schema.Field.of("string_field", Schema.nullableOf(Schema.of(Schema.Type.STRING))),
      Schema.Field.of("date_field", Schema.nullableOf(Schema.of(Schema.LogicalType.DATE))),
      Schema.Field.of("ts_millis_field", Schema.nullableOf(Schema.of(Schema.LogicalType.TIMESTAMP_MILLIS))),
      Schema.Field.of("ts_micros_field", Schema.nullableOf(Schema.of(Schema.LogicalType.TIMESTAMP_MICROS))));
  datasetFramework.addInstance(Table.class.getName(), dtTsTable,
      TableProperties.builder().setSchema(schema).setRowFieldName("int_field")
        .setExploreTableName("dt_ts_table").build());
  datasetFramework.addInstance(Table.class.getName(), otherDtTsTable,
      TableProperties.builder().setSchema(schema).setRowFieldName("int_field")
        .setExploreTableName("other_dt_ts_table").build());
  try {
    // Accessing dataset instance to perform data operations
    Table table = datasetFramework.getDataset(dtTsTable, DatasetDefinition.NO_ARGUMENTS, null);
    Assert.assertNotNull(table);
    Transaction tx = transactionManager.startShort(100);
    ((TransactionAware) table).startTx(tx);
    Put put = new Put(Bytes.toBytes("row1"));
    put.add("int_field", 1);
    put.add("string_field", "alice");
    put.add("date_field", 0);
    put.add("ts_millis_field", 1536336590595L);
    put.add("ts_micros_field", 1536336590595123L);
    table.put(put);
    put = new Put(Bytes.toBytes("row2"));
    put.add("int_field", 2);
    put.add("string_field", "bob");
    table.put(put);
    ((TransactionAware) table).commitTx();
    transactionManager.canCommit(tx.getTransactionId(), ((TransactionAware) table).getTxChanges());
    transactionManager.commit(tx.getTransactionId(), tx.getWritePointer());
    ((TransactionAware) table).postTxCommit();
    ExploreExecutionResult results = exploreClient.submit(NAMESPACE_ID, "select * from dt_ts_table").get();
    List<Object> columns = results.next().getColumns();
    Assert.assertEquals(5, columns.size());
    Assert.assertEquals("alice", columns.get(1));
    Assert.assertEquals("1970-01-01", columns.get(2));
    Assert.assertEquals("2018-09-07 16:09:50.595", columns.get(3));
    Assert.assertEquals("2018-09-07 16:09:50.595123", columns.get(4));
    columns = results.next().getColumns();
    Assert.assertEquals(5, columns.size());
    Assert.assertEquals("bob", columns.get(1));
    Assert.assertNull(columns.get(2));
    Assert.assertNull(columns.get(3));
    Assert.assertNull(columns.get(4));
    String command = "insert into other_dt_ts_table select int_field, string_field, date_field, "
      + "ts_millis_field, ts_micros_field from dt_ts_table";
    ExploreExecutionResult result = exploreClient.submit(NAMESPACE_ID, command).get();
    Assert.assertEquals(QueryStatus.OpStatus.FINISHED, result.getStatus().getStatus());
    command = "select string_field, date_field, ts_millis_field, ts_micros_field from other_dt_ts_table";
    runCommand(NAMESPACE_ID, command, true,
        Lists.newArrayList(new ColumnDesc("string_field", "STRING", 1, null),
                           new ColumnDesc("date_field", "DATE", 2, null),
                           new ColumnDesc("ts_millis_field", "TIMESTAMP", 3, null),
                           new ColumnDesc("ts_micros_field", "TIMESTAMP", 4, null)),
        Lists.newArrayList(new QueryResult(Lists.newArrayList("alice", "1970-01-01",
                                                              "2018-09-07 16:09:50.595",
                                                              "2018-09-07 16:09:50.595123")),
                           new QueryResult(Lists.newArrayList("bob", null, null, null))));
  } finally {
    datasetFramework.deleteInstance(dtTsTable);
    datasetFramework.deleteInstance(otherDtTsTable);
  }
}
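The asserted strings follow from how the logical types are stored: DATE as an int counting days since the Unix epoch, and the TIMESTAMP types as longs counting milliseconds or microseconds since the epoch, rendered in UTC because the test pins the default time zone. A quick java.time sketch of why those particular values appear, independent of CDAP:

// Using java.time.Instant, java.time.LocalDate, java.time.temporal.ChronoUnit:
LocalDate date = LocalDate.ofEpochDay(0);                 // 1970-01-01
Instant tsMillis = Instant.ofEpochMilli(1536336590595L);  // 2018-09-07T16:09:50.595Z
Instant tsMicros = Instant.EPOCH.plus(1536336590595123L, ChronoUnit.MICROS);
                                                          // 2018-09-07T16:09:50.595123Z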