Use of org.apache.tephra.Transaction in project cdap by caskdata.
From class HiveExploreTableTestRun, method testNonAsciiStrings.
@Test
public void testNonAsciiStrings() throws Exception {
  DatasetId ttId = NAMESPACE_ID.dataset("tt");
  datasetFramework.addInstance(Table.class.getName(), ttId,
                               TableProperties.builder()
                                 .setSchema(Schema.recordOf("record",
                                                            Schema.Field.of("a", Schema.of(Schema.Type.STRING)),
                                                            Schema.Field.of("b", Schema.of(Schema.Type.STRING))))
                                 .setRowFieldName("a")
                                 .setExploreTableName("tt")
                                 .build());
  try {
    // Accessing dataset instance to perform data operations
    Table tt = datasetFramework.getDataset(ttId, DatasetDefinition.NO_ARGUMENTS, null);
    Assert.assertNotNull(tt);
    Transaction tx = transactionManager.startShort(100);
    ((TransactionAware) tt).startTx(tx);
    tt.put(new Put("a", "b", "c"));
    // row key and column value are non-ASCII
    tt.put(new Put("ä", "b", "ç"));
    ((TransactionAware) tt).commitTx();
    transactionManager.canCommit(tx.getTransactionId(), ((TransactionAware) tt).getTxChanges());
    transactionManager.commit(tx.getTransactionId(), tx.getWritePointer());
    ((TransactionAware) tt).postTxCommit();
    ExploreExecutionResult results = exploreClient.submit(NAMESPACE_ID, "select * from tt").get();
    List<Object> columns = results.next().getColumns();
    Assert.assertEquals(2, columns.size());
    Assert.assertEquals("a", columns.get(0));
    Assert.assertEquals("c", columns.get(1));
    columns = results.next().getColumns();
    Assert.assertEquals(2, columns.size());
    Assert.assertEquals("ä", columns.get(0));
    Assert.assertEquals("ç", columns.get(1));
  } finally {
    datasetFramework.deleteInstance(ttId);
  }
}
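Every snippet on this page wraps its dataset writes in the same manual Tephra transaction lifecycle. The following is a minimal sketch of that pattern, not taken from the CDAP sources: the method name writeInTransaction is hypothetical, and transactionManager is assumed to be the same TransactionManager field the tests use.

private static void writeInTransaction(TransactionAware dataset) throws Exception {
  Transaction tx = transactionManager.startShort(100);   // short transaction with a 100-second timeout
  dataset.startTx(tx);                                    // route the dataset's changes through tx
  // ... perform puts/gets against the dataset here ...
  Assert.assertTrue(dataset.commitTx());                  // flush the buffered writes
  transactionManager.canCommit(tx.getTransactionId(),     // conflict detection against concurrent transactions
                               dataset.getTxChanges());
  transactionManager.commit(tx.getTransactionId(),        // make the write pointer visible to readers
                            tx.getWritePointer());
  dataset.postTxCommit();                                 // let the dataset clean up its per-transaction state
}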
Use of org.apache.tephra.Transaction in project cdap by caskdata.
From class WritableDatasetTestRun, method writeIntoItselfTest.
@Test
public void writeIntoItselfTest() throws Exception {
  try {
    initKeyValueTable(MY_TABLE, true);
    ListenableFuture<ExploreExecutionResult> future =
      exploreClient.submit(NAMESPACE_ID,
                           String.format("insert into table %s select * from %s", MY_TABLE_NAME, MY_TABLE_NAME));
    ExploreExecutionResult result = future.get();
    result.close();
    // Assert the values have been inserted into the dataset
    KeyStructValueTableDefinition.KeyStructValueTable table =
      datasetFramework.getDataset(MY_TABLE, DatasetDefinition.NO_ARGUMENTS, null);
    Assert.assertNotNull(table);
    Transaction tx = transactionManager.startShort(100);
    table.startTx(tx);
    Assert.assertEquals(new KeyStructValueTableDefinition.KeyValue.Value("first", Lists.newArrayList(1, 2, 3, 4, 5)),
                        table.get("1_2"));
    Assert.assertEquals(new KeyStructValueTableDefinition.KeyValue.Value("two", Lists.newArrayList(10, 11, 12, 13, 14)),
                        table.get("2_2"));
    Assert.assertTrue(table.commitTx());
    transactionManager.canCommit(tx.getTransactionId(), table.getTxChanges());
    transactionManager.commit(tx.getTransactionId(), tx.getWritePointer());
    table.postTxCommit();
    // Make sure Hive also sees those values
    result = exploreClient.submit(NAMESPACE_ID, "select * from " + MY_TABLE_NAME).get();
    Assert.assertEquals("1", result.next().getColumns().get(0).toString());
    Assert.assertEquals("1_2", result.next().getColumns().get(0).toString());
    Assert.assertEquals("2", result.next().getColumns().get(0).toString());
    Assert.assertEquals("2_2", result.next().getColumns().get(0).toString());
    Assert.assertFalse(result.hasNext());
    result.close();
  } finally {
    datasetFramework.deleteInstance(MY_TABLE);
  }
}
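The Hive side of these tests always goes through the same ExploreClient calls: submit a statement, block on the returned future, read rows, and close the result. A short sketch of that pattern, where "my_table" is a placeholder table name and exploreClient and NAMESPACE_ID stand for the same fields the tests use:

ListenableFuture<ExploreExecutionResult> future = exploreClient.submit(NAMESPACE_ID, "select * from my_table");
ExploreExecutionResult result = future.get();           // blocks until the Hive query has finished
try {
  while (result.hasNext()) {
    List<Object> columns = result.next().getColumns();  // one row of the Hive result set
    // ... assert on the row's columns here ...
  }
} finally {
  result.close();                                       // always release the query's resources
}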
Use of org.apache.tephra.Transaction in project cdap by caskdata.
From class WritableDatasetTestRun, method initKeyValueTable.
private static void initKeyValueTable(DatasetId datasetInstanceId, boolean addData) throws Exception {
  // Performing admin operations to create dataset instance
  datasetFramework.addInstance("keyStructValueTable", datasetInstanceId, DatasetProperties.EMPTY);
  if (!addData) {
    return;
  }
  // Accessing dataset instance to perform data operations
  KeyStructValueTableDefinition.KeyStructValueTable table =
    datasetFramework.getDataset(datasetInstanceId, DatasetDefinition.NO_ARGUMENTS, null);
  Assert.assertNotNull(table);
  Transaction tx = transactionManager.startShort(100);
  table.startTx(tx);
  KeyStructValueTableDefinition.KeyValue.Value value1 =
    new KeyStructValueTableDefinition.KeyValue.Value("first", Lists.newArrayList(1, 2, 3, 4, 5));
  KeyStructValueTableDefinition.KeyValue.Value value2 =
    new KeyStructValueTableDefinition.KeyValue.Value("two", Lists.newArrayList(10, 11, 12, 13, 14));
  table.put("1", value1);
  table.put("2", value2);
  Assert.assertEquals(value1, table.get("1"));
  Assert.assertTrue(table.commitTx());
  transactionManager.canCommit(tx.getTransactionId(), table.getTxChanges());
  transactionManager.commit(tx.getTransactionId(), tx.getWritePointer());
  table.postTxCommit();
}
Use of org.apache.tephra.Transaction in project cdap by caskdata.
From class WritableDatasetTestRun, method writeFromDatasetIntoNativeTableTest.
@Test
public void writeFromDatasetIntoNativeTableTest() throws Exception {
  datasetFramework.addModule(kvTable, new KeyValueTableDefinition.KeyValueTableModule());
  datasetFramework.addInstance("kvTable", simpleTable, DatasetProperties.EMPTY);
  try {
    exploreClient.submit(NAMESPACE_ID, "create table test (first INT, second STRING) ROW FORMAT " +
                         "DELIMITED FIELDS TERMINATED BY '\\t'").get().close();
    // Accessing dataset instance to perform data operations
    KeyValueTableDefinition.KeyValueTable table =
      datasetFramework.getDataset(simpleTable, DatasetDefinition.NO_ARGUMENTS, null);
    Assert.assertNotNull(table);
    Transaction tx1 = transactionManager.startShort(100);
    table.startTx(tx1);
    table.put(10, "ten");
    Assert.assertEquals("ten", table.get(10));
    Assert.assertTrue(table.commitTx());
    transactionManager.canCommit(tx1.getTransactionId(), table.getTxChanges());
    transactionManager.commit(tx1.getTransactionId(), tx1.getWritePointer());
    table.postTxCommit();
    exploreClient.submit(NAMESPACE_ID, "insert into table test select * from " + simpleTableName).get().close();
    assertSelectAll(NAMESPACE_ID, "test", ImmutableList.<List<Object>>of(ImmutableList.<Object>of(10, "ten")));
  } finally {
    exploreClient.submit(NAMESPACE_ID, "drop table if exists test").get().close();
    datasetFramework.deleteInstance(simpleTable);
    datasetFramework.deleteModule(kvTable);
  }
}
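The dataset-backed tests above and below also bracket their transactional work with the same DatasetFramework admin calls. A minimal sketch of that create/tear-down pattern, where myModuleId and myTableId are placeholder identifiers rather than fields taken from the tests:

datasetFramework.addModule(myModuleId, new KeyValueTableDefinition.KeyValueTableModule());
datasetFramework.addInstance("kvTable", myTableId, DatasetProperties.EMPTY);
try {
  // ... transactional reads/writes and Explore queries against myTableId ...
} finally {
  datasetFramework.deleteInstance(myTableId);  // drop the dataset instance first
  datasetFramework.deleteModule(myModuleId);   // then unregister the module that defined its type
}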
Use of org.apache.tephra.Transaction in project cdap by caskdata.
From class WritableDatasetTestRun, method writeIntoOtherDatasetTest.
@Test
public void writeIntoOtherDatasetTest() throws Exception {
  datasetFramework.addModule(keyExtendedStructValueTable,
                             new KeyExtendedStructValueTableDefinition.KeyExtendedStructValueTableModule());
  datasetFramework.addInstance("keyExtendedStructValueTable", extendedTable, DatasetProperties.EMPTY);
  try {
    initKeyValueTable(MY_TABLE, true);
    // Accessing dataset instance to perform data operations
    KeyExtendedStructValueTableDefinition.KeyExtendedStructValueTable table =
      datasetFramework.getDataset(extendedTable, DatasetDefinition.NO_ARGUMENTS, null);
    Assert.assertNotNull(table);
    Transaction tx1 = transactionManager.startShort(100);
    table.startTx(tx1);
    KeyExtendedStructValueTableDefinition.KeyExtendedValue value1 =
      new KeyExtendedStructValueTableDefinition.KeyExtendedValue(
        "10", new KeyStructValueTableDefinition.KeyValue.Value("ten", Lists.newArrayList(10, 11, 12)), 20);
    table.put("10", value1);
    Assert.assertEquals(value1, table.get("10"));
    Assert.assertTrue(table.commitTx());
    transactionManager.canCommit(tx1.getTransactionId(), table.getTxChanges());
    transactionManager.commit(tx1.getTransactionId(), tx1.getWritePointer());
    table.postTxCommit();
    String query = String.format("insert into table %s select key,value from %s", MY_TABLE_NAME, extendedTableName);
    ListenableFuture<ExploreExecutionResult> future = exploreClient.submit(NAMESPACE_ID, query);
    ExploreExecutionResult result = future.get();
    result.close();
    result = exploreClient.submit(NAMESPACE_ID, "select * from " + MY_TABLE_NAME).get();
    Assert.assertEquals("1", result.next().getColumns().get(0).toString());
    Assert.assertEquals("10_2", result.next().getColumns().get(0).toString());
    Assert.assertEquals("2", result.next().getColumns().get(0).toString());
    Assert.assertFalse(result.hasNext());
    result.close();
    // Test insert overwrite
    query = String.format("insert overwrite table %s select key,value from %s", MY_TABLE_NAME, extendedTableName);
    result = exploreClient.submit(NAMESPACE_ID, query).get();
    result.close();
    result = exploreClient.submit(NAMESPACE_ID, "select * from " + MY_TABLE_NAME).get();
    result.hasNext();
  } finally {
    datasetFramework.deleteInstance(MY_TABLE);
    datasetFramework.deleteInstance(extendedTable);
    datasetFramework.deleteModule(keyExtendedStructValueTable);
  }
}