Example 11 with ColumnDesc

Use of io.cdap.cdap.proto.ColumnDesc in project cdap by caskdata.

From class HiveExploreStructuredRecordTestRun, method testRecordScannableAndWritableIsOK.

@Test
public void testRecordScannableAndWritableIsOK() throws Exception {
    DatasetId instanceId = NAMESPACE_ID.dataset("tabul");
    datasetFramework.addInstance("TableWrapper", instanceId,
        DatasetProperties.builder()
            .add(DatasetProperties.SCHEMA,
                 Schema.recordOf("intRecord",
                                 Schema.Field.of("x", Schema.of(Schema.Type.STRING))).toString())
            .build());
    DatasetSpecification spec = datasetFramework.getDatasetSpec(instanceId);
    try {
        exploreTableManager.enableDataset(instanceId, spec, false);
        runCommand(NAMESPACE_ID, "describe dataset_tabul", true,
                   Lists.newArrayList(
                       new ColumnDesc("col_name", "STRING", 1, "from deserializer"),
                       new ColumnDesc("data_type", "STRING", 2, "from deserializer"),
                       new ColumnDesc("comment", "STRING", 3, "from deserializer")),
                   Lists.newArrayList(
                       new QueryResult(Lists.<Object>newArrayList("x", "string", "from deserializer"))));
    } finally {
        datasetFramework.deleteInstance(instanceId);
    }
}
Also used: QueryResult (io.cdap.cdap.proto.QueryResult), DatasetSpecification (io.cdap.cdap.api.dataset.DatasetSpecification), ColumnDesc (io.cdap.cdap.proto.ColumnDesc), DatasetId (io.cdap.cdap.proto.id.DatasetId), Test (org.junit.Test)
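
The expected output of a Hive "describe" query follows a fixed shape: three STRING columns (col_name, data_type, comment), 1-based positions, and one row per column of the described table. A minimal sketch of that shape, reusing only the ColumnDesc and QueryResult constructors already used in the test; the helper class and method names are hypothetical:

import com.google.common.collect.Lists;
import io.cdap.cdap.proto.ColumnDesc;
import io.cdap.cdap.proto.QueryResult;
import java.util.List;

// Hypothetical helpers that capture the fixed shape of a "describe" result.
// ColumnDesc(name, type, position, comment) and QueryResult(List<Object>) are the
// same constructors used in the test above.
public final class DescribeExpectations {

    // "describe" always reports these three STRING columns; positions are 1-based.
    public static List<ColumnDesc> describeSchema() {
        return Lists.newArrayList(
            new ColumnDesc("col_name", "STRING", 1, "from deserializer"),
            new ColumnDesc("data_type", "STRING", 2, "from deserializer"),
            new ColumnDesc("comment", "STRING", 3, "from deserializer"));
    }

    // One expected row per column of the described table.
    public static QueryResult describeRow(String columnName, String hiveType) {
        return new QueryResult(Lists.<Object>newArrayList(columnName, hiveType, "from deserializer"));
    }
}

With such helpers, the runCommand call above would read as runCommand(NAMESPACE_ID, "describe dataset_tabul", true, describeSchema(), Lists.newArrayList(describeRow("x", "string"))).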

Example 12 with ColumnDesc

Use of io.cdap.cdap.proto.ColumnDesc in project cdap by caskdata.

From class HiveExploreStructuredRecordTestRun, method testInsert.

@Test
public void testInsert() throws Exception {
    DatasetId copyTable = NAMESPACE_ID.dataset("emailCopy");
    datasetFramework.addInstance(Table.class.getName(), copyTable, TableProperties.builder().setSchema(EmailTableDefinition.SCHEMA).setRowFieldName("id").build());
    try {
        String command = String.format("insert into %s select * from %s", getDatasetHiveName(copyTable), MY_TABLE_NAME);
        ExploreExecutionResult result = exploreClient.submit(NAMESPACE_ID, command).get();
        Assert.assertEquals(QueryStatus.OpStatus.FINISHED, result.getStatus().getStatus());
        command = String.format("select id, subject, body, sender from %s", getDatasetHiveName(copyTable));
        runCommand(NAMESPACE_ID, command, true,
                   Lists.newArrayList(
                       new ColumnDesc("id", "STRING", 1, null),
                       new ColumnDesc("subject", "STRING", 2, null),
                       new ColumnDesc("body", "STRING", 3, null),
                       new ColumnDesc("sender", "STRING", 4, null)),
                   Lists.newArrayList(
                       new QueryResult(Lists.<Object>newArrayList(
                           "email1", "this is the subject", "this is the body", "sljackson@boss.com"))));
    } finally {
        datasetFramework.deleteInstance(copyTable);
    }
}
Also used: QueryResult (io.cdap.cdap.proto.QueryResult), ObjectMappedTable (io.cdap.cdap.api.dataset.lib.ObjectMappedTable), Table (io.cdap.cdap.api.dataset.table.Table), ColumnDesc (io.cdap.cdap.proto.ColumnDesc), ExploreExecutionResult (io.cdap.cdap.explore.client.ExploreExecutionResult), DatasetId (io.cdap.cdap.proto.id.DatasetId), Test (org.junit.Test)
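
The insert test uses the submit-and-read pattern that recurs throughout these examples: submit the SQL, block on the returned future, check the final operation status, then walk the rows. A minimal sketch of that pattern, assuming an already-initialized exploreClient and a hypothetical tableName variable, and using only methods exercised in these tests (submit, getStatus, getResultSchema, hasNext, next):

// Submit a query and block until it completes.
ExploreExecutionResult result =
    exploreClient.submit(NAMESPACE_ID, "select id, subject from " + tableName).get();
Assert.assertEquals(QueryStatus.OpStatus.FINISHED, result.getStatus().getStatus());

// The result schema is a list of ColumnDesc with 1-based positions.
List<ColumnDesc> schema = result.getResultSchema();

// Rows are iterated as QueryResult objects, each holding one List<Object> of column values.
while (result.hasNext()) {
    List<Object> row = result.next().getColumns();
    // inspect row values here
}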

Example 13 with ColumnDesc

Use of io.cdap.cdap.proto.ColumnDesc in project cdap by caskdata.

From class HiveExploreTableTestRun, method testSelectStar.

private void testSelectStar(String tableToQuery, String tableInSchema, Schema schema) throws Exception {
    List<ColumnDesc> expectedSchema = schema.equals(SCHEMA)
        ? Lists.newArrayList(
            new ColumnDesc(tableInSchema + ".bool_field", "BOOLEAN", 1, null),
            new ColumnDesc(tableInSchema + ".int_field", "INT", 2, null),
            new ColumnDesc(tableInSchema + ".long_field", "BIGINT", 3, null),
            new ColumnDesc(tableInSchema + ".float_field", "FLOAT", 4, null),
            new ColumnDesc(tableInSchema + ".double_field", "DOUBLE", 5, null),
            new ColumnDesc(tableInSchema + ".bytes_field", "BINARY", 6, null),
            new ColumnDesc(tableInSchema + ".string_field", "STRING", 7, null))
        : Lists.newArrayList(
            new ColumnDesc(tableInSchema + ".int_field", "INT", 1, null),
            new ColumnDesc(tableInSchema + ".long_field", "BIGINT", 2, null),
            new ColumnDesc(tableInSchema + ".float_field", "FLOAT", 3, null),
            new ColumnDesc(tableInSchema + ".double_field", "BINARY", 4, null),
            new ColumnDesc(tableInSchema + ".bytes_field", "STRING", 5, null),
            new ColumnDesc(tableInSchema + ".new_field", "STRING", 6, null),
            new ColumnDesc(tableInSchema + ".string_field", "STRING", 7, null));
    ExploreExecutionResult results = exploreClient.submit(NAMESPACE_ID, "select * from " + tableToQuery).get();
    // check SCHEMA
    Assert.assertEquals(expectedSchema, results.getResultSchema());
    List<Object> columns = results.next().getColumns();
    // check record1, account for the variability between SCHEMA and NEW_SCHEMA
    int index = 0;
    if (schema.equals(SCHEMA)) {
        Assert.assertFalse((Boolean) columns.get(index++));
    }
    Assert.assertEquals(Integer.MAX_VALUE, columns.get(index++));
    Assert.assertEquals(Long.MAX_VALUE, columns.get(index++));
    // why does this come back as a double when it's a float???
    Assert.assertTrue(Math.abs(3.14f - (Double) columns.get(index++)) < 0.000001);
    if (schema.equals(SCHEMA)) {
        Assert.assertTrue(Math.abs(3.14 - (Double) columns.get(index++)) < 0.000001);
        Assert.assertArrayEquals(new byte[] { 'A', 'B', 'C' }, (byte[]) columns.get(index++));
    } else {
        Assert.assertArrayEquals(Bytes.toBytes(3.14D), (byte[]) columns.get(index++));
        Assert.assertEquals("ABC", columns.get(index++));
        Assert.assertNull(columns.get(index++));
    }
    Assert.assertEquals("row1", columns.get(index));
    // there should be no more rows
    Assert.assertFalse(results.hasNext());
}
Also used: ColumnDesc (io.cdap.cdap.proto.ColumnDesc), ExploreExecutionResult (io.cdap.cdap.explore.client.ExploreExecutionResult)
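
The large ternary at the top of testSelectStar encodes a simple rule: "select *" prefixes every column with the table name, positions are 1-based, and the Hive types mirror the dataset schema (BOOLEAN, INT, BIGINT, FLOAT, DOUBLE, BINARY, STRING for the corresponding CDAP types). A sketch of a helper that builds such a list from (field name, Hive type) pairs; the helper and the "my_table" value are illustrative, and only the ColumnDesc constructor from the test is assumed:

import io.cdap.cdap.proto.ColumnDesc;
import java.util.ArrayList;
import java.util.List;

// Hypothetical helper: build the expected "select *" schema for a table whose
// columns are given as {name, hiveType} pairs, in order.
private static List<ColumnDesc> prefixedSchema(String tableInSchema, String[][] fields) {
    List<ColumnDesc> schema = new ArrayList<>();
    for (int i = 0; i < fields.length; i++) {
        // "select *" reports columns as table.column; positions start at 1.
        schema.add(new ColumnDesc(tableInSchema + "." + fields[i][0], fields[i][1], i + 1, null));
    }
    return schema;
}

// Usage equivalent to the SCHEMA branch of the ternary above, for a table named "my_table":
List<ColumnDesc> expected = prefixedSchema("my_table", new String[][] {
    {"bool_field", "BOOLEAN"}, {"int_field", "INT"}, {"long_field", "BIGINT"},
    {"float_field", "FLOAT"}, {"double_field", "DOUBLE"},
    {"bytes_field", "BINARY"}, {"string_field", "STRING"}});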

Example 14 with ColumnDesc

Use of io.cdap.cdap.proto.ColumnDesc in project cdap by caskdata.

From class HiveExploreTableTestRun, method testTableWithDateTimestamp.

@Test
public void testTableWithDateTimestamp() throws Exception {
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
    DatasetId dtTsTable = NAMESPACE_ID.dataset("dt_ts_table");
    DatasetId otherDtTsTable = NAMESPACE_ID.dataset("other_dt_ts_table");
    Schema schema = Schema.recordOf(
        "recordWithDateTimestamp",
        Schema.Field.of("int_field", Schema.of(Schema.Type.INT)),
        Schema.Field.of("string_field", Schema.nullableOf(Schema.of(Schema.Type.STRING))),
        Schema.Field.of("date_field", Schema.nullableOf(Schema.of(Schema.LogicalType.DATE))),
        Schema.Field.of("ts_millis_field", Schema.nullableOf(Schema.of(Schema.LogicalType.TIMESTAMP_MILLIS))),
        Schema.Field.of("ts_micros_field", Schema.nullableOf(Schema.of(Schema.LogicalType.TIMESTAMP_MICROS))));
    datasetFramework.addInstance(Table.class.getName(), dtTsTable, TableProperties.builder().setSchema(schema).setRowFieldName("int_field").setExploreTableName("dt_ts_table").build());
    datasetFramework.addInstance(Table.class.getName(), otherDtTsTable, TableProperties.builder().setSchema(schema).setRowFieldName("int_field").setExploreTableName("other_dt_ts_table").build());
    try {
        // Accessing dataset instance to perform data operations
        Table table = datasetFramework.getDataset(dtTsTable, DatasetDefinition.NO_ARGUMENTS, null);
        Assert.assertNotNull(table);
        Transaction tx = transactionManager.startShort(100);
        ((TransactionAware) table).startTx(tx);
        Put put = new Put(Bytes.toBytes("row1"));
        put.add("int_field", 1);
        put.add("string_field", "alice");
        put.add("date_field", 0);
        put.add("ts_millis_field", 1536336590595L);
        put.add("ts_micros_field", 1536336590595123L);
        table.put(put);
        put = new Put(Bytes.toBytes("row2"));
        put.add("int_field", 2);
        put.add("string_field", "bob");
        table.put(put);
        ((TransactionAware) table).commitTx();
        transactionManager.canCommit(tx.getTransactionId(), ((TransactionAware) table).getTxChanges());
        transactionManager.commit(tx.getTransactionId(), tx.getWritePointer());
        ((TransactionAware) table).postTxCommit();
        ExploreExecutionResult results = exploreClient.submit(NAMESPACE_ID, "select * from dt_ts_table").get();
        List<Object> columns = results.next().getColumns();
        Assert.assertEquals(5, columns.size());
        Assert.assertEquals("alice", columns.get(1));
        Assert.assertEquals("1970-01-01", columns.get(2));
        Assert.assertEquals("2018-09-07 16:09:50.595", columns.get(3));
        Assert.assertEquals("2018-09-07 16:09:50.595123", columns.get(4));
        columns = results.next().getColumns();
        Assert.assertEquals(5, columns.size());
        Assert.assertEquals("bob", columns.get(1));
        Assert.assertNull(columns.get(2));
        Assert.assertNull(columns.get(3));
        Assert.assertNull(columns.get(4));
        String command = "insert into other_dt_ts_table select int_field, string_field, date_field, ts_millis_field, " + "ts_micros_field from dt_ts_table";
        ExploreExecutionResult result = exploreClient.submit(NAMESPACE_ID, command).get();
        Assert.assertEquals(QueryStatus.OpStatus.FINISHED, result.getStatus().getStatus());
        command = "select string_field, date_field, ts_millis_field, ts_micros_field from other_dt_ts_table";
        runCommand(NAMESPACE_ID, command, true,
                   Lists.newArrayList(
                       new ColumnDesc("string_field", "STRING", 1, null),
                       new ColumnDesc("date_field", "DATE", 2, null),
                       new ColumnDesc("ts_millis_field", "TIMESTAMP", 3, null),
                       new ColumnDesc("ts_micros_field", "TIMESTAMP", 4, null)),
                   Lists.newArrayList(
                       new QueryResult(Lists.newArrayList(
                           "alice", "1970-01-01", "2018-09-07 16:09:50.595", "2018-09-07 16:09:50.595123")),
                       new QueryResult(Lists.newArrayList("bob", null, null, null))));
    } finally {
        datasetFramework.deleteInstance(dtTsTable);
        datasetFramework.deleteInstance(otherDtTsTable);
    }
}
Also used: QueryResult (io.cdap.cdap.proto.QueryResult), Table (io.cdap.cdap.api.dataset.table.Table), Transaction (org.apache.tephra.Transaction), TransactionAware (org.apache.tephra.TransactionAware), Schema (io.cdap.cdap.api.data.schema.Schema), ColumnDesc (io.cdap.cdap.proto.ColumnDesc), ExploreExecutionResult (io.cdap.cdap.explore.client.ExploreExecutionResult), Put (io.cdap.cdap.api.dataset.table.Put), DatasetId (io.cdap.cdap.proto.id.DatasetId), Test (org.junit.Test)
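
The expected string values in this test come from the logical-type encodings rather than from anything Hive-specific: date_field stores days since the epoch, ts_millis_field milliseconds, ts_micros_field microseconds, and the test pins the JVM to UTC before querying. A standalone sketch using plain java.time (not CDAP API) showing how the literals 0, 1536336590595L, and 1536336590595123L render as the asserted strings:

import java.time.Instant;
import java.time.LocalDate;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;

// Illustrative only: reproduces the expected strings from the raw logical-type values.
public class LogicalTypeRendering {
    public static void main(String[] args) {
        // date_field = 0 days since the epoch
        System.out.println(LocalDate.ofEpochDay(0));                         // 1970-01-01

        // ts_millis_field = 1536336590595L milliseconds since the epoch, rendered in UTC
        DateTimeFormatter millisFmt = DateTimeFormatter
            .ofPattern("yyyy-MM-dd HH:mm:ss.SSS").withZone(ZoneOffset.UTC);
        System.out.println(millisFmt.format(Instant.ofEpochMilli(1536336590595L)));
        // 2018-09-07 16:09:50.595

        // ts_micros_field = 1536336590595123L microseconds since the epoch, rendered in UTC
        long micros = 1536336590595123L;
        Instant tsMicros = Instant.ofEpochSecond(micros / 1_000_000, (micros % 1_000_000) * 1_000);
        DateTimeFormatter microsFmt = DateTimeFormatter
            .ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS").withZone(ZoneOffset.UTC);
        System.out.println(microsFmt.format(tsMicros));
        // 2018-09-07 16:09:50.595123
    }
}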

Example 15 with ColumnDesc

Use of io.cdap.cdap.proto.ColumnDesc in project cdap by caskdata.

From class ExploreStatementTest, method executeTest.

@Test
public void executeTest() throws Exception {
    List<ColumnDesc> columnDescriptions = Lists.newArrayList(new ColumnDesc("column1", "STRING", 1, ""));
    List<QueryResult> queryResults = Lists.newArrayList();
    ExploreClient exploreClient = new MockExploreClient(
        ImmutableMap.of("mock_query_1", columnDescriptions,
                        "mock_query_2", columnDescriptions,
                        "mock_query_3", columnDescriptions,
                        "mock_query_4", columnDescriptions),
        ImmutableMap.of("mock_query_1", queryResults,
                        "mock_query_2", queryResults,
                        "mock_query_3", queryResults,
                        "mock_query_4", queryResults));
    // Make sure an empty query still has a ResultSet associated with it
    ExploreStatement statement = new ExploreStatement(null, exploreClient, "ns1");
    Assert.assertTrue(statement.execute("mock_query_1"));
    ResultSet rs = statement.getResultSet();
    Assert.assertNotNull(rs);
    Assert.assertFalse(rs.isClosed());
    Assert.assertFalse(rs.next());
    rs = statement.executeQuery("mock_query_2");
    Assert.assertNotNull(rs);
    Assert.assertFalse(rs.isClosed());
    Assert.assertFalse(rs.next());
    // Make sure subsequent calls to an execute method close the previous results
    ResultSet rs2 = statement.executeQuery("mock_query_3");
    Assert.assertTrue(rs.isClosed());
    Assert.assertNotNull(rs2);
    Assert.assertFalse(rs2.isClosed());
    Assert.assertFalse(rs2.next());
    Assert.assertTrue(statement.execute("mock_query_4"));
    Assert.assertTrue(rs2.isClosed());
}
Also used: ExploreClient (io.cdap.cdap.explore.client.ExploreClient), MockExploreClient (io.cdap.cdap.explore.client.MockExploreClient), QueryResult (io.cdap.cdap.proto.QueryResult), ResultSet (java.sql.ResultSet), ColumnDesc (io.cdap.cdap.proto.ColumnDesc), Test (org.junit.Test)
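
What this test pins down is the standard java.sql.Statement contract: each execution method implicitly closes the statement's currently open ResultSet. A minimal sketch of that contract against the plain JDBC interfaces; any Statement implementation is assumed, and the query strings are placeholders:

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public final class ImplicitCloseContract {

    // Per java.sql.Statement, executing a new query on the same statement
    // implicitly closes its previously returned ResultSet.
    static void demonstrateImplicitClose(Statement statement) throws SQLException {
        ResultSet first = statement.executeQuery("select 1");
        ResultSet second = statement.executeQuery("select 2"); // closes 'first' implicitly
        assert first.isClosed();     // old result set is now closed (asserts need -ea)
        assert !second.isClosed();   // new one stays open until closed or superseded
        second.close();
    }
}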

Aggregations

ColumnDesc (io.cdap.cdap.proto.ColumnDesc): 36
QueryResult (io.cdap.cdap.proto.QueryResult): 23
Test (org.junit.Test): 21
DatasetId (io.cdap.cdap.proto.id.DatasetId): 16
ExploreExecutionResult (io.cdap.cdap.explore.client.ExploreExecutionResult): 12
TimePartitionedFileSet (io.cdap.cdap.api.dataset.lib.TimePartitionedFileSet): 9
Location (org.apache.twill.filesystem.Location): 8
PartitionedFileSet (io.cdap.cdap.api.dataset.lib.PartitionedFileSet): 7
SQLException (java.sql.SQLException): 7
FileSet (io.cdap.cdap.api.dataset.lib.FileSet): 6
Schema (io.cdap.cdap.api.data.schema.Schema): 4
Table (io.cdap.cdap.api.dataset.table.Table): 4
ExploreClient (io.cdap.cdap.explore.client.ExploreClient): 4
MockExploreClient (io.cdap.cdap.explore.client.MockExploreClient): 4
QueryStatus (io.cdap.cdap.proto.QueryStatus): 4
ResultSet (java.sql.ResultSet): 4
ImmutableList (com.google.common.collect.ImmutableList): 3
PartitionKey (io.cdap.cdap.api.dataset.lib.PartitionKey): 3
PartitionedFileSetProperties (io.cdap.cdap.api.dataset.lib.PartitionedFileSetProperties): 3
QueryHandle (io.cdap.cdap.proto.QueryHandle): 3