Example 51 with DatasetSpecification

Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

From the class HiveExploreTableTestRun, method testNoOpOnMissingSchema. The test verifies that enabling explore on a Table created without a schema returns QueryHandle.NO_OP instead of issuing a Hive query.

@Test
public void testNoOpOnMissingSchema() throws Exception {
    DatasetId datasetId = NAMESPACE_ID.dataset("noschema");
    datasetFramework.addInstance(Table.class.getName(), datasetId, DatasetProperties.EMPTY);
    try {
        DatasetSpecification spec = datasetFramework.getDatasetSpec(datasetId);
        Assert.assertEquals(QueryHandle.NO_OP, exploreTableManager.enableDataset(datasetId, spec, false));
    } finally {
        datasetFramework.deleteInstance(datasetId);
    }
}
Also used : Table(co.cask.cdap.api.dataset.table.Table) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) DatasetId(co.cask.cdap.proto.id.DatasetId) Test(org.junit.Test)
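
The no-op result follows from the spec carrying no schema property. Below is a minimal hypothetical sketch of the guard this test exercises; it is an assumption about how exploreTableManager short-circuits, not the actual CDAP implementation, and leans only on spec.getProperty and the DatasetProperties.SCHEMA key seen elsewhere on this page:

// Hypothetical guard: a spec whose properties carry no schema gives explore
// nothing to map to a Hive table, so enableDataset answers with NO_OP.
String schema = spec.getProperty(DatasetProperties.SCHEMA);
if (schema == null) {
    return QueryHandle.NO_OP;
}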

Example 52 with DatasetSpecification

Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

From the class HiveExploreStructuredRecordTestRun, method testRecordScannableAndWritableIsOK. The test verifies that a dataset wrapper that is both record-scannable and record-writable can be enabled for explore, and that Hive describes its single string column correctly.

@Test
public void testRecordScannableAndWritableIsOK() throws Exception {
    DatasetId instanceId = NAMESPACE_ID.dataset("tabul");
    datasetFramework.addInstance("TableWrapper", instanceId, DatasetProperties.builder()
        .add(DatasetProperties.SCHEMA,
             Schema.recordOf("intRecord", Schema.Field.of("x", Schema.of(Schema.Type.STRING))).toString())
        .build());
    DatasetSpecification spec = datasetFramework.getDatasetSpec(instanceId);
    try {
        exploreTableManager.enableDataset(instanceId, spec, false);
        runCommand(NAMESPACE_ID, "describe dataset_tabul", true,
            Lists.newArrayList(
                new ColumnDesc("col_name", "STRING", 1, "from deserializer"),
                new ColumnDesc("data_type", "STRING", 2, "from deserializer"),
                new ColumnDesc("comment", "STRING", 3, "from deserializer")),
            Lists.newArrayList(
                new QueryResult(Lists.<Object>newArrayList("x", "string", "from deserializer"))));
    } finally {
        datasetFramework.deleteInstance(instanceId);
    }
}
Also used : QueryResult(co.cask.cdap.proto.QueryResult) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) ColumnDesc(co.cask.cdap.proto.ColumnDesc) DatasetId(co.cask.cdap.proto.id.DatasetId) Test(org.junit.Test)
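
For reference, the schema-bearing properties in the addInstance call above can be built step by step. This sketch uses only calls shown in the example (Schema here is co.cask.cdap.api.data.schema.Schema):

// Build a record schema with a single string field "x" and attach its JSON
// form to the dataset properties under the reserved schema key.
Schema schema = Schema.recordOf("intRecord",
    Schema.Field.of("x", Schema.of(Schema.Type.STRING)));
DatasetProperties props = DatasetProperties.builder()
    .add(DatasetProperties.SCHEMA, schema.toString())
    .build();
datasetFramework.addInstance("TableWrapper", instanceId, props);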

Example 53 with DatasetSpecification

Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

From the class HBaseTableTest, method testEnforceTxLifetime. The test verifies that every kind of mutation against an HBaseTable (put, delete, increment, and readless increment) fails at commit when the configured maximum transaction lifetime has already elapsed.

@Test
public void testEnforceTxLifetime() throws Exception {
    String tableName = "enforce-tx-lifetime";
    DatasetProperties datasetProperties = TableProperties.builder()
        .setReadlessIncrementSupport(true)
        .setConflictDetection(ConflictDetection.COLUMN)
        .build();
    DatasetAdmin admin = getTableAdmin(CONTEXT1, tableName, datasetProperties);
    admin.create();
    DetachedTxSystemClient txSystemClient = new DetachedTxSystemClient();
    DatasetSpecification spec = DatasetSpecification.builder(tableName, HBaseTable.class.getName())
        .properties(datasetProperties.getProperties())
        .build();
    try {
        final HBaseTable table = new HBaseTable(CONTEXT1, spec, Collections.<String, String>emptyMap(),
                                                cConf, TEST_HBASE.getConfiguration(), hBaseTableUtil);
        Transaction tx = txSystemClient.startShort();
        table.startTx(tx);
        table.put(b("row1"), b("col1"), b("val1"));
        table.put(b("inc1"), b("col1"), Bytes.toBytes(10L));
        table.commitTx();
        table.postTxCommit();
        table.close();
        CConfiguration testCConf = CConfiguration.copy(cConf);
        // No mutations on tables using testCConf will succeed.
        testCConf.setInt(TxConstants.Manager.CFG_TX_MAX_LIFETIME, 0);
        try (final HBaseTable failTable = new HBaseTable(CONTEXT1, spec, Collections.<String, String>emptyMap(),
                                                         testCConf, TEST_HBASE.getConfiguration(), hBaseTableUtil)) {
            // A put should fail
            assertTxFail(txSystemClient, failTable, new Runnable() {

                @Override
                public void run() {
                    failTable.put(b("row2"), b("col1"), b("val1"));
                }
            });
            // A delete should also fail
            assertTxFail(txSystemClient, failTable, new Runnable() {

                @Override
                public void run() {
                    failTable.delete(b("row1"));
                }
            });
            assertTxFail(txSystemClient, failTable, new Runnable() {

                @Override
                public void run() {
                    failTable.delete(b("row1"), b("col1"));
                }
            });
            // So should an increment
            assertTxFail(txSystemClient, failTable, new Runnable() {

                @Override
                public void run() {
                    failTable.increment(b("inc1"), b("col1"), 10);
                }
            });
            // incrementAndGet gets converted to a put internally
            assertTxFail(txSystemClient, failTable, new Runnable() {

                @Override
                public void run() {
                    failTable.incrementAndGet(b("inc1"), b("col1"), 10);
                }
            });
        }
        // Even safe increments should fail (this happens when readless increment is done from a mapreduce job)
        try (final HBaseTable failTable = new HBaseTable(CONTEXT1, spec, ImmutableMap.of(HBaseTable.SAFE_INCREMENTS, "true"),
                                                         testCConf, TEST_HBASE.getConfiguration(), hBaseTableUtil)) {
            // An increment should fail even in safe-increments mode
            assertTxFail(txSystemClient, failTable, new Runnable() {

                @Override
                public void run() {
                    failTable.increment(b("inc1"), b("col1"), 10);
                }
            });
            // incrementAndGet gets converted to a put internally
            assertTxFail(txSystemClient, failTable, new Runnable() {

                @Override
                public void run() {
                    failTable.incrementAndGet(b("inc1"), b("col1"), 10);
                }
            });
        }
    } finally {
        admin.drop();
        admin.close();
    }
}
Also used : Transaction(org.apache.tephra.Transaction) DatasetProperties(co.cask.cdap.api.dataset.DatasetProperties) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) DatasetAdmin(co.cask.cdap.api.dataset.DatasetAdmin) DetachedTxSystemClient(org.apache.tephra.inmemory.DetachedTxSystemClient) CConfiguration(co.cask.cdap.common.conf.CConfiguration) BufferingTableTest(co.cask.cdap.data2.dataset2.lib.table.BufferingTableTest) Test(org.junit.Test)
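
The assertTxFail helper is not shown on this page. A plausible sketch of its shape follows; the signature and the breadth of the catch are assumptions, but the flow (start a short transaction, apply the mutation, expect the commit to be rejected) is what the test above relies on:

// Hypothetical shape of the assertTxFail helper: with the max tx lifetime
// set to 0, the buffered mutation must be rejected when it is flushed at
// commit time.
private void assertTxFail(DetachedTxSystemClient txClient, HBaseTable table, Runnable op) throws Exception {
    Transaction tx = txClient.startShort();
    table.startTx(tx);
    op.run();
    try {
        table.commitTx();
        Assert.fail("Expected the mutation to fail the tx-lifetime check");
    } catch (Exception e) {
        // expected: the transaction's lifetime can no longer be guaranteed
    }
}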

Example 54 with DatasetSpecification

Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

From the class HBaseTableTest, method testTTL. The test verifies that a one-second TTL expires cells on read, while a table configured with Tables.NO_TTL retains every write.

@Test
public void testTTL() throws Exception {
    // For the purpose of this test it is fine not to configure a TTL when creating the table:
    // we want to see whether it applies on reading.
    int ttl = 1;
    String ttlTable = "ttl";
    String noTtlTable = "nottl";
    DatasetProperties props = TableProperties.builder().setTTL(ttl).build();
    getTableAdmin(CONTEXT1, ttlTable, props).create();
    DatasetSpecification ttlTableSpec = DatasetSpecification.builder(ttlTable, HBaseTable.class.getName())
        .properties(props.getProperties())
        .build();
    HBaseTable table = new HBaseTable(CONTEXT1, ttlTableSpec, Collections.<String, String>emptyMap(),
                                      cConf, TEST_HBASE.getConfiguration(), hBaseTableUtil);
    DetachedTxSystemClient txSystemClient = new DetachedTxSystemClient();
    Transaction tx = txSystemClient.startShort();
    table.startTx(tx);
    table.put(b("row1"), b("col1"), b("val1"));
    table.commitTx();
    // TTL is one second; sleep just past it so the first write expires
    TimeUnit.MILLISECONDS.sleep(1010);
    tx = txSystemClient.startShort();
    table.startTx(tx);
    table.put(b("row2"), b("col2"), b("val2"));
    table.commitTx();
    // now, we should not see first as it should have expired, but see the last one
    tx = txSystemClient.startShort();
    table.startTx(tx);
    byte[] val = table.get(b("row1"), b("col1"));
    if (val != null) {
        LOG.info("Unexpected value " + Bytes.toStringBinary(val));
    }
    Assert.assertNull(val);
    Assert.assertArrayEquals(b("val2"), table.get(b("row2"), b("col2")));
    // test a table with no TTL
    DatasetProperties props2 = TableProperties.builder().setTTL(Tables.NO_TTL).build();
    getTableAdmin(CONTEXT1, noTtlTable, props2).create();
    DatasetSpecification noTtlTableSpec = DatasetSpecification.builder(noTtlTable, HBaseTable.class.getName())
        .properties(props2.getProperties())
        .build();
    HBaseTable table2 = new HBaseTable(CONTEXT1, noTtlTableSpec, Collections.<String, String>emptyMap(),
                                       cConf, TEST_HBASE.getConfiguration(), hBaseTableUtil);
    tx = txSystemClient.startShort();
    table2.startTx(tx);
    table2.put(b("row1"), b("col1"), b("val1"));
    table2.commitTx();
    TimeUnit.SECONDS.sleep(2);
    tx = txSystemClient.startShort();
    table2.startTx(tx);
    table2.put(b("row2"), b("col2"), b("val2"));
    table2.commitTx();
    // if ttl is -1 (unlimited), it should see both
    tx = txSystemClient.startShort();
    table2.startTx(tx);
    Assert.assertArrayEquals(b("val1"), table2.get(b("row1"), b("col1")));
    Assert.assertArrayEquals(b("val2"), table2.get(b("row2"), b("col2")));
}
Also used : Transaction(org.apache.tephra.Transaction) DatasetProperties(co.cask.cdap.api.dataset.DatasetProperties) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) DetachedTxSystemClient(org.apache.tephra.inmemory.DetachedTxSystemClient) BufferingTableTest(co.cask.cdap.data2.dataset2.lib.table.BufferingTableTest) Test(org.junit.Test)
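
Note that TableProperties.setTTL takes the time-to-live in seconds; that interpretation is inferred from the test itself, where a TTL of 1 plus a 1010 ms sleep is enough to expire the first write. A minimal sketch of the two configurations used above:

// TTL is given in seconds; Tables.NO_TTL (-1) disables expiry entirely.
DatasetProperties oneSecondTtl = TableProperties.builder()
    .setTTL(1)              // cells become invisible one second after write
    .build();
DatasetProperties unlimited = TableProperties.builder()
    .setTTL(Tables.NO_TTL)  // no expiry: both writes stay readable
    .build();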

Example 55 with DatasetSpecification

Use of co.cask.cdap.api.dataset.DatasetSpecification in project cdap by caskdata.

From the class ExploreExecutorHttpHandler, method updateDataset. The handler updates the explore (Hive) table of a dataset after its specification changes, short-circuiting to QueryHandle.NO_OP when the old and new specs are equal.

/**
 * Update ad-hoc exploration of a dataset instance after its specification has changed.
 */
@POST
@Path("datasets/{dataset}/update")
@AuditPolicy(AuditDetail.REQUEST_BODY)
public void updateDataset(FullHttpRequest request, HttpResponder responder,
                          @PathParam("namespace-id") String namespace,
                          @PathParam("dataset") String datasetName) throws BadRequestException {
    final DatasetId datasetId = new DatasetId(namespace, datasetName);
    try {
        UpdateExploreParameters params = readUpdateParameters(request);
        final DatasetSpecification oldSpec = params.getOldSpec();
        final DatasetSpecification datasetSpec = params.getNewSpec();
        QueryHandle handle;
        if (oldSpec.equals(datasetSpec)) {
            handle = QueryHandle.NO_OP;
        } else {
            handle = impersonator.doAs(datasetId, new Callable<QueryHandle>() {

                @Override
                public QueryHandle call() throws Exception {
                    return exploreTableManager.updateDataset(datasetId, datasetSpec, oldSpec);
                }
            });
        }
        JsonObject json = new JsonObject();
        json.addProperty("handle", handle.getHandle());
        responder.sendJson(HttpResponseStatus.OK, json.toString());
    } catch (IllegalArgumentException e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, e.getMessage());
    } catch (ExploreException e) {
        responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Error updating explore on dataset " + datasetId);
    } catch (SQLException e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "SQL exception while trying to update explore on dataset " + datasetId);
    } catch (UnsupportedTypeException e) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "Schema for dataset " + datasetId + " is not supported for exploration: " + e.getMessage());
    } catch (Throwable e) {
        LOG.error("Got exception:", e);
        responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
    }
}
Also used : UpdateExploreParameters(co.cask.cdap.explore.client.UpdateExploreParameters) SQLException(java.sql.SQLException) DatasetSpecification(co.cask.cdap.api.dataset.DatasetSpecification) JsonObject(com.google.gson.JsonObject) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) QueryHandle(co.cask.cdap.proto.QueryHandle) Callable(java.util.concurrent.Callable) DatasetId(co.cask.cdap.proto.id.DatasetId) ExploreException(co.cask.cdap.explore.service.ExploreException) Path(javax.ws.rs.Path) AuditPolicy(co.cask.cdap.common.security.AuditPolicy) POST(javax.ws.rs.POST)
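
On success the handler writes a single-field JSON object, {"handle": "<query-handle-id>"}. A hedged client-side sketch of consuming that body follows (responseText is a hypothetical String holding the HTTP response; Gson is already used in this codebase):

// Parse the {"handle": "..."} body and detect the no-op case, in which no
// Hive DDL was issued because the old and new specs were equal.
JsonObject body = new Gson().fromJson(responseText, JsonObject.class);
String handle = body.get("handle").getAsString();
if (QueryHandle.NO_OP.getHandle().equals(handle)) {
    // nothing to poll for: the update was a no-op
}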

Aggregations

Types most often used together with DatasetSpecification across the indexed examples (usage counts):

DatasetSpecification (co.cask.cdap.api.dataset.DatasetSpecification): 72
DatasetId (co.cask.cdap.proto.id.DatasetId): 21
DatasetProperties (co.cask.cdap.api.dataset.DatasetProperties): 17
IncompatibleUpdateException (co.cask.cdap.api.dataset.IncompatibleUpdateException): 15
Test (org.junit.Test): 14
DatasetDefinition (co.cask.cdap.api.dataset.DatasetDefinition): 11
DatasetManagementException (co.cask.cdap.api.dataset.DatasetManagementException): 10
POST (javax.ws.rs.POST): 10
Path (javax.ws.rs.Path): 10
DatasetAdmin (co.cask.cdap.api.dataset.DatasetAdmin): 9
DatasetTypeMeta (co.cask.cdap.proto.DatasetTypeMeta): 9
NotFoundException (co.cask.cdap.common.NotFoundException): 8
AbstractDatasetDefinition (co.cask.cdap.api.dataset.lib.AbstractDatasetDefinition): 7
BadRequestException (co.cask.cdap.common.BadRequestException): 7
IOException (java.io.IOException): 7
DatasetSpecificationSummary (co.cask.cdap.proto.DatasetSpecificationSummary): 6
Map (java.util.Map): 6
DatasetNotFoundException (co.cask.cdap.common.DatasetNotFoundException): 5
Reconfigurable (co.cask.cdap.api.dataset.Reconfigurable): 4
Updatable (co.cask.cdap.api.dataset.Updatable): 4