Search in sources :

Example 6 with Partitioning

use of io.cdap.cdap.api.dataset.lib.Partitioning in project cdap by cdapio.

In the class PartitionedFileSetDefinition, the method configure:

@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
    // The supplied properties must declare a partitioning; fail fast if they do not.
    Map<String, String> rawProps = properties.getProperties();
    Partitioning partitioning = PartitionedFileSetProperties.getPartitioning(rawProps);
    Preconditions.checkArgument(partitioning != null, "Properties do not contain partitioning");
    // Configure the partitions table with the columns to index on.
    DatasetProperties partitionTableProps = DatasetProperties.builder()
        .addAll(rawProps)
        .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, INDEXED_COLS)
        .build();
    Map<String, String> pfsProps = new HashMap<>(rawProps);
    // The NAME_AS_BASE_PATH_DEFAULT property lets us distinguish datasets created
    // before the base path was explicitly set from those created after. This matters
    // when a pfs is updated: previously created datasets must keep the old
    // base-path behavior.
    String nameAsBasePath = rawProps.get(NAME_AS_BASE_PATH_DEFAULT);
    boolean defaultBasePathToName = nameAsBasePath == null || Boolean.parseBoolean(nameAsBasePath);
    DatasetProperties.Builder filesetProps = DatasetProperties.builder().addAll(rawProps);
    // Default the base path to the instance name. When the dataset is deleted, only
    // the 'files' dir is deleted, not the dataset-name dir itself.
    if (defaultBasePathToName && !rawProps.containsKey(FileSetProperties.BASE_PATH)) {
        filesetProps.add(FileSetProperties.BASE_PATH, instanceName);
        pfsProps.put(NAME_AS_BASE_PATH_DEFAULT, Boolean.TRUE.toString());
    }
    return DatasetSpecification.builder(instanceName, getName())
        .properties(pfsProps)
        .datasets(filesetDef.configure(FILESET_NAME, filesetProps.build()),
                  indexedTableDef.configure(PARTITION_TABLE_NAME, partitionTableProps))
        .build();
}
Also used : Partitioning(io.cdap.cdap.api.dataset.lib.Partitioning) HashMap(java.util.HashMap) DatasetProperties(io.cdap.cdap.api.dataset.DatasetProperties)

Example 7 with Partitioning

use of io.cdap.cdap.api.dataset.lib.Partitioning in project cdap by cdapio.

In the class PartitionedFileSetDefinition, the method getDataset:

@Override
public PartitionedFileSet getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) throws IOException {
    // The spec's properties must carry the partitioning.
    Partitioning partitioning = PartitionedFileSetProperties.getPartitioning(spec.getProperties());
    // Make any necessary updates to the arguments before instantiating the embedded datasets.
    Map<String, String> effectiveArgs = updateArgumentsIfNeeded(arguments, partitioning);
    FileSet files =
        filesetDef.getDataset(datasetContext, spec.getSpecification(FILESET_NAME), effectiveArgs, classLoader);
    IndexedTable partitionsTable =
        indexedTableDef.getDataset(datasetContext, spec.getSpecification(PARTITION_TABLE_NAME), effectiveArgs, classLoader);
    return new PartitionedFileSetDataset(datasetContext, spec.getName(), partitioning, files, partitionsTable,
                                         spec, effectiveArgs, getExploreProvider());
}
Also used : Partitioning(io.cdap.cdap.api.dataset.lib.Partitioning) PartitionedFileSet(io.cdap.cdap.api.dataset.lib.PartitionedFileSet) FileSet(io.cdap.cdap.api.dataset.lib.FileSet) IndexedTable(io.cdap.cdap.api.dataset.lib.IndexedTable)

Example 8 with Partitioning

use of io.cdap.cdap.api.dataset.lib.Partitioning in project cdap by cdapio.

In the class ExploreTableManager, the method generateFileSetCreateStatement:

/**
 * Generate a create statement for a ((time-)partitioned) file set.
 *
 * @param datasetId the dataset id
 * @param dataset the instantiated dataset
 * @param properties the properties from dataset specification
 * @param truncating whether this call to create() is part of a truncate() operation. The effect is:
 *                   If possessExisting is true, then the truncate() has just dropped this
 *                   dataset and that deleted the explore table: we must recreate it.
 *
 * @return a CREATE TABLE statement, or null if the dataset is not explorable
 * @throws IllegalArgumentException if the schema cannot be parsed, or if shouldErrorOnMissingSchema is true and
 *                                  the dataset spec does not contain a schema.
 * @throws ExploreException if the dataset is configured to reuse an existing explore table
 *                          but that table does not exist
 */
@Nullable
private String generateFileSetCreateStatement(DatasetId datasetId, Dataset dataset, Map<String, String> properties, boolean truncating) throws IllegalArgumentException, ExploreException {
    String tableName = tableNaming.getTableName(datasetId, properties);
    String databaseName = ExploreProperties.getExploreDatabaseName(properties);
    Map<String, String> tableProperties = FileSetProperties.getTableProperties(properties);
    // if this dataset reuses an existing table, do not attempt to create it
    if (FileSetProperties.isUseExisting(tableProperties) || (FileSetProperties.isPossessExisting(tableProperties) && !truncating)) {
        try {
            exploreService.getTableInfo(datasetId.getNamespace(), databaseName, tableName);
            // table exists: do not attempt to create
            return null;
        } catch (TableNotFoundException e) {
            // NOTE(review): if ExploreException has a (String, Throwable) constructor, pass 'e'
            // here as well to preserve the original stack trace — TODO confirm.
            throw new ExploreException(String.format("Dataset '%s' is configured to use an existing explore table, but table '%s' does not " + "exist in database '%s'. ", datasetId.getDataset(), tableName, databaseName));
        }
    }
    // determine the table location; partitioned file sets additionally carry a partitioning
    Location baseLocation;
    Partitioning partitioning = null;
    if (dataset instanceof PartitionedFileSet) {
        partitioning = ((PartitionedFileSet) dataset).getPartitioning();
        baseLocation = ((PartitionedFileSet) dataset).getEmbeddedFileSet().getBaseLocation();
    } else {
        baseLocation = ((FileSet) dataset).getBaseLocation();
    }
    CreateStatementBuilder createStatementBuilder = new CreateStatementBuilder(datasetId.getDataset(), databaseName, tableName, shouldEscapeColumns).setLocation(baseLocation).setPartitioning(partitioning).setTableProperties(tableProperties);
    String schema = FileSetProperties.getExploreSchema(properties);
    String format = FileSetProperties.getExploreFormat(properties);
    if (format != null) {
        if ("parquet".equals(format)) {
            // reuse the schema read above instead of re-reading it from the properties
            return createStatementBuilder.setSchema(schema).buildWithFileFormat("parquet");
        }
        // for text and csv, we know what to do
        Preconditions.checkArgument("text".equals(format) || "csv".equals(format), "Only text and csv are supported as native formats");
        Preconditions.checkNotNull(schema, "for native formats, explore schema must be given in dataset properties");
        String delimiter = null;
        if ("text".equals(format)) {
            delimiter = FileSetProperties.getExploreFormatProperties(properties).get("delimiter");
        } else if ("csv".equals(format)) {
            delimiter = ",";
        }
        return createStatementBuilder.setSchema(schema).setRowFormatDelimited(delimiter, null).buildWithFileFormat("TEXTFILE");
    } else {
        // no explicit format: tables can still be created by setting the avro.schema.literal table property
        if (schema != null) {
            createStatementBuilder.setSchema(schema);
        }
        // format not given, look for serde, input format, etc.
        String serde = FileSetProperties.getSerDe(properties);
        String inputFormat = FileSetProperties.getExploreInputFormat(properties);
        String outputFormat = FileSetProperties.getExploreOutputFormat(properties);
        Preconditions.checkArgument(serde != null && inputFormat != null && outputFormat != null, "All of SerDe, InputFormat and OutputFormat must be given in dataset properties");
        return createStatementBuilder.setRowFormatSerde(serde).buildWithFormats(inputFormat, outputFormat);
    }
}
Also used : Partitioning(io.cdap.cdap.api.dataset.lib.Partitioning) CreateStatementBuilder(io.cdap.cdap.explore.table.CreateStatementBuilder) PartitionedFileSet(io.cdap.cdap.api.dataset.lib.PartitionedFileSet) Location(org.apache.twill.filesystem.Location) Nullable(javax.annotation.Nullable)

Example 9 with Partitioning

use of io.cdap.cdap.api.dataset.lib.Partitioning in project cdap by cdapio.

In the class ExploreExecutorHttpHandler, the method doPartitionOperation:

/**
 * Instantiates the dataset, validates that it is a partitioned file set, parses a partition
 * key from the JSON request body, and submits the given partition operation, responding with
 * the resulting query handle (or an error status at each failure point).
 */
private void doPartitionOperation(FullHttpRequest request, HttpResponder responder, DatasetId datasetId, PartitionOperation partitionOperation) {
    // the instantiator is closed when this try block exits
    try (SystemDatasetInstantiator datasetInstantiator = datasetInstantiatorFactory.create()) {
        Dataset dataset;
        try {
            dataset = datasetInstantiator.getDataset(datasetId);
        } catch (Exception e) {
            LOG.error("Exception instantiating dataset {}.", datasetId, e);
            responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, "Exception instantiating dataset " + datasetId);
            return;
        }
        try {
            // the operation only makes sense for partitioned file sets
            if (!(dataset instanceof PartitionedFileSet)) {
                responder.sendString(HttpResponseStatus.BAD_REQUEST, "not a partitioned dataset.");
                return;
            }
            Partitioning partitioning = ((PartitionedFileSet) dataset).getPartitioning();
            // the request body is a JSON map of string properties
            Reader reader = new InputStreamReader(new ByteBufInputStream(request.content()));
            Map<String, String> properties = GSON.fromJson(reader, new TypeToken<Map<String, String>>() {
            }.getType());
            PartitionKey partitionKey;
            try {
                // derive the partition key from the properties, validated against the partitioning
                partitionKey = PartitionedFileSetArguments.getOutputPartitionKey(properties, partitioning);
            } catch (Exception e) {
                responder.sendString(HttpResponseStatus.BAD_REQUEST, "invalid partition key: " + e.getMessage());
                return;
            }
            if (partitionKey == null) {
                responder.sendString(HttpResponseStatus.BAD_REQUEST, "no partition key was given.");
                return;
            }
            QueryHandle handle = partitionOperation.submitOperation(partitionKey, properties);
            // a null handle means the operation already responded — nothing more to do here
            if (handle == null) {
                return;
            }
            JsonObject json = new JsonObject();
            json.addProperty("handle", handle.getHandle());
            responder.sendJson(HttpResponseStatus.OK, json.toString());
        } finally {
            // always release the dataset, regardless of the outcome above
            Closeables.closeQuietly(dataset);
        }
    } catch (Throwable e) {
        // last-resort handler: anything unexpected becomes a 500
        LOG.error("Got exception:", e);
        responder.sendString(HttpResponseStatus.INTERNAL_SERVER_ERROR, e.getMessage());
    }
}
Also used : InputStreamReader(java.io.InputStreamReader) Dataset(io.cdap.cdap.api.dataset.Dataset) Reader(java.io.Reader) InputStreamReader(java.io.InputStreamReader) JsonObject(com.google.gson.JsonObject) PartitionedFileSet(io.cdap.cdap.api.dataset.lib.PartitionedFileSet) ByteBufInputStream(io.netty.buffer.ByteBufInputStream) ExploreException(io.cdap.cdap.explore.service.ExploreException) UnsupportedTypeException(io.cdap.cdap.api.data.schema.UnsupportedTypeException) UnauthorizedException(io.cdap.cdap.security.spi.authorization.UnauthorizedException) SQLException(java.sql.SQLException) JsonSyntaxException(com.google.gson.JsonSyntaxException) DatasetManagementException(io.cdap.cdap.api.dataset.DatasetManagementException) IOException(java.io.IOException) BadRequestException(io.cdap.cdap.common.BadRequestException) Partitioning(io.cdap.cdap.api.dataset.lib.Partitioning) SystemDatasetInstantiator(io.cdap.cdap.data.dataset.SystemDatasetInstantiator) TypeToken(com.google.common.reflect.TypeToken) PartitionKey(io.cdap.cdap.api.dataset.lib.PartitionKey) QueryHandle(io.cdap.cdap.proto.QueryHandle)

Example 10 with Partitioning

use of io.cdap.cdap.api.dataset.lib.Partitioning in project cdap by cdapio.

In the class PartitioningTest, the method testBuilderGetters:

/**
 * Verifies that fields added via the generic addField and the typed addXxxField builder
 * methods are all retrievable by name with the correct type, that an unknown field name
 * yields null, and that getFields() exposes exactly the added field names.
 */
@Test
public void testBuilderGetters() {
    Partitioning partitioning = Partitioning.builder().addField("a", FieldType.STRING).addField("b", FieldType.INT).addField("c", FieldType.LONG).addStringField("d").addIntField("e").addLongField("f").build();
    Assert.assertEquals(FieldType.STRING, partitioning.getFieldType("a"));
    Assert.assertEquals(FieldType.INT, partitioning.getFieldType("b"));
    Assert.assertEquals(FieldType.LONG, partitioning.getFieldType("c"));
    Assert.assertEquals(FieldType.STRING, partitioning.getFieldType("d"));
    Assert.assertEquals(FieldType.INT, partitioning.getFieldType("e"));
    Assert.assertEquals(FieldType.LONG, partitioning.getFieldType("f"));
    Assert.assertNull(partitioning.getFieldType("x"));
    // expected value first, actual second — consistent with the asserts above
    Assert.assertEquals(ImmutableSet.of("a", "b", "c", "d", "e", "f"), partitioning.getFields().keySet());
}
Also used : Partitioning(io.cdap.cdap.api.dataset.lib.Partitioning) Test(org.junit.Test)

Aggregations

Partitioning (io.cdap.cdap.api.dataset.lib.Partitioning)22 Test (org.junit.Test)10 PartitionedFileSet (io.cdap.cdap.api.dataset.lib.PartitionedFileSet)6 HashMap (java.util.HashMap)6 DatasetProperties (io.cdap.cdap.api.dataset.DatasetProperties)4 PartitionKey (io.cdap.cdap.api.dataset.lib.PartitionKey)4 TypeToken (com.google.common.reflect.TypeToken)2 JsonObject (com.google.gson.JsonObject)2 JsonSyntaxException (com.google.gson.JsonSyntaxException)2 Schema (io.cdap.cdap.api.data.schema.Schema)2 UnsupportedTypeException (io.cdap.cdap.api.data.schema.UnsupportedTypeException)2 Dataset (io.cdap.cdap.api.dataset.Dataset)2 DatasetManagementException (io.cdap.cdap.api.dataset.DatasetManagementException)2 DatasetSpecification (io.cdap.cdap.api.dataset.DatasetSpecification)2 IncompatibleUpdateException (io.cdap.cdap.api.dataset.IncompatibleUpdateException)2 DynamicPartitioner (io.cdap.cdap.api.dataset.lib.DynamicPartitioner)2 FileSet (io.cdap.cdap.api.dataset.lib.FileSet)2 IndexedTable (io.cdap.cdap.api.dataset.lib.IndexedTable)2 BadRequestException (io.cdap.cdap.common.BadRequestException)2 SystemDatasetInstantiator (io.cdap.cdap.data.dataset.SystemDatasetInstantiator)2