Example 86 with DataSetMetadata

use of org.talend.dataprep.api.dataset.DataSetMetadata in project data-prep by Talend.

the class SynchronousAnalysisEnd method analyze.

@Override
public void analyze(String dataSetId) {
    if (StringUtils.isEmpty(dataSetId)) {
        throw new IllegalArgumentException("Data set id cannot be null or empty.");
    }
    DistributedLock datasetLock = repository.createDatasetMetadataLock(dataSetId);
    datasetLock.lock();
    try {
        DataSetMetadata metadata = repository.get(dataSetId);
        if (metadata != null) {
            metadata.getLifecycle().setImporting(false);
            LOG.info("Finished content import of data set #{}.", dataSetId);
            repository.save(metadata);
        } else {
            LOG.info("Data set #{} no longer exists.", dataSetId); // $NON-NLS-1$
        }
    } finally {
        datasetLock.unlock();
    }
}
Also used : DistributedLock(org.talend.dataprep.lock.DistributedLock) DataSetMetadata(org.talend.dataprep.api.dataset.DataSetMetadata)
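
The method above shows the guard shape used throughout these examples: acquire the distributed lock, mutate and save inside try, release in finally. A minimal runnable sketch of that shape, using a plain ReentrantLock as a stand-in for DistributedLock (the helper names here are hypothetical, not part of data-prep):

import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

public class LockGuardSketch {

    // Stand-in for repository.createDatasetMetadataLock(dataSetId).
    private static final Lock LOCK = new ReentrantLock();

    // Hypothetical helper: runs a metadata mutation while the lock is held.
    static void updateUnderLock(String dataSetId, Runnable mutation) {
        LOCK.lock();
        try {
            // Reads and writes happen under the lock, so two analyses of the
            // same data set cannot interleave their get/save sequences.
            mutation.run();
        } finally {
            // Unlock in finally so an exception cannot leak the lock.
            LOCK.unlock();
        }
    }

    public static void main(String[] args) {
        updateUnderLock("ds-42", () -> System.out.println("importing flag cleared for ds-42"));
    }
}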

Example 87 with DataSetMetadata

use of org.talend.dataprep.api.dataset.DataSetMetadata in project data-prep by Talend.

the class DataSetService method updateRawDataSet.

/**
 * Updates a data set content and metadata. If no data set exists for the given id, the data set is silently created.
 *
 * @param dataSetId The id of the data set to be updated.
 * @param name The new name for the data set. An empty (or <code>null</code>) name does not update the dataset name.
 * @param dataSetContent The new content for the data set. If empty, the existing content will <b>not</b> be replaced.
 * For the delete operation, see {@link #delete(String)}.
 */
@RequestMapping(value = "/datasets/{id}/raw", method = PUT)
@ApiOperation(value = "Update a data set by id", notes = "Update a data set content based on provided id and PUT body. Id should be a UUID returned by the list operation. An invalid or non-existing data set id returns empty content. For documentation purposes, body is typed as 'text/plain' but operation accepts binary content too.")
@Timed
@VolumeMetered
public String updateRawDataSet( //
        @PathVariable(value = "id") @ApiParam(name = "id", value = "Id of the data set to update") String dataSetId, //
        @RequestParam(value = "name", required = false) @ApiParam(name = "name", value = "New value for the data set name") String name, //
        @RequestParam(value = "size", required = false) @ApiParam(name = "size", value = "The size of the dataSet") Long size, //
        @ApiParam(value = "content") InputStream dataSetContent) {
    LOG.debug("updating dataset content #{}", dataSetId);
    if (name != null) {
        checkDataSetName(name);
    }
    DataSetMetadata currentDataSetMetadata = dataSetMetadataRepository.get(dataSetId);
    if (currentDataSetMetadata == null) {
        return create(name, null, size, TEXT_PLAIN_VALUE, dataSetContent);
    } else {
        // just like creation, make sure an invalid size forbids the dataset update
        if (size != null && size < 0) {
            LOG.warn("invalid size provided {}", size);
            throw new TDPException(UNSUPPORTED_CONTENT);
        }
        final UpdateDataSetCacheKey cacheKey = new UpdateDataSetCacheKey(currentDataSetMetadata.getId());
        final DistributedLock lock = dataSetMetadataRepository.createDatasetMetadataLock(currentDataSetMetadata.getId());
        try {
            lock.lock();
            // check the size if it's available (quick win)
            if (size != null && size > 0) {
                quotaService.checkIfAddingSizeExceedsAvailableStorage(Math.abs(size - currentDataSetMetadata.getDataSetSize()));
            }
            final DataSetMetadataBuilder datasetBuilder = metadataBuilder.metadata().id(currentDataSetMetadata.getId());
            datasetBuilder.copyNonContentRelated(currentDataSetMetadata);
            datasetBuilder.modified(System.currentTimeMillis());
            if (!StringUtils.isEmpty(name)) {
                datasetBuilder.name(name);
            }
            final DataSetMetadata updatedDataSetMetadata = datasetBuilder.build();
            // Save data set content into cache to make sure there's enough space in the content store
            final long maxDataSetSizeAllowed = getMaxDataSetSizeAllowed();
            final StrictlyBoundedInputStream sizeCalculator = new StrictlyBoundedInputStream(dataSetContent, maxDataSetSizeAllowed);
            try (OutputStream cacheEntry = cacheManager.put(cacheKey, TimeToLive.DEFAULT)) {
                IOUtils.copy(sizeCalculator, cacheEntry);
            }
            // once fully copied to the cache, we know for sure that the content store has enough space, so let's copy
            // from the cache to the content store
            PipedInputStream toContentStore = new PipedInputStream();
            PipedOutputStream fromCache = new PipedOutputStream(toContentStore);
            Runnable r = () -> {
                try (final InputStream input = cacheManager.get(cacheKey)) {
                    IOUtils.copy(input, fromCache);
                    // it's important to close this stream, otherwise the piped stream will never close
                    fromCache.close();
                } catch (IOException e) {
                    throw new TDPException(UNABLE_TO_CREATE_OR_UPDATE_DATASET, e);
                }
            };
            executor.execute(r);
            contentStore.storeAsRaw(updatedDataSetMetadata, toContentStore);
            // update the dataset metadata with its new size
            updatedDataSetMetadata.setDataSetSize(sizeCalculator.getTotal());
            dataSetMetadataRepository.save(updatedDataSetMetadata);
            // publishing update event
            publisher.publishEvent(new DatasetUpdatedEvent(updatedDataSetMetadata));
        } catch (StrictlyBoundedInputStream.InputStreamTooLargeException e) {
            LOG.warn("Dataset update {} cannot be done, new content is too big", currentDataSetMetadata.getId());
            throw new TDPException(MAX_STORAGE_MAY_BE_EXCEEDED, e, build().put("limit", e.getMaxSize()));
        } catch (IOException e) {
            LOG.error("Error updating the dataset", e);
            throw new TDPException(UNABLE_TO_CREATE_OR_UPDATE_DATASET, e);
        } finally {
            dataSetContentToNull(dataSetContent);
            // whatever the outcome the cache needs to be cleaned
            if (cacheManager.has(cacheKey)) {
                cacheManager.evict(cacheKey);
            }
            lock.unlock();
        }
        // Content was changed, so queue events (format analysis, content indexing for search...)
        analyzeDataSet(currentDataSetMetadata.getId(), true, emptyList());
        return currentDataSetMetadata.getId();
    }
}
Also used : DataSetMetadataBuilder(org.talend.dataprep.dataset.DataSetMetadataBuilder) PipedInputStream(java.io.PipedInputStream) StrictlyBoundedInputStream(org.talend.dataprep.dataset.store.content.StrictlyBoundedInputStream) InputStream(java.io.InputStream) PipedOutputStream(java.io.PipedOutputStream) NullOutputStream(org.apache.commons.io.output.NullOutputStream) OutputStream(java.io.OutputStream) PipedOutputStream(java.io.PipedOutputStream) PipedInputStream(java.io.PipedInputStream) IOException(java.io.IOException) DataSetMetadata(org.talend.dataprep.api.dataset.DataSetMetadata) TDPException(org.talend.dataprep.exception.TDPException) DistributedLock(org.talend.dataprep.lock.DistributedLock) StrictlyBoundedInputStream(org.talend.dataprep.dataset.store.content.StrictlyBoundedInputStream) DatasetUpdatedEvent(org.talend.dataprep.dataset.event.DatasetUpdatedEvent) UpdateDataSetCacheKey(org.talend.dataprep.dataset.service.cache.UpdateDataSetCacheKey) VolumeMetered(org.talend.dataprep.metrics.VolumeMetered) Timed(org.talend.dataprep.metrics.Timed) ApiOperation(io.swagger.annotations.ApiOperation) RequestMapping(org.springframework.web.bind.annotation.RequestMapping)
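
The update above copies the content twice: first fully into the cache (which enforces the size limit via StrictlyBoundedInputStream), then from the cache to the content store through a PipedInputStream/PipedOutputStream pair fed by a background task. A self-contained sketch of that piped bridge, with a ByteArrayInputStream standing in for the cache entry and System.out for the content store (both stand-ins are assumptions, not the real data-prep APIs):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.io.UncheckedIOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class PipedCopySketch {

    public static void main(String[] args) throws IOException {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // Stand-in for cacheManager.get(cacheKey): content already size-checked.
        InputStream cached = new ByteArrayInputStream("id;name\n1;Ada\n".getBytes());

        PipedInputStream toStore = new PipedInputStream();
        PipedOutputStream fromCache = new PipedOutputStream(toStore);

        // The producer must run on another thread: a piped pair deadlocks if a
        // single thread tries to both write and read it.
        executor.execute(() -> {
            try (cached; fromCache) {
                cached.transferTo(fromCache);
                // closing fromCache (via try-with-resources) is what lets the
                // reader observe end-of-stream instead of blocking forever
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        });

        // Consumer side, stand-in for contentStore.storeAsRaw(metadata, toStore).
        try (toStore) {
            System.out.write(toStore.readAllBytes());
            System.out.flush();
        }
        executor.shutdown();
    }
}

The same design choice shows up in the service: the writer closes its end inside the background task, and the reader simply consumes until end-of-stream.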

Example 88 with DataSetMetadata

use of org.talend.dataprep.api.dataset.DataSetMetadata in project data-prep by Talend.

the class DataSetService method getDataStoreParameters.

@RequestMapping(value = "/datasets/{id}/datastore/properties", method = GET)
@ApiOperation(value = "Get the dataset import parameters", notes = "This list can be used by the user to change the dataset encoding.")
@Timed
// the effective return type is ComponentProperties (or a parameter list)
public Object getDataStoreParameters(@PathVariable("id") final String dataSetId) {
    DataSetMetadata dataSetMetadata = dataSetMetadataRepository.get(dataSetId);
    Object parametersToReturn = null;
    if (dataSetMetadata != null) {
        DataSetLocation matchingDatasetLocation = locationsService.findLocation(dataSetMetadata.getLocation().getLocationType());
        if (matchingDatasetLocation == null) {
            parametersToReturn = emptyList();
        } else {
            if (matchingDatasetLocation.isSchemaOriented()) {
                ComponentProperties parametersAsSchema = matchingDatasetLocation.getParametersAsSchema(getLocale());
                parametersAsSchema.setProperties(dataSetMetadata.getLocation().getParametersAsSchema(getLocale()).getProperties());
                parametersToReturn = parametersAsSchema;
            } else {
                parametersToReturn = matchingDatasetLocation.getParameters(getLocale());
            }
        }
    }
    return parametersToReturn;
}
Also used : DataSetLocation(org.talend.dataprep.api.dataset.DataSetLocation) ComponentProperties(org.talend.dataprep.parameters.jsonschema.ComponentProperties) DataSetMetadata(org.talend.dataprep.api.dataset.DataSetMetadata) Timed(org.talend.dataprep.metrics.Timed) ApiOperation(io.swagger.annotations.ApiOperation) RequestMapping(org.springframework.web.bind.annotation.RequestMapping)
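
The branching above is small but easy to misread: a missing data set yields null, an unknown location type yields an empty list, and only schema-oriented locations get their properties merged. A condensed sketch of the same decision, assuming simplified stand-ins for DataSetLocation and ComponentProperties (the real interfaces are richer):

import java.util.Collections;

public class DataStoreParametersSketch {

    // Simplified stand-in for DataSetLocation.
    interface Location {
        boolean isSchemaOriented();
        Object parameters();         // stand-in for getParameters(locale)
        Object parametersAsSchema(); // stand-in for getParametersAsSchema(locale)
    }

    static Object parametersFor(Location matching) {
        if (matching == null) {
            // unknown location type: fall back to an empty list, as the service does
            return Collections.emptyList();
        }
        // schema-oriented locations return a merged schema, others plain parameters
        return matching.isSchemaOriented() ? matching.parametersAsSchema() : matching.parameters();
    }

    public static void main(String[] args) {
        System.out.println(parametersFor(null)); // []
    }
}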

Example 89 with DataSetMetadata

use of org.talend.dataprep.api.dataset.DataSetMetadata in project data-prep by Talend.

the class DataSetService method create.

/**
 * Creates a new data set and returns the new data set id as text in the response.
 *
 * @param name An optional name for the new data set (might be <code>null</code>).
 * @param size An optional size for the newly created data set.
 * @param contentType the request content type.
 * @param content The raw content of the data set (might be a CSV, XLS...) or the connection parameters in case of a
 * remote CSV.
 * @return The new data set id.
 * @see DataSetService#get(boolean, boolean, String, String)
 */
// @formatter:off
@RequestMapping(value = "/datasets", method = POST, produces = TEXT_PLAIN_VALUE)
@ApiOperation(value = "Create a data set", produces = TEXT_PLAIN_VALUE, notes = "Create a new data set based on content provided in POST body. For documentation purposes, body is typed as 'text/plain' but operation accepts binary content too. Returns the id of the newly created data set.")
@Timed
@VolumeMetered
public String create(
        @ApiParam(value = "User readable name of the data set (e.g. 'Finance Report 2015', 'Test Data Set').") @RequestParam(defaultValue = "") String name,
        @ApiParam(value = "An optional tag to be added in data set metadata once created.") @RequestParam(defaultValue = "") String tag,
        @ApiParam(value = "Size of the data set, in bytes.") @RequestParam(required = false) Long size,
        @RequestHeader(CONTENT_TYPE) String contentType,
        @ApiParam(value = "content") InputStream content) {
    // @formatter:on
    checkDataSetName(name);
    final String id = UUID.randomUUID().toString();
    final Marker marker = Markers.dataset(id);
    LOG.debug(marker, "Creating...");
    // sanity check
    if (size != null && size < 0) {
        LOG.warn("invalid size provided {}", size);
        throw new TDPException(UNEXPECTED_CONTENT, build().put("size", size));
    }
    // check that the name is not already taken
    checkIfNameIsAvailable(name);
    // get the location out of the content type and the request body
    final DataSetLocation location;
    try {
        location = datasetLocator.getDataSetLocation(contentType, content);
    } catch (IOException e) {
        throw new TDPException(DataSetErrorCodes.UNABLE_TO_READ_DATASET_LOCATION, e);
    }
    DataSetMetadata dataSetMetadata = null;
    final TDPException hypotheticalException;
    try {
        // if the size is provided, check that adding it will not exceed the available quota
        if (size != null && size > 0) {
            quotaService.checkIfAddingSizeExceedsAvailableStorage(size);
        }
        dataSetMetadata = metadataBuilder.metadata() //
                .id(id) //
                .name(name) //
                .author(security.getUserId()) //
                .location(location) //
                .created(System.currentTimeMillis()) //
                .tag(tag) //
                .build();
        // Indicate data set is being imported
        dataSetMetadata.getLifecycle().setImporting(true);
        // Save data set content
        LOG.debug(marker, "Storing content...");
        final long maxDataSetSizeAllowed = getMaxDataSetSizeAllowed();
        final StrictlyBoundedInputStream sizeCalculator = new StrictlyBoundedInputStream(content, maxDataSetSizeAllowed);
        contentStore.storeAsRaw(dataSetMetadata, sizeCalculator);
        dataSetMetadata.setDataSetSize(sizeCalculator.getTotal());
        LOG.debug(marker, "Content stored.");
        // Create the new data set
        dataSetMetadataRepository.save(dataSetMetadata);
        LOG.debug(marker, "dataset metadata stored {}", dataSetMetadata);
        // Queue events (format analysis, content indexing for search...)
        analyzeDataSet(id, true, emptyList());
        LOG.debug(marker, "Created!");
        return id;
    } catch (StrictlyBoundedInputStream.InputStreamTooLargeException e) {
        hypotheticalException = new TDPException(MAX_STORAGE_MAY_BE_EXCEEDED, e, build().put("limit", e.getMaxSize()));
    } catch (TDPException e) {
        hypotheticalException = e;
    } catch (Exception e) {
        hypotheticalException = new TDPException(UNABLE_CREATE_DATASET, e);
    } finally {
        // because the client might still be writing the request content, closing the connection right now
        // might end up in a 'connection reset' or a 'broken pipe' error in the API.
        //
        // So, let's fully read the request content before closing the connection.
        dataSetContentToNull(content);
    }
    dataSetMetadataRepository.remove(id);
    if (dataSetMetadata != null) {
        try {
            contentStore.delete(dataSetMetadata);
        } catch (Exception e) {
            LOG.error("Unable to delete uploaded data.", e);
        }
    }
    throw hypotheticalException;
}
Also used : TDPException(org.talend.dataprep.exception.TDPException) DataSetLocation(org.talend.dataprep.api.dataset.DataSetLocation) StrictlyBoundedInputStream(org.talend.dataprep.dataset.store.content.StrictlyBoundedInputStream) Marker(org.slf4j.Marker) IOException(java.io.IOException) DataSetMetadata(org.talend.dataprep.api.dataset.DataSetMetadata) IOException(java.io.IOException) TDPException(org.talend.dataprep.exception.TDPException) VolumeMetered(org.talend.dataprep.metrics.VolumeMetered) Timed(org.talend.dataprep.metrics.Timed) ApiOperation(io.swagger.annotations.ApiOperation) RequestMapping(org.springframework.web.bind.annotation.RequestMapping)
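
The error handling in create() funnels every failure into hypotheticalException so that the cleanup path (drain the request body, remove the half-created metadata and content) runs exactly once before rethrowing, while the success path returns from inside the try. A reduced, runnable sketch of that shape; store(), drain() and cleanup() are hypothetical helpers standing in for the contentStore and repository calls:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class CreateCleanupSketch {

    static String create(InputStream content) throws IOException {
        final RuntimeException failure;
        try {
            store(content);           // any of these calls may throw...
            return "new-dataset-id";  // ...and a successful run returns here
        } catch (RuntimeException e) {
            failure = e;              // remember the cause, do not rethrow yet
        } finally {
            drain(content);           // always read the body fully before closing
        }
        cleanup();                    // undo partial writes only on the failure path
        throw failure;
    }

    // Hypothetical helpers standing in for contentStore / repository calls.
    static void store(InputStream in) { /* may throw at any point */ }
    static void drain(InputStream in) throws IOException { in.transferTo(OutputStream.nullOutputStream()); }
    static void cleanup() { }

    public static void main(String[] args) throws IOException {
        System.out.println(create(new ByteArrayInputStream(new byte[0])));
    }
}

Note that the compiler accepts the blank final here for the same reason it does in the service: the try block cannot complete normally, so the variable is definitely assigned whenever the trailing throw is reached.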

Example 90 with DataSetMetadata

use of org.talend.dataprep.api.dataset.DataSetMetadata in project data-prep by Talend.

the class DataSetService method preview.

/**
 * Returns a preview of the data set content for the given id (first 100 rows). The service might return
 * {@link org.apache.http.HttpStatus#SC_ACCEPTED} if the data set exists but analysis is not yet fully
 * completed so content is not yet ready to be served.
 *
 * @param metadata If <code>true</code>, includes data set metadata information.
 * @param sheetName the sheet name to preview
 * @param dataSetId A data set id.
 */
@RequestMapping(value = "/datasets/{id}/preview", method = RequestMethod.GET)
@ApiOperation(value = "Get a data set preview by id", notes = "Get a data set preview content based on the provided id. An invalid or non-existing data set id returns empty content. A data set not in draft status returns a 301 redirect.")
@Timed
@ResponseBody
public DataSet preview(@RequestParam(defaultValue = "true") @ApiParam(name = "metadata", value = "Include metadata information in the response") boolean metadata, @RequestParam(defaultValue = "") @ApiParam(name = "sheetName", value = "Sheet name to preview") String sheetName, @PathVariable(value = "id") @ApiParam(name = "id", value = "Id of the requested data set") String dataSetId) {
    DataSetMetadata dataSetMetadata = dataSetMetadataRepository.get(dataSetId);
    if (dataSetMetadata == null) {
        HttpResponseContext.status(HttpStatus.NO_CONTENT);
        // No data set, returns empty content.
        return DataSet.empty();
    }
    if (!dataSetMetadata.isDraft()) {
        // Moved to get data set content operation
        HttpResponseContext.status(HttpStatus.MOVED_PERMANENTLY);
        HttpResponseContext.header("Location", "/datasets/" + dataSetId + "/content");
        // the data set is no longer a draft, so a preview no longer applies
        return DataSet.empty();
    }
    if (StringUtils.isNotEmpty(sheetName)) {
        dataSetMetadata.setSheetName(sheetName);
    }
    // handle legacy data sets without a schema parser result
    if (dataSetMetadata.getSchemaParserResult() != null) {
        // sheet name not set yet, so use the first one
        if (StringUtils.isEmpty(dataSetMetadata.getSheetName())) {
            String theSheetName = dataSetMetadata.getSchemaParserResult().getSheetContents().get(0).getName();
            LOG.debug("preview for dataSetMetadata: {} with sheetName: {}", dataSetId, theSheetName);
            dataSetMetadata.setSheetName(theSheetName);
        }
        String theSheetName = dataSetMetadata.getSheetName();
        Optional<Schema.SheetContent> sheetContentFound = dataSetMetadata.getSchemaParserResult().getSheetContents().stream().filter(sheetContent -> theSheetName.equals(sheetContent.getName())).findFirst();
        if (!sheetContentFound.isPresent()) {
            HttpResponseContext.status(HttpStatus.NO_CONTENT);
            // No sheet found, returns empty content.
            return DataSet.empty();
        }
        List<ColumnMetadata> columnMetadatas = sheetContentFound.get().getColumnMetadatas();
        if (dataSetMetadata.getRowMetadata() == null) {
            dataSetMetadata.setRowMetadata(new RowMetadata(emptyList()));
        }
        dataSetMetadata.getRowMetadata().setColumns(columnMetadatas);
    } else {
        LOG.warn("dataset#{} has draft status but no schema parser result", dataSetId);
    }
    // Build the result
    DataSet dataSet = new DataSet();
    if (metadata) {
        dataSet.setMetadata(conversionService.convert(dataSetMetadata, UserDataSetMetadata.class));
    }
    dataSet.setRecords(contentStore.stream(dataSetMetadata).limit(100));
    return dataSet;
}
Also used : VolumeMetered(org.talend.dataprep.metrics.VolumeMetered) RequestParam(org.springframework.web.bind.annotation.RequestParam) ImportBuilder(org.talend.dataprep.api.dataset.Import.ImportBuilder) FormatFamilyFactory(org.talend.dataprep.schema.FormatFamilyFactory) Autowired(org.springframework.beans.factory.annotation.Autowired) ApiParam(io.swagger.annotations.ApiParam) StringUtils(org.apache.commons.lang3.StringUtils) TEXT_PLAIN_VALUE(org.springframework.http.MediaType.TEXT_PLAIN_VALUE) SortAndOrderHelper.getDataSetMetadataComparator(org.talend.dataprep.util.SortAndOrderHelper.getDataSetMetadataComparator) Collections.singletonList(java.util.Collections.singletonList) SemanticDomain(org.talend.dataprep.api.dataset.statistics.SemanticDomain) BeanConversionService(org.talend.dataprep.conversions.BeanConversionService) PipedInputStream(java.io.PipedInputStream) DistributedLock(org.talend.dataprep.lock.DistributedLock) Arrays.asList(java.util.Arrays.asList) Map(java.util.Map) DataprepBundle.message(org.talend.dataprep.i18n.DataprepBundle.message) UserData(org.talend.dataprep.api.user.UserData) TaskExecutor(org.springframework.core.task.TaskExecutor) MAX_STORAGE_MAY_BE_EXCEEDED(org.talend.dataprep.exception.error.DataSetErrorCodes.MAX_STORAGE_MAY_BE_EXCEEDED) DataSet(org.talend.dataprep.api.dataset.DataSet) LocalStoreLocation(org.talend.dataprep.api.dataset.location.LocalStoreLocation) FormatFamily(org.talend.dataprep.schema.FormatFamily) Resource(javax.annotation.Resource) Set(java.util.Set) DatasetUpdatedEvent(org.talend.dataprep.dataset.event.DatasetUpdatedEvent) RestController(org.springframework.web.bind.annotation.RestController) QuotaService(org.talend.dataprep.dataset.store.QuotaService) Stream(java.util.stream.Stream) StreamSupport.stream(java.util.stream.StreamSupport.stream) FlagNames(org.talend.dataprep.api.dataset.row.FlagNames) UNEXPECTED_CONTENT(org.talend.dataprep.exception.error.CommonErrorCodes.UNEXPECTED_CONTENT) Analyzers(org.talend.dataquality.common.inference.Analyzers) DataSetLocatorService(org.talend.dataprep.api.dataset.location.locator.DataSetLocatorService) Callable(java.util.concurrent.Callable) Schema(org.talend.dataprep.schema.Schema) ArrayList(java.util.ArrayList) Value(org.springframework.beans.factory.annotation.Value) RequestBody(org.springframework.web.bind.annotation.RequestBody) DataSetLocationService(org.talend.dataprep.api.dataset.location.DataSetLocationService) AnalyzerService(org.talend.dataprep.quality.AnalyzerService) UserDataRepository(org.talend.dataprep.user.store.UserDataRepository) Markers(org.talend.dataprep.log.Markers) Api(io.swagger.annotations.Api) DraftValidator(org.talend.dataprep.schema.DraftValidator) HttpResponseContext(org.talend.dataprep.http.HttpResponseContext) Sort(org.talend.dataprep.util.SortAndOrderHelper.Sort) IOException(java.io.IOException) PipedOutputStream(java.io.PipedOutputStream) FormatAnalysis(org.talend.dataprep.dataset.service.analysis.synchronous.FormatAnalysis) ContentAnalysis(org.talend.dataprep.dataset.service.analysis.synchronous.ContentAnalysis) SchemaAnalysis(org.talend.dataprep.dataset.service.analysis.synchronous.SchemaAnalysis) HttpStatus(org.springframework.http.HttpStatus) FilterService(org.talend.dataprep.api.filter.FilterService) Marker(org.slf4j.Marker) NullOutputStream(org.apache.commons.io.output.NullOutputStream) StatisticsAdapter(org.talend.dataprep.dataset.StatisticsAdapter) Timed(org.talend.dataprep.metrics.Timed) ColumnMetadata(org.talend.dataprep.api.dataset.ColumnMetadata) PathVariable(org.springframework.web.bind.annotation.PathVariable) DataSetMetadataBuilder(org.talend.dataprep.dataset.DataSetMetadataBuilder) URLDecoder(java.net.URLDecoder) DataSetErrorCodes(org.talend.dataprep.exception.error.DataSetErrorCodes) PUT(org.springframework.web.bind.annotation.RequestMethod.PUT) LoggerFactory(org.slf4j.LoggerFactory) SEMANTIC(org.talend.dataprep.quality.AnalyzerService.Analysis.SEMANTIC) ApiOperation(io.swagger.annotations.ApiOperation) UNABLE_TO_CREATE_OR_UPDATE_DATASET(org.talend.dataprep.exception.error.DataSetErrorCodes.UNABLE_TO_CREATE_OR_UPDATE_DATASET) DataSetRow(org.talend.dataprep.api.dataset.row.DataSetRow) StrictlyBoundedInputStream(org.talend.dataprep.dataset.store.content.StrictlyBoundedInputStream) DataSetMetadata(org.talend.dataprep.api.dataset.DataSetMetadata) UNSUPPORTED_CONTENT(org.talend.dataprep.exception.error.DataSetErrorCodes.UNSUPPORTED_CONTENT) TimeToLive(org.talend.dataprep.cache.ContentCache.TimeToLive) Order(org.talend.dataprep.util.SortAndOrderHelper.Order) Collections.emptyList(java.util.Collections.emptyList) PublicAPI(org.talend.dataprep.security.PublicAPI) RequestMethod(org.springframework.web.bind.annotation.RequestMethod) UUID(java.util.UUID) Collectors(java.util.stream.Collectors) ContentCache(org.talend.dataprep.cache.ContentCache) INVALID_DATASET_NAME(org.talend.dataprep.exception.error.DataSetErrorCodes.INVALID_DATASET_NAME) List(java.util.List) Optional(java.util.Optional) Analyzer(org.talend.dataquality.common.inference.Analyzer) RequestHeader(org.springframework.web.bind.annotation.RequestHeader) Pattern(java.util.regex.Pattern) Security(org.talend.dataprep.security.Security) Spliterator(java.util.Spliterator) RowMetadata(org.talend.dataprep.api.dataset.RowMetadata) ComponentProperties(org.talend.dataprep.parameters.jsonschema.ComponentProperties) TDPException(org.talend.dataprep.exception.TDPException) JsonErrorCodeDescription(org.talend.dataprep.exception.json.JsonErrorCodeDescription) RequestMapping(org.springframework.web.bind.annotation.RequestMapping) UNABLE_CREATE_DATASET(org.talend.dataprep.exception.error.DataSetErrorCodes.UNABLE_CREATE_DATASET) HashMap(java.util.HashMap) GET(org.springframework.web.bind.annotation.RequestMethod.GET) Import(org.talend.dataprep.api.dataset.Import) ExceptionContext.build(org.talend.daikon.exception.ExceptionContext.build) ExceptionContext(org.talend.daikon.exception.ExceptionContext) Charset(java.nio.charset.Charset) UpdateColumnParameters(org.talend.dataprep.dataset.service.api.UpdateColumnParameters) VersionService(org.talend.dataprep.api.service.info.VersionService) POST(org.springframework.web.bind.annotation.RequestMethod.POST) OutputStream(java.io.OutputStream) DataSetLocation(org.talend.dataprep.api.dataset.DataSetLocation) Logger(org.slf4j.Logger) LocaleContextHolder.getLocale(org.springframework.context.i18n.LocaleContextHolder.getLocale) UpdateDataSetCacheKey(org.talend.dataprep.dataset.service.cache.UpdateDataSetCacheKey) IOUtils(org.apache.commons.compress.utils.IOUtils) APPLICATION_JSON_VALUE(org.springframework.http.MediaType.APPLICATION_JSON_VALUE) ResponseBody(org.springframework.web.bind.annotation.ResponseBody) Certification(org.talend.dataprep.api.dataset.DataSetGovernance.Certification) EncodingSupport(org.talend.dataprep.configuration.EncodingSupport) Comparator(java.util.Comparator) InputStream(java.io.InputStream)
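
The sheet lookup in preview() (default to the first sheet when none is set, then match sheet contents by name) is worth isolating. A runnable sketch with a simplified record standing in for Schema.SheetContent; the record and method names here are assumptions for illustration:

import java.util.List;
import java.util.Optional;

public class SheetSelectionSketch {

    // Simplified stand-in for Schema.SheetContent.
    record SheetContent(String name) { }

    // Same lookup as preview(): fall back to the first sheet when the requested
    // name is empty, then find the matching content by name.
    static Optional<SheetContent> select(List<SheetContent> sheets, String requested) {
        String name = (requested == null || requested.isEmpty())
                ? sheets.get(0).name()   // default to the first sheet, as the service does
                : requested;
        return sheets.stream().filter(s -> name.equals(s.name())).findFirst();
    }

    public static void main(String[] args) {
        List<SheetContent> sheets = List.of(new SheetContent("Sheet1"), new SheetContent("Data"));
        System.out.println(select(sheets, ""));     // Optional[SheetContent[name=Sheet1]]
        System.out.println(select(sheets, "Data")); // Optional[SheetContent[name=Data]]
    }
}

An empty Optional here corresponds to the NO_CONTENT branch in the service, which returns DataSet.empty() when no sheet matches.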

Aggregations

DataSetMetadata (org.talend.dataprep.api.dataset.DataSetMetadata)192 Test (org.junit.Test)126 DataSetBaseTest (org.talend.dataprep.dataset.DataSetBaseTest)63 ColumnMetadata (org.talend.dataprep.api.dataset.ColumnMetadata)48 InputStream (java.io.InputStream)45 Matchers.containsString (org.hamcrest.Matchers.containsString)28 Matchers.isEmptyString (org.hamcrest.Matchers.isEmptyString)28 TDPException (org.talend.dataprep.exception.TDPException)26 RowMetadata (org.talend.dataprep.api.dataset.RowMetadata)20 DataSetServiceTest (org.talend.dataprep.dataset.service.DataSetServiceTest)20 ApiOperation (io.swagger.annotations.ApiOperation)18 DataSet (org.talend.dataprep.api.dataset.DataSet)18 Type (org.talend.dataprep.api.type.Type)17 Timed (org.talend.dataprep.metrics.Timed)17 DistributedLock (org.talend.dataprep.lock.DistributedLock)16 Autowired (org.springframework.beans.factory.annotation.Autowired)14 DataSetRow (org.talend.dataprep.api.dataset.row.DataSetRow)14 IOException (java.io.IOException)13 RequestMapping (org.springframework.web.bind.annotation.RequestMapping)13 ArrayList (java.util.ArrayList)12