
Example 1 with DataSet

use of com.thinkbiganalytics.metadata.api.catalog.DataSet in project kylo by Teradata.

the class FeedImporter method validateUserDatasources.

/**
 * Validates that user data sources can be imported with provided properties.
 * Legacy UserDatasources will be remapped to Catalog DataSources or DataSets.
 *
 * @return {@code true} if the feed can be imported, or {@code false} otherwise
 */
private boolean validateUserDatasources() {
    FeedMetadata metadata = importFeed.getFeedToImport();
    final UploadProgressMessage statusMessage = uploadProgressService.addUploadStatus(importFeed.getImportOptions().getUploadKey(), "Validating data sources.");
    // Gather the ids of the existing user data sources; the import needs assignments for any feed data source not in this set
    final Set<String> availableDatasources = metadataAccess.read(() -> datasourceProvider.getDatasources(datasourceProvider.datasetCriteria().type(UserDatasource.class)).stream().map(com.thinkbiganalytics.metadata.api.datasource.Datasource::getId).map(Object::toString).collect(Collectors.toSet()));
    final ImportComponentOption componentOption = importFeedOptions.findImportComponentOption(ImportComponent.USER_DATASOURCES);
    if (componentOption.getProperties().isEmpty()) {
        componentOption.setProperties(FeedImportDatasourceUtil.buildDatasourceAssignmentProperties(metadata, availableDatasources));
        // add in any catalogDataSourceIds
        if (metadata.getDataTransformation().getCatalogDataSourceIds() != null && !metadata.getDataTransformation().getCatalogDataSourceIds().isEmpty()) {
            final Set<String> catalogDataSources = metadataAccess.read(() -> catalogDataSourceProvider.findAll().stream().map(com.thinkbiganalytics.metadata.api.catalog.DataSource::getId).map(Object::toString).collect(Collectors.toSet()));
            componentOption.getProperties().addAll(FeedImportDatasourceUtil.buildCatalogDatasourceAssignmentProperties(metadata, catalogDataSources));
        }
    }
    // Update feed with re-mapped data sources
    Map<String, String> chartModelReplacements = new HashMap<>();
    String sourceDataSetIds = metadata.getSourceDataSetIds();
    // sourceDataSetIds is a comma-separated list (it is rebuilt with Collectors.joining(",") below), so split it rather than wrapping the whole string as a single element
    Set<String> datasetIds = StringUtils.isNotBlank(sourceDataSetIds) ? new HashSet<String>(Arrays.asList(sourceDataSetIds.split(","))) : new HashSet<String>();
    final boolean valid = componentOption.getProperties().stream().allMatch(property -> {
        if (property.getPropertyValue() != null) {
            if (property.getAdditionalPropertyValue(FeedImportDatasourceUtil.LEGACY_TABLE_DATA_SOURCE_KEY) != null && "true".equalsIgnoreCase(property.getAdditionalPropertyValue(FeedImportDatasourceUtil.LEGACY_TABLE_DATA_SOURCE_KEY))) {
                // remap
                String table = property.getAdditionalPropertyValue("table");
                String datasourceId = property.getAdditionalPropertyValue("datasourceId");
                String datasetId = property.getPropertyValue();
                com.thinkbiganalytics.kylo.catalog.rest.model.DataSet dataSet = metadataAccess.read(() -> {
                    return catalogDataSetProvider.find(catalogDataSetProvider.resolveId(datasetId)).map(catalogDataSet -> catalogModelTransform.dataSetToRestModel().apply(catalogDataSet)).orElse(null);
                });
                if (dataSet != null) {
                    FeedImportDatasourceUtil.replaceLegacyDataSourceScript(metadata, table, datasourceId, dataSet);
                    datasetIds.add(dataSet.getId());
                    // TODO is this needed?
                    chartModelReplacements.put(datasourceId, dataSet.getDataSource().getId());
                    return true;
                } else {
                    return false;
                }
            }
            if (property.getAdditionalPropertyValue(FeedImportDatasourceUtil.LEGACY_QUERY_DATA_SOURCE_KEY) != null && "true".equalsIgnoreCase(property.getAdditionalPropertyValue(FeedImportDatasourceUtil.LEGACY_QUERY_DATA_SOURCE_KEY))) {
                // remap
                // there is only one datasource throughout; replace the method call and the datasource id with the new one
                String datasourceId = property.getAdditionalPropertyValue("datasourceId");
                String catalogDataSourceId = property.getPropertyValue();
                com.thinkbiganalytics.kylo.catalog.rest.model.DataSource dataSource = metadataAccess.read(() -> {
                    return catalogDataSourceProvider.find(catalogDataSourceProvider.resolveId(catalogDataSourceId)).map(catalogDataSource -> catalogModelTransform.dataSourceToRestModel().apply(catalogDataSource)).orElse(null);
                });
                if (dataSource != null) {
                    FeedImportDatasourceUtil.replaceLegacyQueryDataSourceScript(metadata, datasourceId, dataSource);
                    // TODO is this needed?
                    chartModelReplacements.put(datasourceId, dataSource.getId());
                    return true;
                } else {
                    return false;
                }
            }
            if (property.getAdditionalPropertyValue(FeedImportDatasourceUtil.CATALOG_DATASOURCE_KEY) != null && "true".equalsIgnoreCase(property.getAdditionalPropertyValue(FeedImportDatasourceUtil.CATALOG_DATASOURCE_KEY))) {
                String datasourceId = property.getAdditionalPropertyValue("datasourceId");
                String catalogDataSourceId = property.getPropertyValue();
                com.thinkbiganalytics.kylo.catalog.rest.model.DataSource dataSource = metadataAccess.read(() -> {
                    return catalogDataSourceProvider.find(catalogDataSourceProvider.resolveId(catalogDataSourceId)).map(catalogDataSource -> catalogModelTransform.dataSourceToRestModel().apply(catalogDataSource)).orElse(null);
                });
                if (dataSource != null) {
                    List<String> newIds = metadata.getDataTransformation().getCatalogDataSourceIds().stream().map(id -> id.equalsIgnoreCase(datasourceId) ? catalogDataSourceId : id).collect(Collectors.toList());
                    metadata.getDataTransformation().setCatalogDataSourceIds(newIds);
                    String script = metadata.getDataTransformation().getDataTransformScript();
                    script = script.replaceAll(datasourceId, catalogDataSourceId);
                    metadata.getDataTransformation().setDataTransformScript(script);
                    chartModelReplacements.put(datasourceId, catalogDataSourceId);
                    return true;
                } else {
                    return false;
                }
            } else {
                // ImportUtil.replaceDatasource(metadata, property.getProcessorId(), property.getPropertyValue());
                return false;
            }
        } else {
            return false;
        }
    });
    if (valid) {
        // make the final replacements and add in the sources
        if (datasetIds != null) {
            metadata.setSourceDataSetIds(datasetIds.stream().collect(Collectors.joining(",")));
        }
        FeedImportDatasourceUtil.ensureConnectionKeysMatch(metadata);
        FeedImportDatasourceUtil.replaceChartModelReferences(metadata, chartModelReplacements);
        FeedImportDatasourceUtil.populateFeedDatasourceIdsProperty(metadata);
        statusMessage.update("Validated data sources.", true);
    } else {
        statusMessage.update("Validation Error. Additional properties are needed before uploading the feed.", false);
        importFeed.setValid(false);
    }
    uploadProgressService.completeSection(importFeed.getImportOptions(), ImportSection.Section.VALIDATE_USER_DATASOURCES);
    return valid;
}
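
The remapping steps above follow a single pattern: look up the replacement catalog entity, rewrite every occurrence of the legacy id in the data transform script, and record the old-to-new id pair so chart model references can be fixed afterwards. Below is a minimal sketch of that rewrite-and-record step using only plain strings and maps; the class and method names are illustrative, not Kylo APIs.

import java.util.HashMap;
import java.util.Map;

public class LegacyIdRemapSketch {

    /**
     * Replaces every occurrence of a legacy datasource id in a transform script with the
     * new catalog id and records the mapping for later chart model cleanup.
     */
    static String remap(String script, String legacyId, String catalogId, Map<String, String> chartModelReplacements) {
        // plain String.replace avoids accidental regex interpretation of the id
        String rewritten = script.replace(legacyId, catalogId);
        chartModelReplacements.put(legacyId, catalogId);
        return rewritten;
    }

    public static void main(String[] args) {
        Map<String, String> replacements = new HashMap<>();
        String script = "df = readDatasource(\"legacy-123\").select(\"col1\")"; // illustrative script content
        String updated = remap(script, "legacy-123", "catalog-456", replacements);
        System.out.println(updated);      // df = readDatasource("catalog-456").select("col1")
        System.out.println(replacements); // {legacy-123=catalog-456}
    }
}
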
Also used : UploadProgressService(com.thinkbiganalytics.feedmgr.service.UploadProgressService) Arrays(java.util.Arrays) RegisteredTemplateService(com.thinkbiganalytics.feedmgr.service.template.RegisteredTemplateService) Category(com.thinkbiganalytics.metadata.api.category.Category) DataSourceProvider(com.thinkbiganalytics.metadata.api.catalog.DataSourceProvider) LoggerFactory(org.slf4j.LoggerFactory) DatasourceModelTransform(com.thinkbiganalytics.feedmgr.service.datasource.DatasourceModelTransform) TemplateImporter(com.thinkbiganalytics.feedmgr.service.template.importing.TemplateImporter) StringUtils(org.apache.commons.lang3.StringUtils) DataSetProvider(com.thinkbiganalytics.metadata.api.catalog.DataSetProvider) ByteArrayInputStream(java.io.ByteArrayInputStream) FeedAccessControl(com.thinkbiganalytics.metadata.api.feed.security.FeedAccessControl) FeedManagerCategoryService(com.thinkbiganalytics.feedmgr.service.category.FeedManagerCategoryService) NifiPropertyUtil(com.thinkbiganalytics.nifi.rest.support.NifiPropertyUtil) Map(java.util.Map) ImportUtil(com.thinkbiganalytics.feedmgr.util.ImportUtil) AccessController(com.thinkbiganalytics.security.AccessController) NifiFeed(com.thinkbiganalytics.feedmgr.rest.model.NifiFeed) CategoryAccessControl(com.thinkbiganalytics.metadata.api.category.security.CategoryAccessControl) FeedServicesAccessControl(com.thinkbiganalytics.feedmgr.security.FeedServicesAccessControl) MetadataAccess(com.thinkbiganalytics.metadata.api.MetadataAccess) CatalogModelTransform(com.thinkbiganalytics.kylo.catalog.rest.model.CatalogModelTransform) ZipEntry(java.util.zip.ZipEntry) MetadataService(com.thinkbiganalytics.feedmgr.service.MetadataService) PolicyPropertyTypes(com.thinkbiganalytics.policy.PolicyPropertyTypes) MetadataFieldAnnotationFieldNameResolver(com.thinkbiganalytics.feedmgr.MetadataFieldAnnotationFieldNameResolver) UserField(com.thinkbiganalytics.feedmgr.rest.model.UserField) ImportTemplate(com.thinkbiganalytics.feedmgr.service.template.importing.model.ImportTemplate) NifiProperty(com.thinkbiganalytics.nifi.rest.model.NifiProperty) ImportPropertyBuilder(com.thinkbiganalytics.feedmgr.rest.model.ImportPropertyBuilder) Set(java.util.Set) UploadProgress(com.thinkbiganalytics.feedmgr.rest.model.UploadProgress) UUID(java.util.UUID) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) UploadProgressMessage(com.thinkbiganalytics.feedmgr.rest.model.UploadProgressMessage) ZipFileUtil(com.thinkbiganalytics.feedmgr.support.ZipFileUtil) Objects(java.util.Objects) List(java.util.List) CategoryProvider(com.thinkbiganalytics.metadata.api.category.CategoryProvider) DataSource(com.thinkbiganalytics.metadata.api.catalog.DataSource) ImportTemplateOptions(com.thinkbiganalytics.feedmgr.rest.model.ImportTemplateOptions) ImportFeed(com.thinkbiganalytics.feedmgr.service.feed.importing.model.ImportFeed) ImportFeedException(com.thinkbiganalytics.feedmgr.service.feed.ImportFeedException) ImportComponent(com.thinkbiganalytics.feedmgr.rest.ImportComponent) ZipInputStream(java.util.zip.ZipInputStream) FeedManagerFeedService(com.thinkbiganalytics.feedmgr.service.feed.FeedManagerFeedService) DataSet(com.thinkbiganalytics.metadata.api.catalog.DataSet) HashMap(java.util.HashMap) FeedMetadata(com.thinkbiganalytics.feedmgr.rest.model.FeedMetadata) ImportSection(com.thinkbiganalytics.feedmgr.rest.ImportSection) UserProperty(com.thinkbiganalytics.feedmgr.rest.model.UserProperty) ArrayList(java.util.ArrayList) FeedCategory(com.thinkbiganalytics.feedmgr.rest.model.FeedCategory) 
HashSet(java.util.HashSet) Inject(javax.inject.Inject) ImportProperty(com.thinkbiganalytics.feedmgr.rest.model.ImportProperty) ImportType(com.thinkbiganalytics.feedmgr.rest.ImportType) ImportFeedOptions(com.thinkbiganalytics.feedmgr.rest.model.ImportFeedOptions) RegisteredTemplate(com.thinkbiganalytics.feedmgr.rest.model.RegisteredTemplate) TemplateImporterFactory(com.thinkbiganalytics.feedmgr.service.template.importing.TemplateImporterFactory) ConnectorProvider(com.thinkbiganalytics.metadata.api.catalog.ConnectorProvider) UserDatasource(com.thinkbiganalytics.metadata.api.datasource.UserDatasource) ImportTemplateRoutineFactory(com.thinkbiganalytics.feedmgr.service.template.importing.importprocess.ImportTemplateRoutineFactory) Logger(org.slf4j.Logger) FeedNameUtil(com.thinkbiganalytics.support.FeedNameUtil) ImportTemplateRoutine(com.thinkbiganalytics.feedmgr.service.template.importing.importprocess.ImportTemplateRoutine) IOException(java.io.IOException) ImportException(com.thinkbiganalytics.feedmgr.service.template.importing.ImportException) UserPropertyTransform(com.thinkbiganalytics.feedmgr.service.UserPropertyTransform) DatasourceProvider(com.thinkbiganalytics.metadata.api.datasource.DatasourceProvider) Collections(java.util.Collections) LegacyNifiRestClient(com.thinkbiganalytics.nifi.rest.client.LegacyNifiRestClient) ImportComponentOption(com.thinkbiganalytics.feedmgr.rest.model.ImportComponentOption) InputStream(java.io.InputStream) HashMap(java.util.HashMap) FeedMetadata(com.thinkbiganalytics.feedmgr.rest.model.FeedMetadata) ImportComponentOption(com.thinkbiganalytics.feedmgr.rest.model.ImportComponentOption) UploadProgressMessage(com.thinkbiganalytics.feedmgr.rest.model.UploadProgressMessage)

Example 2 with DataSet

use of com.thinkbiganalytics.metadata.api.catalog.DataSet in project kylo by Teradata.

the class FeedImporter method validateUserDataSets.

/**
 * Validates that user data sets can be imported with provided properties.
 *
 * @return {@code true} if the feed can be imported, or {@code false} otherwise
 */
private boolean validateUserDataSets() {
    List<com.thinkbiganalytics.kylo.catalog.rest.model.DataSet> sourceDataSets = importFeed.getDataSetsToImport();
    if (sourceDataSets != null && !sourceDataSets.isEmpty()) {
        final UploadProgressMessage statusMessage = uploadProgressService.addUploadStatus(importFeed.getImportOptions().getUploadKey(), "Validating data sets.");
        final ImportComponentOption componentOption = importFeedOptions.findImportComponentOption(ImportComponent.USER_DATA_SETS);
        // Map the original datasets by their id
        Map<String, com.thinkbiganalytics.kylo.catalog.rest.model.DataSet> origDataSetMap = sourceDataSets.stream().collect(Collectors.toMap(ds -> ds.getId(), ds -> ds));
        // create a copy of the map so entries can be modified based on the user-supplied properties
        Map<String, com.thinkbiganalytics.kylo.catalog.rest.model.DataSet> modifiedDataSetMap = sourceDataSets.stream().collect(Collectors.toMap(ds -> ds.getId(), ds -> new com.thinkbiganalytics.kylo.catalog.rest.model.DataSet(ds)));
        // look at the properties supplied by the user and apply those first
        List<ImportProperty> properties = componentOption.getProperties();
        properties.stream().forEach(importProperty -> {
            if (StringUtils.isNotBlank(importProperty.getPropertyValue())) {
                com.thinkbiganalytics.kylo.catalog.rest.model.DataSet matchingDataSet = modifiedDataSetMap.get(importProperty.getComponentId());
                if (matchingDataSet != null) {
                    matchingDataSet.setId(importProperty.getPropertyValue());
                    log.info("Remap dataset old id: {}, new id: {}, details: {} ", importProperty.getComponentId(), importProperty.getPropertyValue(), importProperty);
                }
            }
        });
        FeedMetadata metadata = importFeed.getFeedToImport();
        // find the data sets that need importing
        Map<String, Map<String, String>> datasetAdditionalProperties = new HashMap<>();
        // find schemas associated with data sets for data transform feeds
        if (metadata.getDataTransformation() != null && StringUtils.isNotBlank(metadata.getDataTransformation().getDataTransformScript())) {
            List<Map<String, Object>> nodes = (List<Map<String, Object>>) metadata.getDataTransformation().getChartViewModel().get("nodes");
            if (nodes != null) {
                nodes.stream().forEach((nodeMap) -> {
                    Map<String, Object> nodeDataSetMap = (Map<String, Object>) nodeMap.get("dataset");
                    if (nodeDataSetMap != null) {
                        String dataSetId = (String) nodeDataSetMap.get("id");
                        List<Map<String, String>> schema = (List<Map<String, String>>) nodeDataSetMap.get("schema");
                        if (schema != null) {
                            String schemaString = schema.stream().map(field -> {
                                String name = field.get("name");
                                String dataType = field.get("dataType");
                                return name + " " + dataType;
                            }).collect(Collectors.joining(","));
                            // find the property associated with this dataset and add the schema as an additional property
                            datasetAdditionalProperties.computeIfAbsent(dataSetId, dsId -> new HashMap<String, String>()).put("schema", schemaString);
                        }
                    }
                });
            }
        }
        // create a map of the zip file datasets and the matching system datasets
        Map<com.thinkbiganalytics.kylo.catalog.rest.model.DataSet, com.thinkbiganalytics.kylo.catalog.rest.model.DataSet> importDataSetIdMap = new HashMap<>();
        // attempt to find the dataset and associate it with the incoming one
        sourceDataSets.stream().forEach(dataSet -> {
            com.thinkbiganalytics.kylo.catalog.rest.model.DataSet modifiedDataSet = modifiedDataSetMap.get(dataSet.getId());
            importDataSetIdMap.put(dataSet, findMatchingDataSet(modifiedDataSet));
        });
        // the list of properties to be returned to the user to reassign datasets
        List<ImportProperty> dataSetProperties = new ArrayList<>();
        boolean valid = true;
        // entries with a non-null value already have a matching system dataset; for any entry whose value is null,
        // ask the user to supply a dataset by creating an ImportProperty marked as invalid
        importDataSetIdMap.entrySet().stream().forEach(entry -> {
            com.thinkbiganalytics.kylo.catalog.rest.model.DataSet incomingDataSet = entry.getKey();
            com.thinkbiganalytics.kylo.catalog.rest.model.DataSet matchingDataSet = entry.getValue();
            String datasetPathTitle = incomingDataSet.getPaths().stream().collect(Collectors.joining(","));
            String title = incomingDataSet.getDataSource().getTitle();
            ImportProperty property = ImportPropertyBuilder.anImportProperty().withComponentName(title).withDisplayName(datasetPathTitle).withPropertyKey("dataset_" + UUID.randomUUID().toString().replaceAll("-", "_")).withDescription(datasetPathTitle).withComponentId(incomingDataSet.getId()).withImportComponent(ImportComponent.USER_DATA_SETS).asValid(matchingDataSet != null).withAdditionalProperties(datasetAdditionalProperties.get(incomingDataSet.getId())).putAdditionalProperty("dataset", "true").build();
            dataSetProperties.add(property);
            componentOption.setValidForImport(property.isValid());
        });
        componentOption.setProperties(dataSetProperties);
        // mark the component as valid only if the dataset properties are all valid
        componentOption.setValidForImport(dataSetProperties.stream().allMatch(ImportProperty::isValid));
        if (componentOption.isValidForImport()) {
            // replace the source datasets with the found ones
            metadata.setSourceDataSets(new ArrayList<>(importDataSetIdMap.values()));
            Set<String> datasourceIds = new HashSet<>();
            Map<String, String> chartModelReplacements = new HashMap<>();
            // replace the Data Transformation dataset references with the new one
            if (metadata.getDataTransformation() != null && StringUtils.isNotBlank(metadata.getDataTransformation().getDataTransformScript())) {
                String script = metadata.getDataTransformation().getDataTransformScript();
                for (Map.Entry<com.thinkbiganalytics.kylo.catalog.rest.model.DataSet, com.thinkbiganalytics.kylo.catalog.rest.model.DataSet> entry : importDataSetIdMap.entrySet()) {
                    com.thinkbiganalytics.kylo.catalog.rest.model.DataSet incomingDataSet = entry.getKey();
                    com.thinkbiganalytics.kylo.catalog.rest.model.DataSet matchingDataSet = entry.getValue();
                    if (!incomingDataSet.getId().equalsIgnoreCase(matchingDataSet.getId())) {
                        script = script.replaceAll(incomingDataSet.getId(), matchingDataSet.getId());
                        chartModelReplacements.put(incomingDataSet.getId(), matchingDataSet.getId());
                        chartModelReplacements.put(incomingDataSet.getDataSource().getId(), matchingDataSet.getDataSource().getId());
                    }
                    datasourceIds.add(matchingDataSet.getDataSource().getId());
                    metadata.getDataTransformation().setDatasourceIds(new ArrayList<>(datasourceIds));
                }
                metadata.getDataTransformation().setDataTransformScript(script);
                FeedImportDatasourceUtil.replaceChartModelReferences(metadata, chartModelReplacements);
            }
            statusMessage.update("Validated data sets.", true);
        } else {
            statusMessage.update("Validation Error. Additional properties are needed before uploading the feed.", false);
            importFeed.setValid(false);
        }
        uploadProgressService.completeSection(importFeed.getImportOptions(), ImportSection.Section.VALIDATE_USER_DATASOURCES);
        return componentOption.isValidForImport();
    }
    return true;
}
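
The schema-extraction block above walks the chart view model's "nodes", reads each node's dataset schema, and flattens it into a "name dataType" string stored as an additional property. Here is a standalone sketch of that flattening with standard collections only; the map shapes are assumed to mirror what the method reads.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class SchemaFlattenSketch {

    /** Flattens a list of {name, dataType} field maps into "col1 type1,col2 type2". */
    static String flattenSchema(List<Map<String, String>> schema) {
        return schema.stream()
            .map(field -> field.get("name") + " " + field.get("dataType"))
            .collect(Collectors.joining(","));
    }

    static Map<String, String> field(String name, String dataType) {
        Map<String, String> f = new HashMap<>();
        f.put("name", name);
        f.put("dataType", dataType);
        return f;
    }

    public static void main(String[] args) {
        List<Map<String, String>> schema = new ArrayList<>();
        schema.add(field("id", "bigint"));
        schema.add(field("created_at", "timestamp"));

        // stored under the dataset id as the "schema" additional property
        Map<String, Map<String, String>> datasetAdditionalProperties = new HashMap<>();
        datasetAdditionalProperties
            .computeIfAbsent("dataset-1", dsId -> new HashMap<>())
            .put("schema", flattenSchema(schema));

        System.out.println(datasetAdditionalProperties); // {dataset-1={schema=id bigint,created_at timestamp}}
    }
}
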
Also used : UploadProgressService(com.thinkbiganalytics.feedmgr.service.UploadProgressService) Arrays(java.util.Arrays) RegisteredTemplateService(com.thinkbiganalytics.feedmgr.service.template.RegisteredTemplateService) Category(com.thinkbiganalytics.metadata.api.category.Category) DataSourceProvider(com.thinkbiganalytics.metadata.api.catalog.DataSourceProvider) LoggerFactory(org.slf4j.LoggerFactory) DatasourceModelTransform(com.thinkbiganalytics.feedmgr.service.datasource.DatasourceModelTransform) TemplateImporter(com.thinkbiganalytics.feedmgr.service.template.importing.TemplateImporter) StringUtils(org.apache.commons.lang3.StringUtils) DataSetProvider(com.thinkbiganalytics.metadata.api.catalog.DataSetProvider) ByteArrayInputStream(java.io.ByteArrayInputStream) FeedAccessControl(com.thinkbiganalytics.metadata.api.feed.security.FeedAccessControl) FeedManagerCategoryService(com.thinkbiganalytics.feedmgr.service.category.FeedManagerCategoryService) NifiPropertyUtil(com.thinkbiganalytics.nifi.rest.support.NifiPropertyUtil) Map(java.util.Map) ImportUtil(com.thinkbiganalytics.feedmgr.util.ImportUtil) AccessController(com.thinkbiganalytics.security.AccessController) NifiFeed(com.thinkbiganalytics.feedmgr.rest.model.NifiFeed) CategoryAccessControl(com.thinkbiganalytics.metadata.api.category.security.CategoryAccessControl) FeedServicesAccessControl(com.thinkbiganalytics.feedmgr.security.FeedServicesAccessControl) MetadataAccess(com.thinkbiganalytics.metadata.api.MetadataAccess) CatalogModelTransform(com.thinkbiganalytics.kylo.catalog.rest.model.CatalogModelTransform) ZipEntry(java.util.zip.ZipEntry) MetadataService(com.thinkbiganalytics.feedmgr.service.MetadataService) PolicyPropertyTypes(com.thinkbiganalytics.policy.PolicyPropertyTypes) MetadataFieldAnnotationFieldNameResolver(com.thinkbiganalytics.feedmgr.MetadataFieldAnnotationFieldNameResolver) UserField(com.thinkbiganalytics.feedmgr.rest.model.UserField) ImportTemplate(com.thinkbiganalytics.feedmgr.service.template.importing.model.ImportTemplate) NifiProperty(com.thinkbiganalytics.nifi.rest.model.NifiProperty) ImportPropertyBuilder(com.thinkbiganalytics.feedmgr.rest.model.ImportPropertyBuilder) Set(java.util.Set) UploadProgress(com.thinkbiganalytics.feedmgr.rest.model.UploadProgress) UUID(java.util.UUID) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) UploadProgressMessage(com.thinkbiganalytics.feedmgr.rest.model.UploadProgressMessage) ZipFileUtil(com.thinkbiganalytics.feedmgr.support.ZipFileUtil) Objects(java.util.Objects) List(java.util.List) CategoryProvider(com.thinkbiganalytics.metadata.api.category.CategoryProvider) DataSource(com.thinkbiganalytics.metadata.api.catalog.DataSource) ImportTemplateOptions(com.thinkbiganalytics.feedmgr.rest.model.ImportTemplateOptions) ImportFeed(com.thinkbiganalytics.feedmgr.service.feed.importing.model.ImportFeed) ImportFeedException(com.thinkbiganalytics.feedmgr.service.feed.ImportFeedException) ImportComponent(com.thinkbiganalytics.feedmgr.rest.ImportComponent) ZipInputStream(java.util.zip.ZipInputStream) FeedManagerFeedService(com.thinkbiganalytics.feedmgr.service.feed.FeedManagerFeedService) DataSet(com.thinkbiganalytics.metadata.api.catalog.DataSet) HashMap(java.util.HashMap) FeedMetadata(com.thinkbiganalytics.feedmgr.rest.model.FeedMetadata) ImportSection(com.thinkbiganalytics.feedmgr.rest.ImportSection) UserProperty(com.thinkbiganalytics.feedmgr.rest.model.UserProperty) ArrayList(java.util.ArrayList) FeedCategory(com.thinkbiganalytics.feedmgr.rest.model.FeedCategory) 
HashSet(java.util.HashSet) Inject(javax.inject.Inject) ImportProperty(com.thinkbiganalytics.feedmgr.rest.model.ImportProperty) ImportType(com.thinkbiganalytics.feedmgr.rest.ImportType) ImportFeedOptions(com.thinkbiganalytics.feedmgr.rest.model.ImportFeedOptions) RegisteredTemplate(com.thinkbiganalytics.feedmgr.rest.model.RegisteredTemplate) TemplateImporterFactory(com.thinkbiganalytics.feedmgr.service.template.importing.TemplateImporterFactory) ConnectorProvider(com.thinkbiganalytics.metadata.api.catalog.ConnectorProvider) UserDatasource(com.thinkbiganalytics.metadata.api.datasource.UserDatasource) ImportTemplateRoutineFactory(com.thinkbiganalytics.feedmgr.service.template.importing.importprocess.ImportTemplateRoutineFactory) Logger(org.slf4j.Logger) FeedNameUtil(com.thinkbiganalytics.support.FeedNameUtil) ImportTemplateRoutine(com.thinkbiganalytics.feedmgr.service.template.importing.importprocess.ImportTemplateRoutine) IOException(java.io.IOException) ImportException(com.thinkbiganalytics.feedmgr.service.template.importing.ImportException) UserPropertyTransform(com.thinkbiganalytics.feedmgr.service.UserPropertyTransform) DatasourceProvider(com.thinkbiganalytics.metadata.api.datasource.DatasourceProvider) Collections(java.util.Collections) LegacyNifiRestClient(com.thinkbiganalytics.nifi.rest.client.LegacyNifiRestClient) ImportComponentOption(com.thinkbiganalytics.feedmgr.rest.model.ImportComponentOption) InputStream(java.io.InputStream) DataSet(com.thinkbiganalytics.metadata.api.catalog.DataSet) HashMap(java.util.HashMap) FeedMetadata(com.thinkbiganalytics.feedmgr.rest.model.FeedMetadata) ArrayList(java.util.ArrayList) List(java.util.List) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) ImportComponentOption(com.thinkbiganalytics.feedmgr.rest.model.ImportComponentOption) UploadProgressMessage(com.thinkbiganalytics.feedmgr.rest.model.UploadProgressMessage) ImportProperty(com.thinkbiganalytics.feedmgr.rest.model.ImportProperty) Map(java.util.Map) HashMap(java.util.HashMap)

Example 3 with DataSet

use of com.thinkbiganalytics.metadata.api.catalog.DataSet in project kylo by Teradata.

the class JcrDataSetProviderTest method testFindPage.

@Test(dependsOnMethods = "testDelete")
public void testFindPage() {
    metadata.read(() -> {
        Page<DataSet> conns = this.dataSetProvider.findPage(new PageRequest(0, 5), null);
        assertThat(conns).isNotNull();
        assertThat(conns).hasSize(5);
        assertThat(conns).extracting("title", "description").contains(dataSetTuple(1), dataSetTuple(2), dataSetTuple(3), dataSetTuple(4), dataSetTuple(5));
    }, MetadataAccess.SERVICE);
}
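
For readers unfamiliar with the Spring Data types in this test, here is a rough sketch of what a findPage-style call does with a PageRequest, shown against an in-memory list rather than the JCR-backed provider used above.

import java.util.Arrays;
import java.util.List;

import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;

public class InMemoryPagingSketch {

    /** Slices an in-memory list according to the requested page, mimicking findPage(pageable, filter). */
    static <T> Page<T> findPage(List<T> all, Pageable pageable) {
        int from = (int) Math.min(pageable.getOffset(), all.size());
        int to = Math.min(from + pageable.getPageSize(), all.size());
        return new PageImpl<>(all.subList(from, to), pageable, all.size());
    }

    public static void main(String[] args) {
        List<String> titles = Arrays.asList("ds1", "ds2", "ds3", "ds4", "ds5", "ds6", "ds7");
        Page<String> page = findPage(titles, new PageRequest(0, 5));
        System.out.println(page.getContent());       // [ds1, ds2, ds3, ds4, ds5]
        System.out.println(page.getTotalElements()); // 7
    }
}
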
Also used : PageRequest(org.springframework.data.domain.PageRequest) DataSet(com.thinkbiganalytics.metadata.api.catalog.DataSet) Test(org.testng.annotations.Test) SpringBootTest(org.springframework.boot.test.context.SpringBootTest)

Example 4 with DataSet

use of com.thinkbiganalytics.metadata.api.catalog.DataSet in project kylo by Teradata.

the class JcrDataSetProviderTest method testCreate.

@Test
public void testCreate() {
    metadata.commit(() -> {
        DataSet dset = null;
        for (int srcIdx = 0; srcIdx < 2; srcIdx++) {
            for (int dsIdx = 1; dsIdx <= COUNT + 1; dsIdx++) {
                dset = createDataSet(this.dSrcIds.get(srcIdx), srcIdx * 10 + dsIdx);
            }
            this.dSetIds.add(dset.getId());
        }
    }, MetadataAccess.SERVICE);
    metadata.read(() -> {
        Optional<DataSet> dset0 = this.dataSetProvider.find(this.dSetIds.get(0));
        assertThat(dset0).isNotNull().isPresent();
        assertThat(dset0.get()).extracting("title", "description").contains(dataSetTuple(COUNT + 1).toArray());
        assertThat(dset0.get().getDataSource()).isNotNull().extracting("id").contains(this.dSrcIds.get(0));
        Optional<DataSet> dset1 = this.dataSetProvider.find(this.dSetIds.get(1));
        assertThat(dset1).isNotNull().isPresent();
        assertThat(dset1.get()).extracting("title", "description").contains(dataSetTuple(10 + COUNT + 1).toArray());
        assertThat(dset1.get().getDataSource()).isNotNull().extracting("id").contains(this.dSrcIds.get(1));
    }, MetadataAccess.SERVICE);
}
Also used : DataSet(com.thinkbiganalytics.metadata.api.catalog.DataSet) Test(org.testng.annotations.Test) SpringBootTest(org.springframework.boot.test.context.SpringBootTest)

Example 5 with DataSet

use of com.thinkbiganalytics.metadata.api.catalog.DataSet in project kylo by Teradata.

the class DefaultFeedExporter method exportFeed.

/**
 * Export a feed as a zip file
 *
 * @param feedId the id {@link Feed#getId()} of the feed to export
 * @return object containing the zip file with data about the feed.
 */
@Override
public ExportFeed exportFeed(String feedId) throws IOException {
    this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.EXPORT_FEEDS);
    this.metadataService.checkFeedPermission(feedId, FeedAccessControl.EXPORT);
    // Prepare feed metadata
    final FeedMetadata feed = metadataService.getFeedById(feedId);
    if (feed == null) {
        // the feed will not be found when the user is allowed to export feeds but has no entity access to the feed with this id
        throw new NotFoundException("Feed not found for id " + feedId);
    }
    /*
        final List<Datasource> userDatasources = Optional.ofNullable(feed.getDataTransformation())
            .map(FeedDataTransformation::getDatasourceIds)
            .map(datasourceIds -> metadataAccess.read(
                () ->
                    datasourceIds.stream()
                        .map(datasourceProvider::resolve)
                        .map(datasourceProvider::getDatasource)
                        .map(domain -> datasourceTransform.toDatasource(domain, DatasourceModelTransform.Level.FULL))
                        .map(datasource -> {
                            // Clear sensitive fields
                            datasource.getDestinationForFeeds().clear();
                            datasource.getSourceForFeeds().clear();
                            return datasource;
                        })
                        .collect(Collectors.toList())
                 )
            )
            .orElse(null);
        if (userDatasources != null && !userDatasources.isEmpty()) {
            this.accessController.checkPermission(AccessController.SERVICES, FeedServicesAccessControl.ACCESS_DATASOURCES);
            feed.setUserDatasources(userDatasources);
        }
        */
    // export the datasets ???
    String dataSetJSON = "[]";
    if (!feed.getSourceDataSets().isEmpty()) {
        // refetch them?
        List<com.thinkbiganalytics.kylo.catalog.rest.model.DataSet> dataSets = feed.getSourceDataSets().stream().map(dataSet -> {
            return metadataAccess.read(() -> {
                return dataSetProvider.find(dataSetProvider.resolveId(dataSet.getId())).map(ds -> catalogModelTransform.dataSetToRestModel().apply(ds)).orElse(null);
            });
        }).filter(dataSet -> dataSet != null).collect(Collectors.toList());
        dataSetJSON = ObjectMapperSerializer.serialize(dataSets);
    }
    // Add feed json to template zip file
    final ExportTemplate exportTemplate = templateExporter.exportTemplateForFeedExport(feed.getTemplateId());
    final String feedJson = ObjectMapperSerializer.serialize(feed);
    byte[] zipFile = ZipFileUtil.addToZip(exportTemplate.getFile(), feedJson, ImportFeed.FEED_JSON_FILE);
    final byte[] zipFile2 = ZipFileUtil.addToZip(zipFile, dataSetJSON, ImportFeed.FEED_DATASETS_FILE);
    return new ExportFeed(feed.getSystemFeedName() + ".feed.zip", zipFile2);
}
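
ZipFileUtil.addToZip is a Kylo helper whose body is not shown here; the idea is to copy the existing archive and append one more named entry containing the serialized JSON. The following is a simplified sketch of that operation with java.util.zip, with error handling and duplicate-entry checks omitted.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;

public class ZipAppendSketch {

    /** Copies an existing zip and appends one extra entry containing the given content. */
    static byte[] addToZip(byte[] existingZip, String content, String entryName) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (ZipInputStream in = new ZipInputStream(new ByteArrayInputStream(existingZip));
             ZipOutputStream zipOut = new ZipOutputStream(out)) {
            // copy the content of every existing entry into the new archive
            ZipEntry entry;
            byte[] buffer = new byte[8192];
            while ((entry = in.getNextEntry()) != null) {
                zipOut.putNextEntry(new ZipEntry(entry.getName()));
                int read;
                while ((read = in.read(buffer)) > 0) {
                    zipOut.write(buffer, 0, read);
                }
                zipOut.closeEntry();
            }
            // append the new entry, e.g. the feed JSON or the datasets JSON
            zipOut.putNextEntry(new ZipEntry(entryName));
            zipOut.write(content.getBytes(StandardCharsets.UTF_8));
            zipOut.closeEntry();
        }
        return out.toByteArray();
    }
}
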
Also used : FeedDataTransformation(com.thinkbiganalytics.feedmgr.rest.model.FeedDataTransformation) Feed(com.thinkbiganalytics.metadata.api.feed.Feed) ExportFeed(com.thinkbiganalytics.metadata.api.feed.export.ExportFeed) DatasourceModelTransform(com.thinkbiganalytics.feedmgr.service.datasource.DatasourceModelTransform) DataSet(com.thinkbiganalytics.metadata.api.catalog.DataSet) FeedMetadata(com.thinkbiganalytics.feedmgr.rest.model.FeedMetadata) DefaultTemplateExporter(com.thinkbiganalytics.feedmgr.service.template.exporting.DefaultTemplateExporter) Inject(javax.inject.Inject) ObjectMapperSerializer(com.thinkbiganalytics.json.ObjectMapperSerializer) DataSetProvider(com.thinkbiganalytics.metadata.api.catalog.DataSetProvider) FeedAccessControl(com.thinkbiganalytics.metadata.api.feed.security.FeedAccessControl) AccessController(com.thinkbiganalytics.security.AccessController) FeedServicesAccessControl(com.thinkbiganalytics.feedmgr.security.FeedServicesAccessControl) MetadataAccess(com.thinkbiganalytics.metadata.api.MetadataAccess) CatalogModelTransform(com.thinkbiganalytics.kylo.catalog.rest.model.CatalogModelTransform) MetadataService(com.thinkbiganalytics.feedmgr.service.MetadataService) Datasource(com.thinkbiganalytics.metadata.rest.model.data.Datasource) TemplateExporter(com.thinkbiganalytics.metadata.api.template.export.TemplateExporter) IOException(java.io.IOException) FeedExporter(com.thinkbiganalytics.metadata.api.feed.export.FeedExporter) Collectors(java.util.stream.Collectors) NotFoundException(javax.ws.rs.NotFoundException) ZipFileUtil(com.thinkbiganalytics.feedmgr.support.ZipFileUtil) ExportTemplate(com.thinkbiganalytics.metadata.api.template.export.ExportTemplate) List(java.util.List) ImportFeed(com.thinkbiganalytics.feedmgr.service.feed.importing.model.ImportFeed) Optional(java.util.Optional) DatasourceProvider(com.thinkbiganalytics.metadata.api.datasource.DatasourceProvider) ExportFeed(com.thinkbiganalytics.metadata.api.feed.export.ExportFeed) DataSet(com.thinkbiganalytics.metadata.api.catalog.DataSet) FeedMetadata(com.thinkbiganalytics.feedmgr.rest.model.FeedMetadata) NotFoundException(javax.ws.rs.NotFoundException) ExportTemplate(com.thinkbiganalytics.metadata.api.template.export.ExportTemplate)

Aggregations

DataSet (com.thinkbiganalytics.metadata.api.catalog.DataSet)5 FeedMetadata (com.thinkbiganalytics.feedmgr.rest.model.FeedMetadata)3 FeedServicesAccessControl (com.thinkbiganalytics.feedmgr.security.FeedServicesAccessControl)3 MetadataService (com.thinkbiganalytics.feedmgr.service.MetadataService)3 DatasourceModelTransform (com.thinkbiganalytics.feedmgr.service.datasource.DatasourceModelTransform)3 ImportFeed (com.thinkbiganalytics.feedmgr.service.feed.importing.model.ImportFeed)3 ZipFileUtil (com.thinkbiganalytics.feedmgr.support.ZipFileUtil)3 CatalogModelTransform (com.thinkbiganalytics.kylo.catalog.rest.model.CatalogModelTransform)3 MetadataAccess (com.thinkbiganalytics.metadata.api.MetadataAccess)3 Sets (com.google.common.collect.Sets)2 MetadataFieldAnnotationFieldNameResolver (com.thinkbiganalytics.feedmgr.MetadataFieldAnnotationFieldNameResolver)2 ImportComponent (com.thinkbiganalytics.feedmgr.rest.ImportComponent)2 ImportSection (com.thinkbiganalytics.feedmgr.rest.ImportSection)2 ImportType (com.thinkbiganalytics.feedmgr.rest.ImportType)2 FeedCategory (com.thinkbiganalytics.feedmgr.rest.model.FeedCategory)2 ImportComponentOption (com.thinkbiganalytics.feedmgr.rest.model.ImportComponentOption)2 ImportFeedOptions (com.thinkbiganalytics.feedmgr.rest.model.ImportFeedOptions)2 ImportProperty (com.thinkbiganalytics.feedmgr.rest.model.ImportProperty)2 ImportPropertyBuilder (com.thinkbiganalytics.feedmgr.rest.model.ImportPropertyBuilder)2 ImportTemplateOptions (com.thinkbiganalytics.feedmgr.rest.model.ImportTemplateOptions)2