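For reference before the project examples: StringUtils.isBlank(CharSequence) returns true for null, empty, and whitespace-only input, which is what distinguishes it from StringUtils.isEmpty in the code below. A minimal sketch of that contract:

import org.apache.commons.lang3.StringUtils;

public class IsBlankDemo {
    public static void main(String[] args) {
        // isBlank treats null, empty, and whitespace-only strings as blank
        System.out.println(StringUtils.isBlank(null));    // true
        System.out.println(StringUtils.isBlank(""));      // true
        System.out.println(StringUtils.isBlank("   "));   // true
        System.out.println(StringUtils.isBlank(" a "));   // false
        // isEmpty checks only for null or zero length
        System.out.println(StringUtils.isEmpty("   "));   // false
    }
}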

Example 1 with StringUtils.isBlank

Use of org.apache.commons.lang3.StringUtils.isBlank in project kylo by Teradata.

From the class AbstractMergeTable, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLog();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final String blockingValue = context.getProperty(BLOCKING_KEY).evaluateAttributeExpressions(flowFile).getValue();
    String flowFileId = flowFile.getAttribute(CoreAttributes.UUID.key());
    boolean block = false;
    if (blocking && blockingCache.putIfAbsent(blockingValue, flowFileId) != null) {
        if (StringUtils.isBlank(flowFile.getAttribute(BLOCKED_START_TIME))) {
            flowFile = session.putAttribute(flowFile, BLOCKED_START_TIME, String.valueOf(System.currentTimeMillis()));
            getLogger().info("Transferring Flow file {} to blocked relationship", new Object[] { flowFile });
        }
        // penalize the flow file and transfer to BLOCKED
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_BLOCKED);
        return;
    }
    // Add the blocking time to the flow file if it was previously blocked.
    if (blocking && StringUtils.isNotBlank(flowFile.getAttribute(BLOCKED_START_TIME))) {
        String blockedStartTime = flowFile.getAttribute(BLOCKED_START_TIME);
        try {
            Long l = Long.parseLong(blockedStartTime);
            Long blockTime = System.currentTimeMillis() - l;
            getLogger().info("Processing Blocked flow file {}.  This was blocked for {} ms", new Object[] { flowFile, blockTime });
            flowFile = session.putAttribute(flowFile, BLOCKED_TIME, String.valueOf(blockTime) + " ms");
        } catch (NumberFormatException e) {
            // ignore a malformed BLOCKED_START_TIME; the blocked-time attribute is informational only
        }
    }
    String PROVENANCE_EXECUTION_STATUS_KEY = context.getName() + " Execution Status";
    String partitionSpecString = context.getProperty(PARTITION_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue();
    String sourceSchema = context.getProperty(SOURCE_SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
    String sourceTable = context.getProperty(SOURCE_TABLE).evaluateAttributeExpressions(flowFile).getValue();
    String targetSchema = context.getProperty(TARGET_SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
    String targetTable = context.getProperty(TARGET_TABLE).evaluateAttributeExpressions(flowFile).getValue();
    String feedPartitionValue = context.getProperty(FEED_PARTITION).evaluateAttributeExpressions(flowFile).getValue();
    String mergeStrategyValue = context.getProperty(MERGE_STRATEGY).evaluateAttributeExpressions(flowFile).getValue();
    String hiveConfigurations = context.getProperty(HIVE_CONFIGURATIONS).evaluateAttributeExpressions(flowFile).getValue();
    boolean resetHive = context.getProperty(RESET_HIVE).asBoolean();
    final ColumnSpec[] columnSpecs = Optional.ofNullable(context.getProperty(FIELD_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue()).filter(StringUtils::isNotEmpty).map(ColumnSpec::createFromString).orElse(new ColumnSpec[0]);
    if (STRATEGY_PK_MERGE.equals(mergeStrategyValue) && (columnSpecs == null || columnSpecs.length == 0)) {
        getLog().error("Missing required field specification for PK merge feature");
        flowFile = session.putAttribute(flowFile, PROVENANCE_EXECUTION_STATUS_KEY, "Failed: Missing required field specification for PK merge feature");
        release(blockingValue);
        session.transfer(flowFile, IngestProperties.REL_FAILURE);
        return;
    }
    // Maintain default for backward compatibility
    if (StringUtils.isEmpty(mergeStrategyValue)) {
        mergeStrategyValue = STRATEGY_DEDUPE_MERGE;
    }
    logger.info("Merge strategy: " + mergeStrategyValue + " Using Source: " + sourceTable + " Target: " + targetTable + " feed partition:" + feedPartitionValue + " partSpec: " + partitionSpecString);
    final StopWatch stopWatch = new StopWatch(true);
    try (final Connection conn = getConnection(context)) {
        TableMergeSyncSupport mergeSupport = new TableMergeSyncSupport(conn);
        if (resetHive) {
            mergeSupport.resetHiveConf();
        }
        mergeSupport.enableDynamicPartitions();
        if (StringUtils.isNotEmpty(hiveConfigurations)) {
            mergeSupport.setHiveConf(hiveConfigurations.split("\\|"));
        }
        PartitionSpec partitionSpec = new PartitionSpec(partitionSpecString);
        if (STRATEGY_DEDUPE_MERGE.equals(mergeStrategyValue)) {
            mergeSupport.doMerge(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue, true);
        } else if (STRATEGY_MERGE.equals(mergeStrategyValue)) {
            mergeSupport.doMerge(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue, false);
        } else if (STRATEGY_SYNC.equals(mergeStrategyValue)) {
            mergeSupport.doSync(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue);
        } else if (STRATEGY_ROLLING_SYNC.equals(mergeStrategyValue)) {
            mergeSupport.doRollingSync(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue);
        } else if (STRATEGY_PK_MERGE.equals(mergeStrategyValue)) {
            mergeSupport.doPKMerge(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue, columnSpecs);
        } else {
            throw new UnsupportedOperationException("Failed to resolve the merge strategy");
        }
        stopWatch.stop();
        session.getProvenanceReporter().modifyContent(flowFile, "Execution completed", stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        flowFile = session.putAttribute(flowFile, PROVENANCE_EXECUTION_STATUS_KEY, "Successful");
        release(blockingValue);
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final Exception e) {
        logger.error("Unable to execute merge doMerge for {} due to {}; routing to failure", new Object[] { flowFile, e }, e);
        flowFile = session.putAttribute(flowFile, PROVENANCE_EXECUTION_STATUS_KEY, "Failed: " + e.getMessage());
        release(blockingValue);
        session.transfer(flowFile, REL_FAILURE);
    }
}
Also used: FlowFile(org.apache.nifi.flowfile.FlowFile) ColumnSpec(com.thinkbiganalytics.util.ColumnSpec) Connection(java.sql.Connection) ComponentLog(org.apache.nifi.logging.ComponentLog) PartitionSpec(com.thinkbiganalytics.util.PartitionSpec) ProcessException(org.apache.nifi.processor.exception.ProcessException) StopWatch(org.apache.nifi.util.StopWatch) StringUtils(org.apache.commons.lang3.StringUtils) TableMergeSyncSupport(com.thinkbiganalytics.ingest.TableMergeSyncSupport)
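The blocking branch above hinges on blockingCache.putIfAbsent(blockingValue, flowFileId) returning null only for the first caller; every later caller sees the existing holder and is routed to REL_BLOCKED. A minimal sketch of that gate, assuming a ConcurrentHashMap-backed cache (the actual type of blockingCache is not shown in the example):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class BlockingGateDemo {

    // assumed stand-in for the processor's blockingCache
    private static final ConcurrentMap<String, String> blockingCache = new ConcurrentHashMap<>();

    /** Returns true if the caller acquired the gate, false if another flow file already holds it. */
    static boolean tryAcquire(String blockingValue, String flowFileId) {
        // putIfAbsent returns null only when no mapping existed, i.e. for the first caller
        return blockingCache.putIfAbsent(blockingValue, flowFileId) == null;
    }

    /** Mirrors release(blockingValue) in the example: frees the gate for the next flow file. */
    static void release(String blockingValue) {
        blockingCache.remove(blockingValue);
    }

    public static void main(String[] args) {
        System.out.println(tryAcquire("feed-1", "ff-a")); // true: first holder
        System.out.println(tryAcquire("feed-1", "ff-b")); // false: would go to REL_BLOCKED
        release("feed-1");
        System.out.println(tryAcquire("feed-1", "ff-b")); // true after release
    }
}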

Example 2 with StringUtils.isBlank

Use of org.apache.commons.lang3.StringUtils.isBlank in project kylo by Teradata.

From the class ConfigurationPropertyReplacer, method replaceControllerServiceProperties:

/**
 * This will replace the map of properties in the DTO but does not persist the change back to NiFi. You need to call the REST client to persist the change.
 *
 * @param controllerServiceDTO        the controller service
 * @param properties                  the properties to set
 * @param propertyDescriptorTransform transformer
 * @return {@code true} if the properties were updated, {@code false} if not
 */
public static boolean replaceControllerServiceProperties(ControllerServiceDTO controllerServiceDTO, Map<String, String> properties, NiFiPropertyDescriptorTransform propertyDescriptorTransform) {
    Set<String> changedProperties = new HashSet<>();
    if (controllerServiceDTO != null) {
        // check both NiFi's internal key name and the display name to match the properties
        CaseInsensitiveMap propertyMap = new CaseInsensitiveMap(properties);
        Map<String, String> controllerServiceProperties = controllerServiceDTO.getProperties();
        controllerServiceProperties.entrySet().stream()
            .filter(entry -> propertyMap.containsKey(entry.getKey())
                || (controllerServiceDTO.getDescriptors().get(entry.getKey()) != null
                    && propertyMap.containsKey(controllerServiceDTO.getDescriptors().get(entry.getKey()).getDisplayName().toLowerCase())))
            .forEach(entry -> {
            boolean isSensitive = propertyDescriptorTransform.isSensitive(controllerServiceDTO.getDescriptors().get(entry.getKey()));
            String value = (String) propertyMap.get(entry.getKey());
            if (StringUtils.isBlank(value)) {
                value = (String) propertyMap.get(controllerServiceDTO.getDescriptors().get(entry.getKey()).getDisplayName().toLowerCase());
            }
            if (!isSensitive || (isSensitive && StringUtils.isNotBlank(value))) {
                entry.setValue(value);
                changedProperties.add(entry.getKey());
            }
        });
    }
    return !changedProperties.isEmpty();
}
Also used: CaseInsensitiveMap(org.apache.commons.collections.map.CaseInsensitiveMap) ControllerServiceDTO(org.apache.nifi.web.api.dto.ControllerServiceDTO) NifiProperty(com.thinkbiganalytics.nifi.rest.model.NifiProperty) Set(java.util.Set) StringUtils(org.apache.commons.lang3.StringUtils) Collectors(java.util.stream.Collectors) NiFiPropertyDescriptorTransform(com.thinkbiganalytics.nifi.rest.model.NiFiPropertyDescriptorTransform) HashSet(java.util.HashSet) List(java.util.List) Matcher(java.util.regex.Matcher) Map(java.util.Map) Optional(java.util.Optional) Pattern(java.util.regex.Pattern)
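The lookup order above (internal key first, then lower-cased display name) can be shown in isolation. Here resolve is a hypothetical helper, and a plain map with lower-cased keys stands in for the CaseInsensitiveMap of user-supplied properties:

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;

public class PropertyLookupDemo {

    /** Hypothetical helper: resolve by internal key, falling back to the lower-cased display name. */
    static String resolve(Map<String, String> userProperties, String internalKey, String displayName) {
        String value = userProperties.get(internalKey);
        if (StringUtils.isBlank(value)) {
            value = userProperties.get(displayName.toLowerCase());
        }
        return value;
    }

    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("database connection url", "jdbc:h2:mem:test"); // keyed by display name, not internal key
        System.out.println(resolve(props, "db-url", "Database Connection URL")); // jdbc:h2:mem:test
    }
}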

Example 3 with StringUtils.isBlank

Use of org.apache.commons.lang3.StringUtils.isBlank in project nifi by apache.

From the class Notify, method onTrigger:

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    final PropertyValue signalIdProperty = context.getProperty(RELEASE_SIGNAL_IDENTIFIER);
    final PropertyValue counterNameProperty = context.getProperty(SIGNAL_COUNTER_NAME);
    final PropertyValue deltaProperty = context.getProperty(SIGNAL_COUNTER_DELTA);
    final String attributeCacheRegex = context.getProperty(ATTRIBUTE_CACHE_REGEX).getValue();
    final Integer bufferCount = context.getProperty(SIGNAL_BUFFER_COUNT).asInteger();
    // the cache client used to interact with the distributed cache.
    final AtomicDistributedMapCacheClient cache = context.getProperty(DISTRIBUTED_CACHE_SERVICE).asControllerService(AtomicDistributedMapCacheClient.class);
    final WaitNotifyProtocol protocol = new WaitNotifyProtocol(cache);
    final Map<String, SignalBuffer> signalBuffers = new HashMap<>();
    for (int i = 0; i < bufferCount; i++) {
        final FlowFile flowFile = session.get();
        if (flowFile == null) {
            break;
        }
        // Signal id is computed from attribute 'RELEASE_SIGNAL_IDENTIFIER' with expression language support
        final String signalId = signalIdProperty.evaluateAttributeExpressions(flowFile).getValue();
        // if the computed value is null, empty, or whitespace-only, transfer the flow file to the failure relationship
        if (StringUtils.isBlank(signalId)) {
            logger.error("FlowFile {} has no attribute for given Release Signal Identifier", new Object[] { flowFile });
            // set 'notified' attribute
            session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
            continue;
        }
        String counterName = counterNameProperty.evaluateAttributeExpressions(flowFile).getValue();
        if (StringUtils.isEmpty(counterName)) {
            counterName = WaitNotifyProtocol.DEFAULT_COUNT_NAME;
        }
        int delta = 1;
        if (deltaProperty.isSet()) {
            final String deltaStr = deltaProperty.evaluateAttributeExpressions(flowFile).getValue();
            try {
                delta = Integer.parseInt(deltaStr);
            } catch (final NumberFormatException e) {
                logger.error("Failed to calculate delta for FlowFile {} due to {}", new Object[] { flowFile, e }, e);
                session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
                continue;
            }
        }
        if (!signalBuffers.containsKey(signalId)) {
            signalBuffers.put(signalId, new SignalBuffer());
        }
        final SignalBuffer signalBuffer = signalBuffers.get(signalId);
        if (StringUtils.isNotEmpty(attributeCacheRegex)) {
            flowFile.getAttributes().entrySet().stream()
                .filter(e -> !e.getKey().equals("uuid") && e.getKey().matches(attributeCacheRegex))
                .forEach(e -> signalBuffer.attributesToCache.put(e.getKey(), e.getValue()));
        }
        signalBuffer.incrementDelta(counterName, delta);
        signalBuffer.flowFiles.add(flowFile);
        if (logger.isDebugEnabled()) {
            logger.debug("Cached release signal identifier {} counterName {} from FlowFile {}", new Object[] { signalId, counterName, flowFile });
        }
    }
    signalBuffers.forEach((signalId, signalBuffer) -> {
        // retry after yielding for a while.
        try {
            protocol.notify(signalId, signalBuffer.deltas, signalBuffer.attributesToCache);
            signalBuffer.flowFiles.forEach(flowFile -> session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(true)), REL_SUCCESS));
        } catch (IOException e) {
            throw new RuntimeException(String.format("Unable to communicate with cache when processing %s due to %s", signalId, e), e);
        }
    });
}
Also used: StandardValidators(org.apache.nifi.processor.util.StandardValidators) CapabilityDescription(org.apache.nifi.annotation.documentation.CapabilityDescription) ResultType(org.apache.nifi.expression.AttributeExpression.ResultType) HashMap(java.util.HashMap) EventDriven(org.apache.nifi.annotation.behavior.EventDriven) ComponentLog(org.apache.nifi.logging.ComponentLog) StringUtils(org.apache.commons.lang3.StringUtils) PropertyDescriptor(org.apache.nifi.components.PropertyDescriptor) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) PropertyValue(org.apache.nifi.components.PropertyValue) HashSet(java.util.HashSet) Relationship(org.apache.nifi.processor.Relationship) Map(java.util.Map) Requirement(org.apache.nifi.annotation.behavior.InputRequirement.Requirement) AtomicDistributedMapCacheClient(org.apache.nifi.distributed.cache.client.AtomicDistributedMapCacheClient) FlowFile(org.apache.nifi.flowfile.FlowFile) ProcessContext(org.apache.nifi.processor.ProcessContext) Set(java.util.Set) IOException(java.io.IOException) ProcessSession(org.apache.nifi.processor.ProcessSession) WritesAttribute(org.apache.nifi.annotation.behavior.WritesAttribute) SeeAlso(org.apache.nifi.annotation.documentation.SeeAlso) List(java.util.List) InputRequirement(org.apache.nifi.annotation.behavior.InputRequirement) SupportsBatching(org.apache.nifi.annotation.behavior.SupportsBatching) AbstractProcessor(org.apache.nifi.processor.AbstractProcessor) Tags(org.apache.nifi.annotation.documentation.Tags) Collections(java.util.Collections)
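Each SignalBuffer above accumulates per-counter deltas so that a single notify call can carry several increments. A minimal sketch of that accumulation, using Map.merge in place of the incrementDelta implementation (which is not shown here):

import java.util.HashMap;
import java.util.Map;

public class SignalBufferDemo {

    // stand-in for SignalBuffer's per-counter deltas
    private final Map<String, Integer> deltas = new HashMap<>();

    void incrementDelta(String counterName, int delta) {
        // merge sums the new delta into any existing count for this counter
        deltas.merge(counterName, delta, Integer::sum);
    }

    public static void main(String[] args) {
        SignalBufferDemo buffer = new SignalBufferDemo();
        buffer.incrementDelta("default", 1);
        buffer.incrementDelta("default", 2);
        buffer.incrementDelta("errors", 1);
        System.out.println(buffer.deltas); // e.g. {default=3, errors=1} (iteration order not guaranteed)
    }
}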

Example 4 with StringUtils.isBlank

Use of org.apache.commons.lang3.StringUtils.isBlank in project nifi by apache.

From the class PGList, method doExecute:

@Override
public ProcessGroupsResult doExecute(final NiFiClient client, final Properties properties) throws NiFiClientException, IOException {
    final FlowClient flowClient = client.getFlowClient();
    // get the optional id of the parent PG, otherwise fallback to the root group
    String parentPgId = getArg(properties, CommandOption.PG_ID);
    if (StringUtils.isBlank(parentPgId)) {
        parentPgId = flowClient.getRootGroupId();
    }
    final ProcessGroupFlowEntity processGroupFlowEntity = flowClient.getProcessGroup(parentPgId);
    final ProcessGroupFlowDTO processGroupFlowDTO = processGroupFlowEntity.getProcessGroupFlow();
    final FlowDTO flowDTO = processGroupFlowDTO.getFlow();
    final List<ProcessGroupDTO> processGroups = new ArrayList<>();
    if (flowDTO.getProcessGroups() != null) {
        flowDTO.getProcessGroups().stream().map(pge -> pge.getComponent()).forEach(dto -> processGroups.add(dto));
    }
    return new ProcessGroupsResult(getResultType(properties), processGroups);
}
Also used: Properties(java.util.Properties) NiFiClientException(org.apache.nifi.toolkit.cli.impl.client.nifi.NiFiClientException) CommandOption(org.apache.nifi.toolkit.cli.impl.command.CommandOption) AbstractNiFiCommand(org.apache.nifi.toolkit.cli.impl.command.nifi.AbstractNiFiCommand) IOException(java.io.IOException) StringUtils(org.apache.commons.lang3.StringUtils) Context(org.apache.nifi.toolkit.cli.api.Context) ArrayList(java.util.ArrayList) NiFiClient(org.apache.nifi.toolkit.cli.impl.client.nifi.NiFiClient) List(java.util.List) ProcessGroupDTO(org.apache.nifi.web.api.dto.ProcessGroupDTO) ProcessGroupsResult(org.apache.nifi.toolkit.cli.impl.result.ProcessGroupsResult) FlowClient(org.apache.nifi.toolkit.cli.impl.client.nifi.FlowClient) ProcessGroupFlowEntity(org.apache.nifi.web.api.entity.ProcessGroupFlowEntity) ProcessGroupFlowDTO(org.apache.nifi.web.api.dto.flow.ProcessGroupFlowDTO) FlowDTO(org.apache.nifi.web.api.dto.flow.FlowDTO)
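The isBlank check above is a plain fallback: a blank optional argument is replaced by the root group id. The same pattern written with Optional, where "root" is a hypothetical stand-in for flowClient.getRootGroupId():

import java.util.Optional;
import org.apache.commons.lang3.StringUtils;

public class FallbackDemo {
    public static void main(String[] args) {
        String parentPgId = "   "; // e.g. the process group id argument was not supplied

        // equivalent to: if (StringUtils.isBlank(parentPgId)) parentPgId = rootGroupId;
        String effectiveId = Optional.ofNullable(parentPgId)
                .filter(StringUtils::isNotBlank)
                .orElse("root"); // hypothetical root group id

        System.out.println(effectiveId); // root
    }
}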

Example 5 with StringUtils.isBlank

Use of org.apache.commons.lang3.StringUtils.isBlank in project ddf by codice.

From the class Historian, method version:

/**
 * Versions updated {@link Metacard}s and {@link ContentItem}s.
 *
 * @param streamUpdateRequest   needed to pass the {@link ddf.catalog.core.versioning.MetacardVersion#SKIP_VERSIONING}
 *                              flag into the downstream update
 * @param updateStorageResponse the response whose updated items are versioned
 * @param updateResponse        supplies the security subject used when building version metacards
 * @return the update response originally passed in
 * @throws UnsupportedQueryException
 * @throws SourceUnavailableException
 * @throws IngestException
 */
public UpdateStorageResponse version(UpdateStorageRequest streamUpdateRequest, UpdateStorageResponse updateStorageResponse, UpdateResponse updateResponse) throws UnsupportedQueryException, SourceUnavailableException, IngestException {
    if (doSkip(updateStorageResponse)) {
        return updateStorageResponse;
    }
    setSkipFlag(streamUpdateRequest);
    setSkipFlag(updateStorageResponse);
    List<Metacard> updatedMetacards = updateStorageResponse.getUpdatedContentItems().stream()
            .filter(ci -> StringUtils.isBlank(ci.getQualifier()))
            .map(ContentItem::getMetacard)
            .filter(Objects::nonNull)
            .filter(isNotVersionNorDeleted)
            .collect(Collectors.toList());
    Map<String, Metacard> originalMetacards = query(forIds(updatedMetacards.stream().map(Metacard::getId).collect(Collectors.toList())));
    Collection<ReadStorageRequest> ids = getReadStorageRequests(updatedMetacards);
    Map<String, List<ContentItem>> content = getContent(ids);
    Function<String, Action> getAction = (id) -> content.containsKey(id) ? Action.VERSIONED_CONTENT : Action.VERSIONED;
    Map<String, Metacard> versionMetacards = getVersionMetacards(originalMetacards.values(), getAction, (Subject) updateResponse.getProperties().get(SecurityConstants.SECURITY_SUBJECT));
    CreateStorageResponse createStorageResponse = versionContentItems(content, versionMetacards);
    if (createStorageResponse == null) {
        LOGGER.debug("Could not version content items.");
        return updateStorageResponse;
    }
    setResourceUriForContent(/* mutable */ versionMetacards, createStorageResponse);
    storeVersionMetacards(versionMetacards);
    return updateStorageResponse;
}
Also used: UnsupportedQueryException(ddf.catalog.source.UnsupportedQueryException) LoggerFactory(org.slf4j.LoggerFactory) StringUtils(org.apache.commons.lang3.StringUtils) ReadStorageRequest(ddf.catalog.content.operation.ReadStorageRequest) UpdateStorageRequest(ddf.catalog.content.operation.UpdateStorageRequest) DeletedMetacardImpl(ddf.catalog.core.versioning.impl.DeletedMetacardImpl) Map(java.util.Map) Action(ddf.catalog.core.versioning.MetacardVersion.Action) SubjectUtils(ddf.security.SubjectUtils) Bundle(org.osgi.framework.Bundle) UpdateStorageResponse(ddf.catalog.content.operation.UpdateStorageResponse) MetacardVersionImpl(ddf.catalog.core.versioning.impl.MetacardVersionImpl) Predicate(java.util.function.Predicate) Collection(java.util.Collection) Collectors(java.util.stream.Collectors) BundleContext(org.osgi.framework.BundleContext) MetacardType(ddf.catalog.data.MetacardType) Objects(java.util.Objects) StorageException(ddf.catalog.content.StorageException) List(java.util.List) Operation(ddf.catalog.operation.Operation) CreateStorageResponse(ddf.catalog.content.operation.CreateStorageResponse) Optional(java.util.Optional) UpdateResponse(ddf.catalog.operation.UpdateResponse) QueryRequestImpl(ddf.catalog.operation.impl.QueryRequestImpl) ContentItemImpl(ddf.catalog.content.data.impl.ContentItemImpl) FilterBuilder(ddf.catalog.filter.FilterBuilder) SourceUnavailableException(ddf.catalog.source.SourceUnavailableException) AttributeImpl(ddf.catalog.data.impl.AttributeImpl) HashMap(java.util.HashMap) Callable(java.util.concurrent.Callable) DeleteResponse(ddf.catalog.operation.DeleteResponse) Function(java.util.function.Function) Update(ddf.catalog.operation.Update) ArrayList(java.util.ArrayList) Lists(com.google.common.collect.Lists) SKIP_VERSIONING(ddf.catalog.core.versioning.MetacardVersion.SKIP_VERSIONING) ContentItem(ddf.catalog.content.data.ContentItem) CreateResponse(ddf.catalog.operation.CreateResponse) Metacard(ddf.catalog.data.Metacard) SecurityConstants(ddf.security.SecurityConstants) StorageProvider(ddf.catalog.content.StorageProvider) ByteSource(com.google.common.io.ByteSource) Result(ddf.catalog.data.Result) Hashtable(java.util.Hashtable) Nullable(javax.annotation.Nullable) CreateRequestImpl(ddf.catalog.operation.impl.CreateRequestImpl) CreateStorageRequestImpl(ddf.catalog.content.operation.impl.CreateStorageRequestImpl) QueryImpl(ddf.catalog.operation.impl.QueryImpl) Logger(org.slf4j.Logger) Security(org.codice.ddf.security.common.Security) IngestException(ddf.catalog.source.IngestException) IOException(java.io.IOException) Subject(ddf.security.Subject) TimeUnit(java.util.concurrent.TimeUnit) StorageRequest(ddf.catalog.content.operation.StorageRequest) ReadStorageRequestImpl(ddf.catalog.content.operation.impl.ReadStorageRequestImpl) SourceResponse(ddf.catalog.operation.SourceResponse) CatalogProvider(ddf.catalog.source.CatalogProvider) Filter(org.opengis.filter.Filter) Collections(java.util.Collections) FrameworkUtil(org.osgi.framework.FrameworkUtil) InputStream(java.io.InputStream) UuidGenerator(org.codice.ddf.platform.util.uuidgenerator.UuidGenerator) ReadStorageResponse(ddf.catalog.content.operation.ReadStorageResponse)
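The first filter above keeps only content items whose qualifier is blank, typically the primary content rather than derived items. The same predicate applied to plain strings (the qualifier values here are hypothetical):

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;

public class QualifierFilterDemo {
    public static void main(String[] args) {
        // blank qualifiers (null or empty) mark the primary content item
        List<String> qualifiers = Arrays.asList(null, "", "preview", "thumbnail");

        List<String> primary = qualifiers.stream()
                .filter(StringUtils::isBlank)
                .collect(Collectors.toList());

        System.out.println(primary.size()); // 2: the null entry and the empty entry
    }
}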

Aggregations

StringUtils (org.apache.commons.lang3.StringUtils): 54
List (java.util.List): 33
Collectors (java.util.stream.Collectors): 29
Map (java.util.Map): 28
Set (java.util.Set): 27
ArrayList (java.util.ArrayList): 23
Optional (java.util.Optional): 22
Collections (java.util.Collections): 19
Logger (org.slf4j.Logger): 19
LoggerFactory (org.slf4j.LoggerFactory): 19
IOException (java.io.IOException): 18
HashSet (java.util.HashSet): 18
Collection (java.util.Collection): 16
HashMap (java.util.HashMap): 16
StopWatch (org.apache.commons.lang3.time.StopWatch): 13
Autowired (org.springframework.beans.factory.annotation.Autowired): 11
Slf4j (lombok.extern.slf4j.Slf4j): 10
InputStream (java.io.InputStream): 9
Inject (javax.inject.Inject): 8
RegisteredTemplate (com.thinkbiganalytics.feedmgr.rest.model.RegisteredTemplate): 7