Example 91 with StopWatch

use of org.apache.commons.lang3.time.StopWatch in project RebornCore by TechReborn.

the class RebornExplosion method explode.

public void explode() {
    StopWatch watch = new StopWatch();
    watch.start();
    for (int tx = -radius; tx < radius + 1; tx++) {
        for (int ty = -radius; ty < radius + 1; ty++) {
            for (int tz = -radius; tz < radius + 1; tz++) {
                // only clear blocks within a sphere of radius (radius - 2) around the center
                if (Math.sqrt(Math.pow(tx, 2) + Math.pow(ty, 2) + Math.pow(tz, 2)) <= radius - 2) {
                    BlockPos pos = center.add(tx, ty, tz);
                    IBlockState state = world.getBlockState(pos);
                    Block block = state.getBlock();
                    if (block != Blocks.BEDROCK && block != Blocks.AIR) {
                        block.onBlockDestroyedByExplosion(world, pos, this);
                        world.setBlockState(pos, Blocks.AIR.getDefaultState(), 3);
                    }
                }
            }
        }
    }
    RebornCore.logHelper.info("The explosion took" + watch + " to explode");
}
Also used : IBlockState(net.minecraft.block.state.IBlockState) Block(net.minecraft.block.Block) BlockPos(net.minecraft.util.math.BlockPos) StopWatch(org.apache.commons.lang3.time.StopWatch)
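
For reference, a minimal sketch of the commons-lang3 StopWatch lifecycle used above; the timed work is a stand-in and the class name is hypothetical:

import org.apache.commons.lang3.time.StopWatch;

public class StopWatchLifecycle {
    public static void main(String[] args) throws InterruptedException {
        StopWatch watch = new StopWatch();
        watch.start();
        // stand-in for the timed work
        Thread.sleep(50);
        watch.stop();
        // getTime() returns elapsed milliseconds; toString() formats as hours:minutes:seconds.millis
        System.out.println("Took " + watch.getTime() + " ms (" + watch + ")");
    }
}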

Example 92 with StopWatch

use of org.apache.commons.lang3.time.StopWatch in project nd4j by deeplearning4j.

the class AeronNDArraySerdeTest method timeOldVsNew.

@Test
public void timeOldVsNew() throws Exception {
    int numTrials = 1000;
    long oldTotal = 0;
    long newTotal = 0;
    INDArray arr = Nd4j.create(100000);
    Nd4j.getCompressor().compressi(arr, "GZIP");
    for (int i = 0; i < numTrials; i++) {
        StopWatch oldStopWatch = new StopWatch();
        BufferedOutputStream bos = new BufferedOutputStream(new ByteArrayOutputStream(arr.length()));
        DataOutputStream dos = new DataOutputStream(bos);
        oldStopWatch.start();
        Nd4j.write(arr, dos);
        oldStopWatch.stop();
        // System.out.println("Old " + oldStopWatch.getNanoTime());
        oldTotal += oldStopWatch.getNanoTime();
        StopWatch newStopWatch = new StopWatch();
        newStopWatch.start();
        AeronNDArraySerde.toBuffer(arr);
        newStopWatch.stop();
        // System.out.println("New " + newStopWatch.getNanoTime());
        newTotal += newStopWatch.getNanoTime();
    }
    oldTotal /= numTrials;
    newTotal /= numTrials;
    System.out.println("Old avg " + oldTotal + " New avg " + newTotal);
}
Also used : INDArray(org.nd4j.linalg.api.ndarray.INDArray) DataOutputStream(java.io.DataOutputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) BufferedOutputStream(java.io.BufferedOutputStream) StopWatch(org.apache.commons.lang3.time.StopWatch) Test(org.junit.Test)
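
The trial loop above generalizes to a small averaging harness. A sketch assuming stand-in Runnables in place of the Nd4j and Aeron calls; only the StopWatch calls are from commons-lang3:

import org.apache.commons.lang3.time.StopWatch;

public class TimingHarness {

    // runs the task numTrials times and returns the mean wall-clock time in nanoseconds
    static long averageNanos(Runnable task, int numTrials) {
        long total = 0;
        for (int i = 0; i < numTrials; i++) {
            StopWatch watch = new StopWatch();
            watch.start();
            task.run();
            watch.stop();
            total += watch.getNanoTime();
        }
        return total / numTrials;
    }

    public static void main(String[] args) {
        // stand-ins for the "old" and "new" serialization paths above
        Runnable oldPath = () -> { /* e.g. Nd4j.write(arr, dos) */ };
        Runnable newPath = () -> { /* e.g. AeronNDArraySerde.toBuffer(arr) */ };
        System.out.println("Old avg " + averageNanos(oldPath, 1000) + " New avg " + averageNanos(newPath, 1000));
    }
}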

Example 93 with StopWatch

use of org.apache.commons.lang3.time.StopWatch in project alf.io by alfio-event.

the class SpecialPriceTokenGenerator method generatePendingCodes.

public void generatePendingCodes() {
    StopWatch stopWatch = new StopWatch();
    log.trace("start pending codes generation");
    stopWatch.start();
    specialPriceRepository.findWaitingElements().forEach(this::generateCode);
    stopWatch.stop();
    log.trace("end. Took {} ms", stopWatch.getTime());
}
Also used : StopWatch(org.apache.commons.lang3.time.StopWatch)
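
This is the pattern at its smallest: start, do the work, stop, log getTime() milliseconds. A sketch of the same shape as a reusable helper; the Timed class and time method are hypothetical, and StopWatch.createStarted() requires commons-lang3 3.5 or later:

import java.util.function.Supplier;
import org.apache.commons.lang3.time.StopWatch;

public final class Timed {

    // runs the work, then reports how long it took; the label and logging are illustrative
    static <T> T time(String label, Supplier<T> work) {
        StopWatch stopWatch = StopWatch.createStarted();
        try {
            return work.get();
        } finally {
            stopWatch.stop();
            System.out.println(label + " took " + stopWatch.getTime() + " ms");
        }
    }

    public static void main(String[] args) {
        int sum = time("sum", () -> java.util.stream.IntStream.rangeClosed(1, 1_000_000).sum());
        System.out.println(sum);
    }
}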

Example 94 with StopWatch

use of org.apache.commons.lang3.time.StopWatch in project kylo by Teradata.

the class AbstractMergeTable method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLog();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final String blockingValue = context.getProperty(BLOCKING_KEY).evaluateAttributeExpressions(flowFile).getValue();
    String flowFileId = flowFile.getAttribute(CoreAttributes.UUID.key());
    boolean block = false;
    if (blocking && blockingCache.putIfAbsent(blockingValue, flowFileId) != null) {
        if (StringUtils.isBlank(flowFile.getAttribute(BLOCKED_START_TIME))) {
            flowFile = session.putAttribute(flowFile, BLOCKED_START_TIME, String.valueOf(System.currentTimeMillis()));
            getLogger().info("Transferring Flow file {} to blocked relationship", new Object[] { flowFile });
        }
        // penalize the flow file and transfer to BLOCKED
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_BLOCKED);
        return;
    }
    // Add Blocking time to flow file if this was a blocked flowfile.
    if (blocking && StringUtils.isNotBlank(flowFile.getAttribute(BLOCKED_START_TIME))) {
        String blockedStartTime = flowFile.getAttribute(BLOCKED_START_TIME);
        try {
            Long l = Long.parseLong(blockedStartTime);
            Long blockTime = System.currentTimeMillis() - l;
            getLogger().info("Processing Blocked flow file {}.  This was blocked for {} ms", new Object[] { flowFile, blockTime });
            flowFile = session.putAttribute(flowFile, BLOCKED_TIME, String.valueOf(blockTime) + " ms");
        } catch (NumberFormatException e) {
            // malformed start time; skip adding the BLOCKED_TIME attribute
        }
    }
    String PROVENANCE_EXECUTION_STATUS_KEY = context.getName() + " Execution Status";
    String partitionSpecString = context.getProperty(PARTITION_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue();
    String sourceSchema = context.getProperty(SOURCE_SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
    String sourceTable = context.getProperty(SOURCE_TABLE).evaluateAttributeExpressions(flowFile).getValue();
    String targetSchema = context.getProperty(TARGET_SCHEMA).evaluateAttributeExpressions(flowFile).getValue();
    String targetTable = context.getProperty(TARGET_TABLE).evaluateAttributeExpressions(flowFile).getValue();
    String feedPartitionValue = context.getProperty(FEED_PARTITION).evaluateAttributeExpressions(flowFile).getValue();
    String mergeStrategyValue = context.getProperty(MERGE_STRATEGY).evaluateAttributeExpressions(flowFile).getValue();
    String hiveConfigurations = context.getProperty(HIVE_CONFIGURATIONS).evaluateAttributeExpressions(flowFile).getValue();
    boolean resetHive = context.getProperty(RESET_HIVE).asBoolean();
    final ColumnSpec[] columnSpecs = Optional.ofNullable(context.getProperty(FIELD_SPECIFICATION).evaluateAttributeExpressions(flowFile).getValue()).filter(StringUtils::isNotEmpty).map(ColumnSpec::createFromString).orElse(new ColumnSpec[0]);
    if (STRATEGY_PK_MERGE.equals(mergeStrategyValue) && (columnSpecs == null || columnSpecs.length == 0)) {
        getLog().error("Missing required field specification for PK merge feature");
        flowFile = session.putAttribute(flowFile, PROVENANCE_EXECUTION_STATUS_KEY, "Failed: Missing required field specification for PK merge feature");
        release(blockingValue);
        session.transfer(flowFile, IngestProperties.REL_FAILURE);
        return;
    }
    // Maintain default for backward compatibility
    if (StringUtils.isEmpty(mergeStrategyValue)) {
        mergeStrategyValue = STRATEGY_DEDUPE_MERGE;
    }
    logger.info("Merge strategy: " + mergeStrategyValue + " Using Source: " + sourceTable + " Target: " + targetTable + " feed partition:" + feedPartitionValue + " partSpec: " + partitionSpecString);
    final StopWatch stopWatch = new StopWatch(true);
    try (final Connection conn = getConnection(context)) {
        TableMergeSyncSupport mergeSupport = new TableMergeSyncSupport(conn);
        if (resetHive) {
            mergeSupport.resetHiveConf();
        }
        mergeSupport.enableDynamicPartitions();
        if (StringUtils.isNotEmpty(hiveConfigurations)) {
            mergeSupport.setHiveConf(hiveConfigurations.split("\\|"));
        }
        PartitionSpec partitionSpec = new PartitionSpec(partitionSpecString);
        if (STRATEGY_DEDUPE_MERGE.equals(mergeStrategyValue)) {
            mergeSupport.doMerge(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue, true);
        } else if (STRATEGY_MERGE.equals(mergeStrategyValue)) {
            mergeSupport.doMerge(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue, false);
        } else if (STRATEGY_SYNC.equals(mergeStrategyValue)) {
            mergeSupport.doSync(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue);
        } else if (STRATEGY_ROLLING_SYNC.equals(mergeStrategyValue)) {
            mergeSupport.doRollingSync(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue);
        } else if (STRATEGY_PK_MERGE.equals(mergeStrategyValue)) {
            mergeSupport.doPKMerge(sourceSchema, sourceTable, targetSchema, targetTable, partitionSpec, feedPartitionValue, columnSpecs);
        } else {
            throw new UnsupportedOperationException("Failed to resolve the merge strategy");
        }
        session.getProvenanceReporter().modifyContent(flowFile, "Execution completed", stopWatch.getElapsed(TimeUnit.MILLISECONDS));
        flowFile = session.putAttribute(flowFile, PROVENANCE_EXECUTION_STATUS_KEY, "Successful");
        release(blockingValue);
        logger.info("Execution completed: " + stopWatch.getElapsed(TimeUnit.MILLISECONDS) + " Merge strategy: " + mergeStrategyValue + " Using Source: " + sourceTable + " Target: " + targetTable + " feed partition:" + feedPartitionValue + " partSpec: " + partitionSpecString);
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final Exception e) {
        logger.error("Unable to execute merge doMerge for {} due to {}; routing to failure", new Object[] { flowFile, e }, e);
        flowFile = session.putAttribute(flowFile, PROVENANCE_EXECUTION_STATUS_KEY, "Failed: " + e.getMessage());
        release(blockingValue);
        session.transfer(flowFile, REL_FAILURE);
    }
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) ColumnSpec(com.thinkbiganalytics.util.ColumnSpec) Connection(java.sql.Connection) ComponentLog(org.apache.nifi.logging.ComponentLog) PartitionSpec(com.thinkbiganalytics.util.PartitionSpec) ProcessException(org.apache.nifi.processor.exception.ProcessException) StopWatch(org.apache.nifi.util.StopWatch) StringUtils(org.apache.commons.lang3.StringUtils) TableMergeSyncSupport(com.thinkbiganalytics.ingest.TableMergeSyncSupport)
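
Note that, per the imports above, this example's StopWatch is org.apache.nifi.util.StopWatch rather than the commons-lang3 class in the heading: new StopWatch(true) starts the watch immediately, and getElapsed(TimeUnit) can be read while it is still running, which is exactly how the processor uses it. A minimal sketch using only the calls that appear above; the class name and sleep are stand-ins:

import java.util.concurrent.TimeUnit;
import org.apache.nifi.util.StopWatch;

public class NifiStopWatchSketch {
    public static void main(String[] args) throws InterruptedException {
        // true = start immediately, as in the processor above
        StopWatch stopWatch = new StopWatch(true);
        // stand-in for the merge work
        Thread.sleep(25);
        // read the elapsed time without stopping the watch
        System.out.println("Execution completed: " + stopWatch.getElapsed(TimeUnit.MILLISECONDS) + " ms");
    }
}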

Example 95 with StopWatch

use of org.apache.commons.lang3.time.StopWatch in project kylo by Teradata.

the class DefaultFeedManagerFeedService method deployFeed.

private NifiFeed deployFeed(final FeedMetadata feedMetadata, com.thinkbiganalytics.metadata.api.versioning.EntityVersion<Feed.ID, Feed> version) throws DeployFeedException {
    Stopwatch stopwatch = Stopwatch.createStarted();
    boolean enabled = false;
    if (feedMetadata.isActive()) {
        feedMetadata.setState(Feed.State.ENABLED.name());
        enabled = true;
    } else {
        feedMetadata.setState(Feed.State.DISABLED.name());
    }
    // Store ref to the originalFeedProperties before resolving and merging with the template
    List<NifiProperty> originalFeedProperties = feedMetadata.getProperties();
    // Get all the properties for the metadata
    RegisteredTemplate registeredTemplate = registeredTemplateService.findRegisteredTemplate(new RegisteredTemplateRequest.Builder().templateId(feedMetadata.getTemplateId()).templateName(feedMetadata.getTemplateName()).isFeedEdit(true).includeSensitiveProperties(true).build());
    // Copy the registered template properties into a new list so the original doesn't get updated
    List<NifiProperty> templateProperties = registeredTemplate.getProperties().stream().map(nifiProperty -> new NifiProperty(nifiProperty)).collect(Collectors.toList());
    // Update the template properties with the feedMetadata properties
    NifiPropertyUtil.matchAndSetPropertyByProcessorName(templateProperties, feedMetadata.getProperties(), NifiPropertyUtil.PropertyUpdateMode.UPDATE_ALL_PROPERTIES);
    registeredTemplate.setProperties(templateProperties);
    feedMetadata.setProperties(registeredTemplate.getProperties());
    feedMetadata.setRegisteredTemplate(registeredTemplate);
    // Skip any properties that the user supplied which are not ${...} expressions
    List<NifiProperty> propertiesToSkip = originalFeedProperties.stream().filter(property -> !propertyExpressionResolver.containsVariablesPatterns(property.getValue())).collect(Collectors.toList());
    List<NifiProperty> templatePropertiesToSkip = registeredTemplate.getProperties().stream().filter(property -> property.isSelected() && !propertyExpressionResolver.containsVariablesPatterns(property.getValue())).collect(Collectors.toList());
    if (templatePropertiesToSkip != null && !templatePropertiesToSkip.isEmpty()) {
        propertiesToSkip.addAll(templatePropertiesToSkip);
    }
    // Resolve any ${metadata.} properties
    propertyExpressionResolver.resolvePropertyExpressions(feedMetadata, propertiesToSkip);
    // decrypt the metadata
    feedModelTransform.decryptSensitivePropertyValues(feedMetadata);
    // if this is the very first version we need to enable it later (after the data has been sync'd with ops manager)
    boolean enableLater = false;
    if (enabled && version.isFirstVersion()) {
        enabled = false;
        enableLater = true;
        feedMetadata.setState(FeedMetadata.STATE.DISABLED.name());
    }
    CreateFeedBuilder feedBuilder = CreateFeedBuilder.newFeed(nifiRestClient, nifiFlowCache, feedMetadata, registeredTemplate.getNifiTemplateId(), propertyExpressionResolver, propertyDescriptorTransform, niFiObjectCache, templateConnectionUtil).enabled(enabled).setOriginalFeedProperties(originalFeedProperties).removeInactiveVersionedProcessGroup(removeInactiveNifiVersionedFeedFlows).autoAlign(nifiAutoFeedsAlignAfterSave).withNiFiTemplateCache(niFiTemplateCache);
    if (registeredTemplate.isReusableTemplate()) {
        feedBuilder.setReusableTemplate(true);
        feedMetadata.setIsReusableFeed(true);
    } else {
        feedBuilder.inputProcessorType(feedMetadata.getInputProcessorType()).feedSchedule(feedMetadata.getSchedule()).properties(feedMetadata.getProperties());
        if (registeredTemplate.usesReusableTemplate()) {
            for (ReusableTemplateConnectionInfo connection : registeredTemplate.getReusableTemplateConnections()) {
                feedBuilder.addInputOutputPort(new InputOutputPort(connection.getReusableTemplateInputPortName(), connection.getFeedOutputPortName()));
            }
        }
    }
    stopwatch.stop();
    log.debug("Time to prepare data for saving feed in NiFi: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
    stopwatch.reset();
    stopwatch.start();
    NifiProcessGroup entity = feedBuilder.build();
    stopwatch.stop();
    log.debug("Time to save feed in NiFi: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
    stopwatch.reset();
    NifiFeed feed = new NifiFeed(feedMetadata, entity);
    // Set the original feedProperties back to the feed
    feedMetadata.setProperties(originalFeedProperties);
    // Encrypt the metadata properties
    feedModelTransform.encryptSensitivePropertyValues(feedMetadata);
    if (entity.isSuccess()) {
        feedMetadata.setNifiProcessGroupId(entity.getProcessGroupEntity().getId());
        try {
            stopwatch.start();
            saveDeployedFeed(feedMetadata, version);
            // tell NiFi if this is a streaming feed or not
            if (feedMetadata.getRegisteredTemplate().isStream()) {
                streamingFeedJmsNotificationService.updateNiFiStatusJMSTopic(entity, feedMetadata);
            }
            feed.setSuccess(true);
            feed.setEnableAfterSave(enableLater);
            stopwatch.stop();
            log.debug("Time to saveFeed in Kylo: {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
            stopwatch.reset();
            stopwatch.start();
            feedBuilder.checkAndRemoveVersionedProcessGroup();
        } catch (Exception e) {
            feed.setSuccess(false);
            feed.addErrorMessage(e);
        }
    } else {
        feed.setSuccess(false);
    }
    if (!feed.isSuccess()) {
        if (!entity.isRolledBack()) {
            try {
                feedBuilder.rollback();
            } catch (FeedRollbackException rollbackException) {
                log.error("Error rolling back feed {}. {} ", feedMetadata.getCategoryAndFeedName(), rollbackException.getMessage());
                feed.addErrorMessage("Error occurred in rolling back the Feed.");
            }
            entity.setRolledBack(true);
        }
        throw new DeployFeedException(feed);
    }
    // Move to isSuccess block??
    feedHistoryDataReindexingService.updateHistoryDataReindexingFeedsAvailableCache(feedMetadata);
    return feed;
}
Also used : Action(com.thinkbiganalytics.security.action.Action) RegisteredTemplateService(com.thinkbiganalytics.feedmgr.service.template.RegisteredTemplateService) Category(com.thinkbiganalytics.metadata.api.category.Category) DataSourceProvider(com.thinkbiganalytics.metadata.api.catalog.DataSourceProvider) ReusableTemplateConnectionInfo(com.thinkbiganalytics.feedmgr.rest.model.ReusableTemplateConnectionInfo) Autowired(org.springframework.beans.factory.annotation.Autowired) StringUtils(org.apache.commons.lang3.StringUtils) FeedProvider(com.thinkbiganalytics.metadata.api.feed.FeedProvider) FeedAccessControl(com.thinkbiganalytics.metadata.api.feed.security.FeedAccessControl) Map(java.util.Map) FeedPropertyChangeEvent(com.thinkbiganalytics.metadata.api.event.feed.FeedPropertyChangeEvent) AccessController(com.thinkbiganalytics.security.AccessController) NifiFeed(com.thinkbiganalytics.feedmgr.rest.model.NifiFeed) CategoryAccessControl(com.thinkbiganalytics.metadata.api.category.security.CategoryAccessControl) FeedServicesAccessControl(com.thinkbiganalytics.feedmgr.security.FeedServicesAccessControl) SecurityContextHolder(org.springframework.security.core.context.SecurityContextHolder) FeedManagerTemplateProvider(com.thinkbiganalytics.metadata.api.template.FeedManagerTemplateProvider) FeedManagerTemplateService(com.thinkbiganalytics.feedmgr.service.template.FeedManagerTemplateService) MetadataEventListener(com.thinkbiganalytics.metadata.api.event.MetadataEventListener) Obligation(com.thinkbiganalytics.metadata.rest.model.sla.Obligation) NifiProperty(com.thinkbiganalytics.nifi.rest.model.NifiProperty) LabelValue(com.thinkbiganalytics.rest.model.LabelValue) ActionGroup(com.thinkbiganalytics.security.rest.model.ActionGroup) PageRequest(org.springframework.data.domain.PageRequest) Set(java.util.Set) Page(org.springframework.data.domain.Page) EntityVersionDifference(com.thinkbiganalytics.feedmgr.rest.model.EntityVersionDifference) MetadataEventService(com.thinkbiganalytics.metadata.api.event.MetadataEventService) Serializable(java.io.Serializable) DraftEntityVersion(com.thinkbiganalytics.feedmgr.rest.model.DraftEntityVersion) CategoryProvider(com.thinkbiganalytics.metadata.api.category.CategoryProvider) SecurityModelTransform(com.thinkbiganalytics.security.rest.controller.SecurityModelTransform) DataSourceNotFoundException(com.thinkbiganalytics.metadata.api.catalog.DataSourceNotFoundException) FeedMetadata(com.thinkbiganalytics.feedmgr.rest.model.FeedMetadata) MetadataRepositoryException(com.thinkbiganalytics.metadata.modeshape.MetadataRepositoryException) ServiceLevelAgreementBuilder(com.thinkbiganalytics.metadata.sla.spi.ServiceLevelAgreementBuilder) RegisteredTemplateRequest(com.thinkbiganalytics.feedmgr.rest.model.RegisteredTemplateRequest) ArrayList(java.util.ArrayList) Value(org.springframework.beans.factory.annotation.Value) NiFiObjectCache(com.thinkbiganalytics.nifi.rest.NiFiObjectCache) DerivedDatasourceFactory(com.thinkbiganalytics.feedmgr.service.feed.datasource.DerivedDatasourceFactory) MetadataChange(com.thinkbiganalytics.metadata.api.event.MetadataChange) RegisteredTemplate(com.thinkbiganalytics.feedmgr.rest.model.RegisteredTemplate) ObligationGroup(com.thinkbiganalytics.metadata.sla.api.ObligationGroup) Nullable(javax.annotation.Nullable) Datasource(com.thinkbiganalytics.metadata.api.datasource.Datasource) Properties(java.util.Properties) SecurityService(com.thinkbiganalytics.feedmgr.service.security.SecurityService) 
FeedProperties(com.thinkbiganalytics.metadata.api.feed.FeedProperties) FeedManagerTemplate(com.thinkbiganalytics.metadata.api.template.FeedManagerTemplate) HadoopSecurityGroup(com.thinkbiganalytics.metadata.api.security.HadoopSecurityGroup) ImmutableAllowableAction(com.thinkbiganalytics.security.role.ImmutableAllowableAction) TemplateConnectionUtil(com.thinkbiganalytics.feedmgr.nifi.TemplateConnectionUtil) HadoopAuthorizationService(com.thinkbiganalytics.datalake.authorization.service.HadoopAuthorizationService) ListUtils(org.apache.commons.collections.ListUtils) LoggerFactory(org.slf4j.LoggerFactory) FeedChange(com.thinkbiganalytics.metadata.api.event.feed.FeedChange) NiFiPropertyDescriptorTransform(com.thinkbiganalytics.nifi.rest.model.NiFiPropertyDescriptorTransform) Precondition(com.thinkbiganalytics.policy.precondition.Precondition) PreDestroy(javax.annotation.PreDestroy) DataSetProvider(com.thinkbiganalytics.metadata.api.catalog.DataSetProvider) NifiPropertyUtil(com.thinkbiganalytics.nifi.rest.support.NifiPropertyUtil) Pageable(org.springframework.data.domain.Pageable) MetadataAccess(com.thinkbiganalytics.metadata.api.MetadataAccess) CatalogModelTransform(com.thinkbiganalytics.kylo.catalog.rest.model.CatalogModelTransform) FeedVersions(com.thinkbiganalytics.feedmgr.rest.model.FeedVersions) DeployResponseEntityVersion(com.thinkbiganalytics.feedmgr.rest.model.DeployResponseEntityVersion) FeedDestination(com.thinkbiganalytics.metadata.api.feed.FeedDestination) OpsManagerFeedProvider(com.thinkbiganalytics.metadata.api.feed.OpsManagerFeedProvider) ImmutableAllowedActions(com.thinkbiganalytics.security.role.ImmutableAllowedActions) UserField(com.thinkbiganalytics.feedmgr.rest.model.UserField) ImmutableMap(com.google.common.collect.ImmutableMap) DataSetNotFoundException(com.thinkbiganalytics.metadata.api.catalog.DataSetNotFoundException) Collection(java.util.Collection) Collectors(java.util.stream.Collectors) Sets(com.google.common.collect.Sets) NotFoundException(javax.ws.rs.NotFoundException) EntityVersion(com.thinkbiganalytics.feedmgr.rest.model.EntityVersion) FeedSummary(com.thinkbiganalytics.feedmgr.rest.model.FeedSummary) InputOutputPort(com.thinkbiganalytics.nifi.feedmgr.InputOutputPort) List(java.util.List) Principal(java.security.Principal) DataSource(com.thinkbiganalytics.metadata.api.catalog.DataSource) PostConstruct(javax.annotation.PostConstruct) Optional(java.util.Optional) NifiProcessGroup(com.thinkbiganalytics.nifi.rest.model.NifiProcessGroup) DerivedDatasource(com.thinkbiganalytics.metadata.api.datasource.DerivedDatasource) PreconditionRule(com.thinkbiganalytics.policy.rest.model.PreconditionRule) DataAccessException(org.springframework.dao.DataAccessException) Stopwatch(com.google.common.base.Stopwatch) Feed(com.thinkbiganalytics.metadata.api.feed.Feed) HashMap(java.util.HashMap) AllowedActions(com.thinkbiganalytics.security.action.AllowedActions) UserProperty(com.thinkbiganalytics.feedmgr.rest.model.UserProperty) HashSet(java.util.HashSet) Inject(javax.inject.Inject) UIFeed(com.thinkbiganalytics.feedmgr.rest.model.UIFeed) PropertyExpressionResolver(com.thinkbiganalytics.feedmgr.nifi.PropertyExpressionResolver) ServiceLevelAgreementService(com.thinkbiganalytics.feedmgr.sla.ServiceLevelAgreementService) FeedChangeEvent(com.thinkbiganalytics.metadata.api.event.feed.FeedChangeEvent) Qualifier(org.springframework.beans.factory.annotation.Qualifier) FeedRollbackException(com.thinkbiganalytics.nifi.feedmgr.FeedRollbackException) 
FeedSource(com.thinkbiganalytics.metadata.api.feed.FeedSource) Nonnull(javax.annotation.Nonnull) FeedNotFoundException(com.thinkbiganalytics.metadata.api.feed.FeedNotFoundException) Logger(org.slf4j.Logger) FeedNameUtil(com.thinkbiganalytics.support.FeedNameUtil) CreateFeedBuilder(com.thinkbiganalytics.feedmgr.nifi.CreateFeedBuilder) FeedHistoryDataReindexingService(com.thinkbiganalytics.feedmgr.service.feed.reindexing.FeedHistoryDataReindexingService) DateTime(org.joda.time.DateTime) UserFieldDescriptor(com.thinkbiganalytics.metadata.api.extension.UserFieldDescriptor) FieldRuleProperty(com.thinkbiganalytics.policy.rest.model.FieldRuleProperty) TimeUnit(java.util.concurrent.TimeUnit) NifiFlowCache(com.thinkbiganalytics.feedmgr.nifi.cache.NifiFlowCache) AbstractMap(java.util.AbstractMap) UserPropertyTransform(com.thinkbiganalytics.feedmgr.service.UserPropertyTransform) NiFiTemplateCache(com.thinkbiganalytics.feedmgr.service.template.NiFiTemplateCache) DependentFeedPrecondition(com.thinkbiganalytics.policy.precondition.DependentFeedPrecondition) PreconditionPolicyTransformer(com.thinkbiganalytics.policy.precondition.transform.PreconditionPolicyTransformer) Comparator(java.util.Comparator) Collections(java.util.Collections) LegacyNifiRestClient(com.thinkbiganalytics.nifi.rest.client.LegacyNifiRestClient)
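
Per the imports, this example actually times with Guava's com.google.common.base.Stopwatch, whose lifecycle differs from the commons-lang3 class: it is created with createStarted(), read with elapsed(TimeUnit), and reset() zeroes the accumulated time, which is why deployFeed calls stop/reset/start between phases. A minimal sketch using only the calls that appear above; the class name and sleeps are stand-ins:

import com.google.common.base.Stopwatch;
import java.util.concurrent.TimeUnit;

public class GuavaStopwatchSketch {
    public static void main(String[] args) throws InterruptedException {
        Stopwatch stopwatch = Stopwatch.createStarted();
        // stand-in for the first phase
        Thread.sleep(10);
        stopwatch.stop();
        System.out.println("phase 1: " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
        // reset() zeroes the elapsed time; without it, elapsed would accumulate across phases
        stopwatch.reset();
        stopwatch.start();
        // stand-in for the second phase
        Thread.sleep(10);
        stopwatch.stop();
        System.out.println("phase 2: " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
    }
}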

Aggregations

StopWatch (org.apache.commons.lang3.time.StopWatch) 528
Test (org.junit.Test) 150
EventResult (org.alfresco.bm.event.EventResult) 97
DBObject (com.mongodb.DBObject) 90
Event (org.alfresco.bm.event.Event) 87
FolderData (org.alfresco.bm.cm.FolderData) 75
File (java.io.File) 71
ArrayList (java.util.ArrayList) 49
HashSet (java.util.HashSet) 31
Gene (ubic.gemma.model.genome.Gene) 31
Vertex (org.apache.tinkerpop.gremlin.structure.Vertex) 26
BaseTest (org.umlg.sqlg.test.BaseTest) 26
Element (org.w3c.dom.Element) 25
IOException (java.io.IOException) 23
LoadSingleComponentUnitTest (org.alfresco.bm.dataload.LoadSingleComponentUnitTest) 23
UserModel (org.alfresco.utility.model.UserModel) 23
Collectors (java.util.stream.Collectors) 19
HashMap (java.util.HashMap) 18
List (java.util.List) 18
ExpressionExperiment (ubic.gemma.model.expression.experiment.ExpressionExperiment) 18