
Example 26 with StringUtils

use of org.apache.commons.lang3.StringUtils in project samza by apache.

the class JobModelHelper method getProcessorLocality.

/**
 * Retrieves and returns the processor locality of a Samza job using the provided {@link Config} and {@link LocalityManager}.
 * @param config provides the configurations defined by the user. Required to connect to the storage layer.
 * @param localityManager provides the processor to host mapping persisted to the metadata store.
 * @return the processor locality.
 */
private static Map<String, LocationId> getProcessorLocality(Config config, LocalityManager localityManager) {
    Map<String, LocationId> containerToLocationId = new HashMap<>();
    Map<String, ProcessorLocality> existingContainerLocality = localityManager.readLocality().getProcessorLocalities();
    for (int i = 0; i < new JobConfig(config).getContainerCount(); i++) {
        String containerId = Integer.toString(i);
        LocationId locationId = Optional.ofNullable(existingContainerLocality.get(containerId))
                .map(ProcessorLocality::host)
                .filter(StringUtils::isNotEmpty)
                .map(LocationId::new)
                .orElse(new LocationId("ANY_HOST"));
        containerToLocationId.put(containerId, locationId);
    }
    return containerToLocationId;
}
Also used : ProcessorLocality(org.apache.samza.job.model.ProcessorLocality) HashMap(java.util.HashMap) StringUtils(org.apache.commons.lang3.StringUtils) LocationId(org.apache.samza.runtime.LocationId) JobConfig(org.apache.samza.config.JobConfig)
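
The core idiom above is worth isolating: wrap a possibly-missing lookup in an Optional, reject empty hosts with StringUtils::isNotEmpty, and fall back to a default location. A minimal standalone sketch of that pattern, outside Samza (class and method names here are illustrative only):

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;

public class HostFallbackSketch {
    // Illustrative helper: returns the mapped host for the given id, or the fallback
    // when the entry is missing, null, or an empty string.
    static String hostOrDefault(Map<String, String> hostsById, String id, String fallback) {
        return Optional.ofNullable(hostsById.get(id))
                .filter(StringUtils::isNotEmpty)
                .orElse(fallback);
    }

    public static void main(String[] args) {
        Map<String, String> hosts = new HashMap<>();
        hosts.put("0", "host-a.example.com");
        hosts.put("1", "");
        System.out.println(hostOrDefault(hosts, "0", "ANY_HOST")); // host-a.example.com
        System.out.println(hostOrDefault(hosts, "1", "ANY_HOST")); // ANY_HOST (empty host rejected)
        System.out.println(hostOrDefault(hosts, "2", "ANY_HOST")); // ANY_HOST (no entry at all)
    }
}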

Example 27 with StringUtils

use of org.apache.commons.lang3.StringUtils in project neo4j by neo4j.

the class ServiceAnnotationProcessor method loadIfExists.

private SortedSet<String> loadIfExists(String path) {
    final SortedSet<String> result = new TreeSet<>();
    try {
        final FileObject file = processingEnv.getFiler().getResource(CLASS_OUTPUT, "", path);
        final List<String> lines = new ArrayList<>();
        try (BufferedReader in = new BufferedReader(new InputStreamReader(file.openInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                lines.add(line);
            }
        }
        lines.stream()
                .map(s -> substringBefore(s, "#"))
                .map(String::trim)
                .filter(StringUtils::isNotEmpty)
                .forEach(result::add);
        info("Loaded existing providers: " + result);
    } catch (IOException ignore) {
        info("No existing providers loaded");
    }
    return result;
}
Also used : SortedSet(java.util.SortedSet) AbstractProcessor(javax.annotation.processing.AbstractProcessor) TypeElement(javax.lang.model.element.TypeElement) StringUtils(org.apache.commons.lang3.StringUtils) Elements(javax.lang.model.util.Elements) ERROR(javax.tools.Diagnostic.Kind.ERROR) Multimaps(org.eclipse.collections.impl.factory.Multimaps) TreeSet(java.util.TreeSet) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) FileObject(javax.tools.FileObject) DEFAULT_NEW_LINE(org.neo4j.annotations.AnnotationConstants.DEFAULT_NEW_LINE) MutableMultimap(org.eclipse.collections.api.multimap.MutableMultimap) CLASS_OUTPUT(javax.tools.StandardLocation.CLASS_OUTPUT) Collectors.toSet(java.util.stream.Collectors.toSet) BufferedWriter(java.io.BufferedWriter) Set(java.util.Set) ExceptionUtils.getStackTrace(org.apache.commons.lang3.exception.ExceptionUtils.getStackTrace) UnifiedSet.newSetWith(org.eclipse.collections.impl.set.mutable.UnifiedSet.newSetWith) IOException(java.io.IOException) Element(javax.lang.model.element.Element) Types(javax.lang.model.util.Types) InputStreamReader(java.io.InputStreamReader) StandardCharsets(java.nio.charset.StandardCharsets) String.format(java.lang.String.format) SourceVersion(javax.lang.model.SourceVersion) NOTE(javax.tools.Diagnostic.Kind.NOTE) List(java.util.List) TypeMirror(javax.lang.model.type.TypeMirror) Collectors.toList(java.util.stream.Collectors.toList) RoundEnvironment(javax.annotation.processing.RoundEnvironment) ProcessingEnvironment(javax.annotation.processing.ProcessingEnvironment) Optional(java.util.Optional) BufferedReader(java.io.BufferedReader) StringUtils.substringBefore(org.apache.commons.lang3.StringUtils.substringBefore)
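
The StringUtils usage here is the line-cleaning pipeline: substringBefore drops an inline "#" comment, trim removes surrounding whitespace, and isNotEmpty filters out lines that were blank or comment-only. A minimal sketch of just that pipeline, outside the annotation-processor context (class name and sample lines are illustrative):

import java.util.Arrays;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import org.apache.commons.lang3.StringUtils;
import static org.apache.commons.lang3.StringUtils.substringBefore;

public class ProviderLineSketch {
    static SortedSet<String> cleanLines(List<String> lines) {
        SortedSet<String> result = new TreeSet<>();
        lines.stream()
                .map(s -> substringBefore(s, "#")) // keep only the text before an inline comment
                .map(String::trim)                 // strip surrounding whitespace
                .filter(StringUtils::isNotEmpty)   // drop blank and comment-only lines
                .forEach(result::add);
        return result;
    }

    public static void main(String[] args) {
        List<String> lines = Arrays.asList(
                "com.example.FooProvider   # default provider",
                "   # comment-only line",
                "",
                "com.example.BarProvider");
        System.out.println(cleanLines(lines)); // [com.example.BarProvider, com.example.FooProvider]
    }
}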

Example 28 with StringUtils

use of org.apache.commons.lang3.StringUtils in project flink by apache.

the class EmulatedFullTopologyTest method testFullTopology.

// ======================================================================================================
// IMPORTANT: This test makes use of things that happen in the emulated PubSub that are
// GUARANTEED to be different in the real Google hosted PubSub, so running these tests
// against the real thing has a very high probability of failing.
// The assumptions:
// 1) The ordering of the messages is maintained.
//    We insert a STOP_MARKER _after_ the set of test measurements and assume this STOP event
//    will arrive after the actual test data, so we can stop the processing. In the real PubSub
//    this is NOT true.
// 2) Exactly once: we assume that every message we put in comes out exactly once.
//    In the real PubSub there are a lot of situations (mostly failure/retry) where this is not true.
@Test
public void testFullTopology() throws Exception {
    // ===============================================================================
    // Step 0: The test data
    List<String> input = new ArrayList<>(Arrays.asList("One", "Two", "Three", "Four", "Five", "Six", "Seven", "Eight", "Nine", "Ten"));
    List<String> messagesToSend = new ArrayList<>(input);
    // Now add some stream termination messages.
    // NOTE: Messages are pulled from PubSub in batches by the source, so we need enough
    // STOP_MARKERs to ensure that ALL parallel tasks get at least one STOP_MARKER.
    // If not, at least one task will not terminate and the test will not end.
    // We pull 3 at a time and have 4 parallel tasks: we need at least 12 STOP_MARKERs.
    IntStream.rangeClosed(1, 20).forEach(i -> messagesToSend.add(STOP_MARKER));
    // IMPORTANT NOTE: This way of testing uses an effect of the PubSub emulator that is
    // absolutely guaranteed NOT to work in the real PubSub: the ordering of the messages is
    // maintained in the topic. So here we can assume that if we add a stop message LAST we can
    // terminate the test stream when we see it.
    // ===============================================================================
    // Step 1: We put test data into the topic
    // Publish the test messages into the input topic
    Publisher publisher = pubsubHelper.createPublisher(PROJECT_NAME, INPUT_TOPIC_NAME);
    for (String s : messagesToSend) {
        publisher.publish(PubsubMessage.newBuilder().setData(ByteString.copyFromUtf8(s)).build()).get();
    }
    publisher.shutdown();
    // ===============================================================================
    // Step 2: Now we run our topology
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.enableCheckpointing(100);
    env.setParallelism(4);
    env.setRestartStrategy(RestartStrategies.noRestart());
    // Silly topology
    env.addSource(PubSubSource.newBuilder()
            // the deserialization schema provides a self termination feature (stop-marker detection).
            .withDeserializationSchema(new SimpleStringSchemaWithStopMarkerDetection())
            .withProjectName(PROJECT_NAME)
            .withSubscriptionName(INPUT_SUBSCRIPTION_NAME)
            .withCredentials(EmulatorCredentials.getInstance())
            .withPubSubSubscriberFactory(new PubSubSubscriberFactoryForEmulator(getPubSubHostPort(), PROJECT_NAME, INPUT_SUBSCRIPTION_NAME, 1, Duration.ofSeconds(1), 3))
            .build())
        .map((MapFunction<String, String>) StringUtils::reverse)
        .addSink(PubSubSink.newBuilder()
            .withSerializationSchema(new SimpleStringSchema())
            .withProjectName(PROJECT_NAME)
            .withTopicName(OUTPUT_TOPIC_NAME)
            .withCredentials(EmulatorCredentials.getInstance())
            .withHostAndPortForEmulator(getPubSubHostPort())
            .build());
    env.execute("Running unit test");
    // ===============================================================================
    // Now we should have all the resulting data in the output topic.
    // Step 3: Get the result from the output topic and verify if everything is there
    List<ReceivedMessage> receivedMessages = pubsubHelper.pullMessages(PROJECT_NAME, OUTPUT_SUBSCRIPTION_NAME, 100);
    assertEquals("Wrong number of elements", input.size(), receivedMessages.size());
    // Check output strings
    List<String> output = new ArrayList<>();
    // Extract the actual Strings from the ReceivedMessages
    receivedMessages.forEach(msg -> output.add(msg.getMessage().getData().toStringUtf8()));
    for (String test : input) {
        String reversedTest = org.apache.commons.lang3.StringUtils.reverse(test);
        LOG.info("Checking if \"{}\" --> \"{}\" exists", test, reversedTest);
        assertTrue("Missing " + test, output.contains(reversedTest));
    }
// ===============================================================================
}
Also used : StringUtils(org.apache.commons.lang3.StringUtils) PubSubSubscriberFactoryForEmulator(org.apache.flink.streaming.connectors.gcp.pubsub.emulator.PubSubSubscriberFactoryForEmulator) ArrayList(java.util.ArrayList) SimpleStringSchema(org.apache.flink.api.common.serialization.SimpleStringSchema) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) ByteString(com.google.protobuf.ByteString) Publisher(com.google.cloud.pubsub.v1.Publisher) MapFunction(org.apache.flink.api.common.functions.MapFunction) ReceivedMessage(com.google.pubsub.v1.ReceivedMessage) Test(org.junit.Test)
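
The StringUtils involvement here is small but easy to miss: the map step is just the method reference StringUtils::reverse, cast to Flink's MapFunction<String, String>. A minimal sketch of the reversal itself, without the Flink and PubSub machinery (the class name is illustrative):

import org.apache.commons.lang3.StringUtils;

public class ReverseSketch {
    public static void main(String[] args) {
        // StringUtils.reverse returns a reversed copy of the input and is null-safe:
        // it returns null for null input instead of throwing.
        System.out.println(StringUtils.reverse("Seven")); // neveS
        System.out.println(StringUtils.reverse(null));    // null
    }
}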

Example 29 with StringUtils

use of org.apache.commons.lang3.StringUtils in project dhis2-core by dhis2.

the class EnrollmentTimeFieldSqlRenderer method getSqlConditionForNonDefaultBoundaries.

@Override
protected String getSqlConditionForNonDefaultBoundaries(EventQueryParams params) {
    String sql = params.getProgramIndicator().getAnalyticsPeriodBoundaries().stream()
            .filter(boundary -> boundary.isCohortDateBoundary() && !boundary.isEnrollmentHavingEventDateCohortBoundary())
            .map(boundary -> statementBuilder.getBoundaryCondition(boundary, params.getProgramIndicator(), params.getTimeFieldAsField(), params.getEarliestStartDate(), params.getLatestEndDate()))
            .collect(Collectors.joining(" and "));
    String sqlEventCohortBoundary = params.getProgramIndicator().hasEventDateCohortBoundary()
            ? getProgramIndicatorEventInProgramStageSql(params.getProgramIndicator(), params.getEarliestStartDate(), params.getLatestEndDate())
            : "";
    return Stream.of(sql, sqlEventCohortBoundary)
            .filter(StringUtils::isNotBlank)
            .collect(Collectors.joining(" and "));
}
Also used : DimensionalItemObject(org.hisp.dhis.common.DimensionalItemObject) Getter(lombok.Getter) TimeField(org.hisp.dhis.analytics.TimeField) Date(java.util.Date) AnalyticsSqlUtils.quote(org.hisp.dhis.analytics.util.AnalyticsSqlUtils.quote) RequiredArgsConstructor(lombok.RequiredArgsConstructor) SimpleDateFormat(java.text.SimpleDateFormat) StringUtils(org.apache.commons.lang3.StringUtils) Collections.singleton(java.util.Collections.singleton) ANALYTICS_TBL_ALIAS(org.hisp.dhis.analytics.util.AnalyticsSqlUtils.ANALYTICS_TBL_ALIAS) Map(java.util.Map) TextUtils.getQuotedCommaDelimitedString(org.hisp.dhis.commons.util.TextUtils.getQuotedCommaDelimitedString) Period(org.hisp.dhis.period.Period) EventQueryParams(org.hisp.dhis.analytics.event.EventQueryParams) AnalyticsPeriodBoundary(org.hisp.dhis.program.AnalyticsPeriodBoundary) PERIOD_DIM_ID(org.hisp.dhis.common.DimensionalObject.PERIOD_DIM_ID) Collection(java.util.Collection) DateUtils.getMediumDateString(org.hisp.dhis.util.DateUtils.getMediumDateString) Set(java.util.Set) StatementBuilder(org.hisp.dhis.jdbc.StatementBuilder) Collectors(java.util.stream.Collectors) List(java.util.List) Component(org.springframework.stereotype.Component) IdentifiableObjectUtils.getUids(org.hisp.dhis.common.IdentifiableObjectUtils.getUids) Stream(java.util.stream.Stream) AnalyticsSqlUtils.quoteAlias(org.hisp.dhis.analytics.util.AnalyticsSqlUtils.quoteAlias) Optional(java.util.Optional) ProgramIndicator(org.hisp.dhis.program.ProgramIndicator) DateUtils.plusOneDay(org.hisp.dhis.util.DateUtils.plusOneDay) Assert(org.springframework.util.Assert)
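
The closing line is the reusable part: joining optional SQL fragments with " and " while filtering on StringUtils::isNotBlank, so an empty fragment never produces a dangling "and". A minimal sketch of that join, with made-up fragment strings:

import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.StringUtils;

public class SqlJoinSketch {
    public static void main(String[] args) {
        String boundarySql = "ax.enrollmentdate >= '2021-01-01'"; // illustrative fragment
        String eventCohortSql = "";                               // fragment absent in this case

        // Only non-blank fragments take part in the join, so no stray "and" appears.
        String where = Stream.of(boundarySql, eventCohortSql)
                .filter(StringUtils::isNotBlank)
                .collect(Collectors.joining(" and "));

        System.out.println(where); // ax.enrollmentdate >= '2021-01-01'
    }
}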

Example 30 with StringUtils

use of org.apache.commons.lang3.StringUtils in project hub-detect by blackducksoftware.

the class GradleInspectorExtractor method extract.

public Extraction extract(final File directory, final String gradleExe, final String gradleInspector, final File outputDirectory) {
    try {
        String gradleCommand = detectConfiguration.getProperty(DetectProperty.DETECT_GRADLE_BUILD_COMMAND, PropertyAuthority.None);
        final List<String> arguments = new ArrayList<>();
        if (StringUtils.isNotBlank(gradleCommand)) {
            gradleCommand = gradleCommand.replaceAll("dependencies", "").trim();
            Arrays.stream(gradleCommand.split(" ")).filter(StringUtils::isNotBlank).forEach(arguments::add);
        }
        arguments.add("dependencies");
        arguments.add(String.format("--init-script=%s", gradleInspector));
        arguments.add(String.format("-DGRADLEEXTRACTIONDIR=%s", outputDirectory.getCanonicalPath()));
        arguments.add("--info");
        final Executable executable = new Executable(directory, gradleExe, arguments);
        final ExecutableOutput output = executableRunner.execute(executable);
        if (output.getReturnCode() == 0) {
            final File rootProjectMetadataFile = detectFileFinder.findFile(outputDirectory, "rootProjectMetadata.txt");
            final List<File> codeLocationFiles = detectFileFinder.findFiles(outputDirectory, "*_dependencyGraph.txt");
            final List<DetectCodeLocation> codeLocations = new ArrayList<>();
            String projectName = null;
            String projectVersion = null;
            if (codeLocationFiles != null) {
                codeLocationFiles.stream()
                        .map(codeLocationFile -> gradleReportParser.parseDependencies(codeLocationFile))
                        .filter(Optional::isPresent)
                        .map(Optional::get)
                        .forEach(codeLocations::add);
                if (rootProjectMetadataFile != null) {
                    final Optional<NameVersion> projectNameVersion = gradleReportParser.parseRootProjectNameVersion(rootProjectMetadataFile);
                    if (projectNameVersion.isPresent()) {
                        projectName = projectNameVersion.get().getName();
                        projectVersion = projectNameVersion.get().getVersion();
                    }
                } else {
                    logger.warn("Gradle inspector did not create a meta data report so no project version information was found.");
                }
            }
            return new Extraction.Builder().success(codeLocations).projectName(projectName).projectVersion(projectVersion).build();
        } else {
            return new Extraction.Builder().failure("The gradle inspector returned a non-zero exit code: " + output.getReturnCode()).build();
        }
    } catch (final Exception e) {
        return new Extraction.Builder().exception(e).build();
    }
}
Also used : Arrays(java.util.Arrays) Logger(org.slf4j.Logger) Executable(com.blackducksoftware.integration.hub.detect.util.executable.Executable) Extraction(com.blackducksoftware.integration.hub.detect.workflow.extraction.Extraction) LoggerFactory(org.slf4j.LoggerFactory) ExecutableRunner(com.blackducksoftware.integration.hub.detect.util.executable.ExecutableRunner) StringUtils(org.apache.commons.lang3.StringUtils) File(java.io.File) DetectConfiguration(com.blackducksoftware.integration.hub.detect.configuration.DetectConfiguration) ArrayList(java.util.ArrayList) NameVersion(com.synopsys.integration.util.NameVersion) List(java.util.List) ExecutableOutput(com.blackducksoftware.integration.hub.detect.util.executable.ExecutableOutput) PropertyAuthority(com.blackducksoftware.integration.hub.detect.configuration.PropertyAuthority) Optional(java.util.Optional) DetectFileFinder(com.blackducksoftware.integration.hub.detect.workflow.file.DetectFileFinder) DetectCodeLocation(com.blackducksoftware.integration.hub.detect.workflow.codelocation.DetectCodeLocation) DetectProperty(com.blackducksoftware.integration.hub.detect.configuration.DetectProperty)
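
The StringUtils step worth keeping from this example is the argument tokenization: split the user-supplied build command on spaces and drop the blank tokens that repeated spaces would otherwise add to the argument list. A minimal sketch of that step (the command string is made up):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang3.StringUtils;

public class CommandSplitSketch {
    public static void main(String[] args) {
        String gradleCommand = "clean  build   --stacktrace"; // note the repeated spaces

        List<String> arguments = new ArrayList<>();
        Arrays.stream(gradleCommand.split(" "))
                .filter(StringUtils::isNotBlank) // skip the empty tokens produced by double spaces
                .forEach(arguments::add);

        System.out.println(arguments); // [clean, build, --stacktrace]
    }
}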

Aggregations

StringUtils (org.apache.commons.lang3.StringUtils) 29
HashMap (java.util.HashMap) 10
List (java.util.List) 10
Optional (java.util.Optional) 10
Map (java.util.Map) 9
Collectors (java.util.stream.Collectors) 8
Set (java.util.Set) 7
lombok.val (lombok.val) 7
ArrayList (java.util.ArrayList) 6
Arrays (java.util.Arrays) 5
Stream (java.util.stream.Stream) 5
ColumnSpec (com.thinkbiganalytics.util.ColumnSpec) 3
Collection (java.util.Collection) 3
HashSet (java.util.HashSet) 3
Slf4j (lombok.extern.slf4j.Slf4j) 3
FlowFile (org.apache.nifi.flowfile.FlowFile) 3
CollectionUtils (org.apereo.cas.util.CollectionUtils) 3
AlertException (com.synopsys.integration.alert.api.common.model.exception.AlertException) 2
ThriftService (com.thinkbiganalytics.nifi.v2.thrift.ThriftService) 2
IOException (java.io.IOException) 2