
Example 31 with Duration

use of org.joda.time.Duration in project beam by apache.

the class DoFnOperatorTest method testLateDroppingForStatefulFn.

@Test
public void testLateDroppingForStatefulFn() throws Exception {
    WindowingStrategy<Object, IntervalWindow> windowingStrategy = WindowingStrategy.of(FixedWindows.of(new Duration(10)));
    DoFn<Integer, String> fn = new DoFn<Integer, String>() {

        @StateId("state")
        private final StateSpec<ValueState<String>> stateSpec = StateSpecs.value(StringUtf8Coder.of());

        @ProcessElement
        public void processElement(ProcessContext context) {
            context.output(context.element().toString());
        }
    };
    WindowedValue.FullWindowedValueCoder<Integer> windowedValueCoder = WindowedValue.getFullCoder(VarIntCoder.of(), windowingStrategy.getWindowFn().windowCoder());
    TupleTag<String> outputTag = new TupleTag<>("main-output");
    DoFnOperator<Integer, String, WindowedValue<String>> doFnOperator = new DoFnOperator<>(
            fn,
            "stepName",
            windowedValueCoder,
            outputTag,
            Collections.<TupleTag<?>>emptyList(),
            new DoFnOperator.DefaultOutputManagerFactory<WindowedValue<String>>(),
            windowingStrategy,
            new HashMap<Integer, PCollectionView<?>>(), /* side-input mapping */
            Collections.<PCollectionView<?>>emptyList(), /* side inputs */
            PipelineOptionsFactory.as(FlinkPipelineOptions.class),
            VarIntCoder.of());
    OneInputStreamOperatorTestHarness<WindowedValue<Integer>, WindowedValue<String>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(doFnOperator, new KeySelector<WindowedValue<Integer>, Integer>() {

        @Override
        public Integer getKey(WindowedValue<Integer> integerWindowedValue) throws Exception {
            return integerWindowedValue.getValue();
        }
    }, new CoderTypeInformation<>(VarIntCoder.of()));
    testHarness.open();
    testHarness.processWatermark(0);
    IntervalWindow window1 = new IntervalWindow(new Instant(0), Duration.millis(10));
    // this should not be late
    testHarness.processElement(new StreamRecord<>(WindowedValue.of(13, new Instant(0), window1, PaneInfo.NO_FIRING)));
    assertThat(this.<String>stripStreamRecordFromWindowedValue(testHarness.getOutput()), contains(WindowedValue.of("13", new Instant(0), window1, PaneInfo.NO_FIRING)));
    testHarness.getOutput().clear();
    testHarness.processWatermark(9);
    // this should still not be considered late
    testHarness.processElement(new StreamRecord<>(WindowedValue.of(17, new Instant(0), window1, PaneInfo.NO_FIRING)));
    assertThat(this.<String>stripStreamRecordFromWindowedValue(testHarness.getOutput()), contains(WindowedValue.of("17", new Instant(0), window1, PaneInfo.NO_FIRING)));
    testHarness.getOutput().clear();
    testHarness.processWatermark(10);
    // this should now be considered late
    testHarness.processElement(new StreamRecord<>(WindowedValue.of(17, new Instant(0), window1, PaneInfo.NO_FIRING)));
    assertThat(this.<String>stripStreamRecordFromWindowedValue(testHarness.getOutput()), emptyIterable());
    testHarness.close();
}
Also used : TupleTag(org.apache.beam.sdk.values.TupleTag) FlinkPipelineOptions(org.apache.beam.runners.flink.FlinkPipelineOptions) DoFnOperator(org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) StateSpec(org.apache.beam.sdk.state.StateSpec) WindowedValue(org.apache.beam.sdk.util.WindowedValue) IntervalWindow(org.apache.beam.sdk.transforms.windowing.IntervalWindow) Instant(org.joda.time.Instant) Duration(org.joda.time.Duration) PCollectionView(org.apache.beam.sdk.values.PCollectionView) DoFn(org.apache.beam.sdk.transforms.DoFn) Test(org.junit.Test)
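
A minimal illustration of the boundary the watermarks at 9 and 10 probe: with 10 ms fixed windows and the default zero allowed lateness, the element in window [0, 10) stops being accepted once the watermark passes the window's max timestamp. This is a standalone sketch of that arithmetic (the class name LatenessSketch is illustrative), not the operator's internal dropping logic.

import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.joda.time.Duration;
import org.joda.time.Instant;

public class LatenessSketch {

    public static void main(String[] args) {
        // FixedWindows.of(Duration.millis(10)) assigns an element timestamped at 0
        // to the interval [0, 10), whose maxTimestamp() is 9.
        IntervalWindow window = new IntervalWindow(new Instant(0), Duration.millis(10));

        // With zero allowed lateness, the element becomes droppable once the input
        // watermark has passed the window's max timestamp.
        System.out.println(isDroppablyLate(window, new Instant(9)));  // false: still on time
        System.out.println(isDroppablyLate(window, new Instant(10))); // true: the operator drops it
    }

    static boolean isDroppablyLate(IntervalWindow window, Instant watermark) {
        return watermark.isAfter(window.maxTimestamp());
    }
}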

Example 32 with Duration

use of org.joda.time.Duration in project beam by apache.

the class DoFnOperatorTest method testStateGCForStatefulFn.

@Test
public void testStateGCForStatefulFn() throws Exception {
    WindowingStrategy<Object, IntervalWindow> windowingStrategy = WindowingStrategy.of(FixedWindows.of(new Duration(10))).withAllowedLateness(Duration.ZERO);
    final String timerId = "boo";
    final String stateId = "dazzle";
    final int offset = 5000;
    final int timerOutput = 4093;
    DoFn<KV<String, Integer>, KV<String, Integer>> fn = new DoFn<KV<String, Integer>, KV<String, Integer>>() {

        @TimerId(timerId)
        private final TimerSpec spec = TimerSpecs.timer(TimeDomain.EVENT_TIME);

        @StateId(stateId)
        private final StateSpec<ValueState<String>> stateSpec = StateSpecs.value(StringUtf8Coder.of());

        @ProcessElement
        public void processElement(ProcessContext context, @TimerId(timerId) Timer timer, @StateId(stateId) ValueState<String> state, BoundedWindow window) {
            timer.set(window.maxTimestamp());
            state.write(context.element().getKey());
            context.output(KV.of(context.element().getKey(), context.element().getValue() + offset));
        }

        @OnTimer(timerId)
        public void onTimer(OnTimerContext context, @StateId(stateId) ValueState<String> state) {
            context.output(KV.of(state.read(), timerOutput));
        }
    };
    WindowedValue.FullWindowedValueCoder<KV<String, Integer>> windowedValueCoder = WindowedValue.getFullCoder(KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of()), windowingStrategy.getWindowFn().windowCoder());
    TupleTag<KV<String, Integer>> outputTag = new TupleTag<>("main-output");
    DoFnOperator<KV<String, Integer>, KV<String, Integer>, WindowedValue<KV<String, Integer>>> doFnOperator = new DoFnOperator<>(
            fn,
            "stepName",
            windowedValueCoder,
            outputTag,
            Collections.<TupleTag<?>>emptyList(),
            new DoFnOperator.DefaultOutputManagerFactory<WindowedValue<KV<String, Integer>>>(),
            windowingStrategy,
            new HashMap<Integer, PCollectionView<?>>(), /* side-input mapping */
            Collections.<PCollectionView<?>>emptyList(), /* side inputs */
            PipelineOptionsFactory.as(FlinkPipelineOptions.class),
            StringUtf8Coder.of());
    KeyedOneInputStreamOperatorTestHarness<String, WindowedValue<KV<String, Integer>>, WindowedValue<KV<String, Integer>>> testHarness = new KeyedOneInputStreamOperatorTestHarness<>(doFnOperator, new KeySelector<WindowedValue<KV<String, Integer>>, String>() {

        @Override
        public String getKey(WindowedValue<KV<String, Integer>> kvWindowedValue) throws Exception {
            return kvWindowedValue.getValue().getKey();
        }
    }, new CoderTypeInformation<>(StringUtf8Coder.of()));
    testHarness.open();
    testHarness.processWatermark(0);
    assertEquals(0, testHarness.numKeyedStateEntries());
    IntervalWindow window1 = new IntervalWindow(new Instant(0), Duration.millis(10));
    testHarness.processElement(new StreamRecord<>(WindowedValue.of(KV.of("key1", 5), new Instant(1), window1, PaneInfo.NO_FIRING)));
    testHarness.processElement(new StreamRecord<>(WindowedValue.of(KV.of("key2", 7), new Instant(3), window1, PaneInfo.NO_FIRING)));
    assertThat(this.<KV<String, Integer>>stripStreamRecordFromWindowedValue(testHarness.getOutput()), contains(WindowedValue.of(KV.of("key1", 5 + offset), new Instant(1), window1, PaneInfo.NO_FIRING), WindowedValue.of(KV.of("key2", 7 + offset), new Instant(3), window1, PaneInfo.NO_FIRING)));
    assertEquals(2, testHarness.numKeyedStateEntries());
    testHarness.getOutput().clear();
    // this should trigger both the window.maxTimestamp() timer and the GC timer
    // this tests that the GC timer fires after the user timer
    testHarness.processWatermark(window1.maxTimestamp().plus(windowingStrategy.getAllowedLateness()).plus(StatefulDoFnRunner.TimeInternalsCleanupTimer.GC_DELAY_MS).getMillis());
    assertThat(this.<KV<String, Integer>>stripStreamRecordFromWindowedValue(testHarness.getOutput()), contains(WindowedValue.of(KV.of("key1", timerOutput), new Instant(9), window1, PaneInfo.NO_FIRING), WindowedValue.of(KV.of("key2", timerOutput), new Instant(9), window1, PaneInfo.NO_FIRING)));
    // ensure the state was garbage collected
    assertEquals(0, testHarness.numKeyedStateEntries());
    testHarness.close();
}
Also used : TupleTag(org.apache.beam.sdk.values.TupleTag) FlinkPipelineOptions(org.apache.beam.runners.flink.FlinkPipelineOptions) DoFnOperator(org.apache.beam.runners.flink.translation.wrappers.streaming.DoFnOperator) KeyedOneInputStreamOperatorTestHarness(org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness) StateSpec(org.apache.beam.sdk.state.StateSpec) WindowedValue(org.apache.beam.sdk.util.WindowedValue) BoundedWindow(org.apache.beam.sdk.transforms.windowing.BoundedWindow) IntervalWindow(org.apache.beam.sdk.transforms.windowing.IntervalWindow) TimerSpec(org.apache.beam.sdk.state.TimerSpec) Instant(org.joda.time.Instant) Duration(org.joda.time.Duration) KV(org.apache.beam.sdk.values.KV) PCollectionView(org.apache.beam.sdk.values.PCollectionView) DoFn(org.apache.beam.sdk.transforms.DoFn) ValueState(org.apache.beam.sdk.state.ValueState) Timer(org.apache.beam.sdk.state.Timer) Test(org.junit.Test)
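
The watermark the test advances to is window max timestamp + allowed lateness + the runner's GC delay, which is what lets the user's event-time timer (set to window.maxTimestamp()) fire before state cleanup. Below is a hedged sketch of that arithmetic, assuming StatefulDoFnRunner and its TimeInternalsCleanupTimer.GC_DELAY_MS constant are importable from org.apache.beam.runners.core, as the test's reference suggests.

import org.apache.beam.runners.core.StatefulDoFnRunner;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.joda.time.Duration;
import org.joda.time.Instant;

public class GcTimerSketch {

    public static void main(String[] args) {
        IntervalWindow window = new IntervalWindow(new Instant(0), Duration.millis(10));
        Duration allowedLateness = Duration.ZERO;

        // The user's EVENT_TIME timer is set to the window's max timestamp (9)...
        Instant userTimer = window.maxTimestamp();

        // ...while state cleanup is scheduled a fixed delay after the window expires,
        // so the user timer fires before the keyed state is garbage collected.
        Instant gcTimer = window.maxTimestamp()
                .plus(allowedLateness)
                .plus(StatefulDoFnRunner.TimeInternalsCleanupTimer.GC_DELAY_MS);

        System.out.println(userTimer + " -> " + gcTimer);
    }
}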

Example 33 with Duration

use of org.joda.time.Duration in project beam by apache.

the class DataflowPipelineJob method waitUntilFinish.

/**
   * Waits until the pipeline finishes and returns the final status.
   *
   * @param duration The time to wait for the job to finish.
   *     Provide a value less than 1 ms for an infinite wait.
   *
   * @param messageHandler If non-null, this handler will be invoked for each
   *   batch of messages received.
   * @param sleeper A sleeper used to sleep between attempts.
   * @param nanoClock A nanoClock used to measure the total elapsed time.
   * @return The final state of the job or null on timeout.
   * @throws IOException If there is a persistent problem getting job
   *   information.
   * @throws InterruptedException if the thread is interrupted.
   */
@Nullable
@VisibleForTesting
State waitUntilFinish(Duration duration, @Nullable MonitoringUtil.JobMessagesHandler messageHandler, Sleeper sleeper, NanoClock nanoClock, MonitoringUtil monitor) throws IOException, InterruptedException {
    BackOff backoff;
    if (!duration.isLongerThan(Duration.ZERO)) {
        backoff = BackOffAdapter.toGcpBackOff(MESSAGES_BACKOFF_FACTORY.backoff());
    } else {
        backoff = BackOffAdapter.toGcpBackOff(MESSAGES_BACKOFF_FACTORY.withMaxCumulativeBackoff(duration).backoff());
    }
    // This function tracks the cumulative time from the *first request* to enforce the wall-clock
    // limit. Any backoff instance could, at best, track the time since the first attempt at a
    // given request. Thus, we need to track the cumulative time ourselves.
    long startNanos = nanoClock.nanoTime();
    State state;
    do {
        // Get the state of the job before listing messages. This ensures we always fetch job
        // messages after the job finishes, so that we have all of them.
        state = getStateWithRetries(BackOffAdapter.toGcpBackOff(STATUS_BACKOFF_FACTORY.withMaxRetries(0).backoff()), sleeper);
        boolean hasError = state == State.UNKNOWN;
        if (messageHandler != null && !hasError) {
            // Process all the job messages that have accumulated so far.
            try {
                List<JobMessage> allMessages = monitor.getJobMessages(jobId, lastTimestamp);
                if (!allMessages.isEmpty()) {
                    lastTimestamp = fromCloudTime(allMessages.get(allMessages.size() - 1).getTime()).getMillis();
                    messageHandler.process(allMessages);
                }
            } catch (GoogleJsonResponseException | SocketTimeoutException e) {
                hasError = true;
                LOG.warn("There were problems getting current job messages: {}.", e.getMessage());
                LOG.debug("Exception information:", e);
            }
        }
        if (!hasError) {
            // We can stop if the job is done.
            if (state.isTerminal()) {
                switch(state) {
                    case DONE:
                    case CANCELLED:
                        LOG.info("Job {} finished with status {}.", getJobId(), state);
                        break;
                    case UPDATED:
                        LOG.info("Job {} has been updated and is running as the new job with id {}. " + "To access the updated job on the Dataflow monitoring console, " + "please navigate to {}", getJobId(), getReplacedByJob().getJobId(), MonitoringUtil.getJobMonitoringPageURL(getReplacedByJob().getProjectId(), getReplacedByJob().getJobId()));
                        break;
                    default:
                        LOG.info("Job {} failed with status {}.", getJobId(), state);
                }
                return state;
            }
            // The job is not done, so we must keep polling.
            backoff.reset();
            // If a wait duration was given, rebuild the backoff with the remaining budget so we
            // don't exceed the allotted time.
            if (duration.isLongerThan(Duration.ZERO)) {
                long nanosConsumed = nanoClock.nanoTime() - startNanos;
                Duration consumed = Duration.millis((nanosConsumed + 999999) / 1000000);
                Duration remaining = duration.minus(consumed);
                if (remaining.isLongerThan(Duration.ZERO)) {
                    backoff = BackOffAdapter.toGcpBackOff(MESSAGES_BACKOFF_FACTORY.withMaxCumulativeBackoff(remaining).backoff());
                } else {
                    // If there is no time remaining, don't bother backing off.
                    backoff = BackOff.STOP_BACKOFF;
                }
            }
        }
    } while (BackOffUtils.next(sleeper, backoff));
    LOG.warn("No terminal state was returned. State value {}", state);
    // Timed out.
    return null;
}
Also used : GoogleJsonResponseException(com.google.api.client.googleapis.json.GoogleJsonResponseException) SocketTimeoutException(java.net.SocketTimeoutException) JobMessage(com.google.api.services.dataflow.model.JobMessage) Duration(org.joda.time.Duration) BackOff(com.google.api.client.util.BackOff) VisibleForTesting(com.google.common.annotations.VisibleForTesting) Nullable(javax.annotation.Nullable)
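
For callers outside tests, the timeout semantics documented above surface through the public PipelineResult#waitUntilFinish(Duration) overload, which DataflowPipelineJob implements. A hedged usage sketch follows; the helper name and the 30-minute budget are illustrative, and a null return still signals a timeout.

import org.apache.beam.sdk.PipelineResult;
import org.joda.time.Duration;

public class WaitSketch {

    // Wait up to 30 minutes for a terminal state; per the Javadoc above, a duration
    // that is not longer than zero would instead wait indefinitely.
    static PipelineResult.State waitUpToHalfAnHour(PipelineResult result) {
        PipelineResult.State state = result.waitUntilFinish(Duration.standardMinutes(30));
        if (state == null) {
            System.out.println("Timed out; the job may still be running on the service.");
        }
        return state;
    }
}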

Example 34 with Duration

use of org.joda.time.Duration in project openhab1-addons by openhab.

the class SonosZonePlayer method setAlarm.

public boolean setAlarm(boolean alarmSwitch) {
    List<SonosAlarm> sonosAlarms = getCurrentAlarmList();
    if (isConfigured()) {
        // find the nearest alarm - take the current time from the Sonos System, not the system where openhab is
        // running
        String currentLocalTime = getTime();
        DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
        DateTime currentDateTime = fmt.parseDateTime(currentLocalTime);
        Duration shortestDuration = Period.days(10).toStandardDuration();
        SonosAlarm firstAlarm = null;
        for (SonosAlarm anAlarm : sonosAlarms) {
            Duration duration = new Duration(currentDateTime, anAlarm.getStartTime());
            if (anAlarm.getStartTime().isBefore(currentDateTime.plus(shortestDuration)) && anAlarm.getRoomUUID().equals(udn.getIdentifierString())) {
                shortestDuration = duration;
                firstAlarm = anAlarm;
            }
        }
        // Set the Alarm
        if (firstAlarm != null) {
            firstAlarm.setEnabled(alarmSwitch);
            return updateAlarm(firstAlarm);
        } else {
            return false;
        }
    } else {
        return false;
    }
}
Also used : Duration(org.joda.time.Duration) DateTimeFormatter(org.joda.time.format.DateTimeFormatter) DateTime(org.joda.time.DateTime)
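
The alarm search above leans on two Joda-Time idioms: the two-instant Duration constructor, which measures the gap between the Sonos clock and an alarm's start time, and Period.days(10).toStandardDuration() as the initial upper bound. A small standalone sketch with made-up timestamps:

import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.Period;

public class AlarmDurationSketch {

    public static void main(String[] args) {
        // Duration(start, end) measures the elapsed time between two instants;
        // Period.days(10).toStandardDuration() is the ten-day bound the loop above
        // starts from before narrowing it down to the nearest alarm.
        DateTime now = new DateTime(2017, 6, 1, 7, 0, 0);
        DateTime alarmStart = now.plusHours(1);

        Duration untilAlarm = new Duration(now, alarmStart);
        Duration tenDays = Period.days(10).toStandardDuration();

        System.out.println(untilAlarm.getStandardMinutes());        // 60
        System.out.println(alarmStart.isBefore(now.plus(tenDays))); // true
    }
}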

Example 35 with Duration

use of org.joda.time.Duration in project gocd by gocd.

the class StageTest method shouldCalculateTotalTimeFromFirstScheduledJobToLastCompletedJob.

@Test
public void shouldCalculateTotalTimeFromFirstScheduledJobToLastCompletedJob() {
    final DateTime time0 = new DateTime(2008, 2, 22, 10, 21, 23, 0);
    timeProvider = new TimeProvider() {

        @Override
        public Date currentTime() {
            return time0.toDate();
        }

        public DateTime currentDateTime() {
            throw new UnsupportedOperationException("Not implemented");
        }

        public DateTime timeoutTime(Timeout timeout) {
            throw new UnsupportedOperationException("Not implemented");
        }
    };
    firstJob = new JobInstance("first-job", timeProvider);
    secondJob = new JobInstance("second-job", timeProvider);
    jobInstances = new JobInstances(firstJob, secondJob);
    stage = StageMother.custom("test", jobInstances);
    firstJob.assign("AGENT-1", time1.toDate());
    firstJob.completing(JobResult.Passed, time2.toDate());
    firstJob.completed(time2.toDate());
    secondJob.assign("AGENT-2", time3.toDate());
    secondJob.completing(JobResult.Passed, time4.toDate());
    secondJob.completed(time4.toDate());
    stage.calculateResult();
    stage.setCreatedTime(new Timestamp(time0.toDate().getTime()));
    stage.setLastTransitionedTime(new Timestamp(time4.toDate().getTime()));
    RunDuration.ActualDuration expectedDuration = new RunDuration.ActualDuration(new Duration(time0, time4));
    RunDuration.ActualDuration duration = (RunDuration.ActualDuration) stage.getDuration();
    assertThat(duration, is(expectedDuration));
    assertThat(duration.getTotalSeconds(), is(7263L));
}
Also used : TimeProvider(com.thoughtworks.go.util.TimeProvider) Timeout(com.thoughtworks.go.utils.Timeout) Duration(org.joda.time.Duration) Timestamp(java.sql.Timestamp) DateTime(org.joda.time.DateTime) Date(java.util.Date) Test(org.junit.Test)
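
The assertion on getTotalSeconds() pins the gap between time0 and time4 at 7263 seconds; the time1..time4 fixtures themselves are defined elsewhere in the test class. A small sketch of the underlying Joda-Time arithmetic, assuming time4 is exactly 7263 seconds after time0:

import org.joda.time.DateTime;
import org.joda.time.Duration;

public class StageDurationSketch {

    public static void main(String[] args) {
        // time0 matches the fixture above; time4 is assumed to be 2 h 1 min 3 s later,
        // which is what the getTotalSeconds() assertion implies.
        DateTime time0 = new DateTime(2008, 2, 22, 10, 21, 23, 0);
        DateTime time4 = time0.plusSeconds(7263);

        Duration total = new Duration(time0, time4);
        System.out.println(total.getStandardSeconds()); // 7263
    }
}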

Aggregations

Duration (org.joda.time.Duration)272 Test (org.junit.Test)148 Instant (org.joda.time.Instant)66 DateTime (org.joda.time.DateTime)32 Period (org.joda.time.Period)27 IntervalWindow (org.apache.beam.sdk.transforms.windowing.IntervalWindow)24 TestDruidCoordinatorConfig (org.apache.druid.server.coordinator.TestDruidCoordinatorConfig)22 HashMap (java.util.HashMap)18 IOException (java.io.IOException)17 Category (org.junit.experimental.categories.Category)16 ArrayList (java.util.ArrayList)15 Map (java.util.Map)15 KV (org.apache.beam.sdk.values.KV)15 AtomicReference (java.util.concurrent.atomic.AtomicReference)13 IndexSpec (org.apache.druid.segment.IndexSpec)12 Set (java.util.Set)10 GlobalWindows (org.apache.beam.sdk.transforms.windowing.GlobalWindows)10 DynamicPartitionsSpec (org.apache.druid.indexer.partitions.DynamicPartitionsSpec)10 Interval (org.joda.time.Interval)10 Request (com.metamx.http.client.Request)9