Use of org.joda.time.Duration in project druid by druid-io.
In class AppenderatorPlumber, the method startPersistThread:
private void startPersistThread() {
  final Granularity segmentGranularity = schema.getGranularitySpec().getSegmentGranularity();
  final Period windowPeriod = config.getWindowPeriod();

  final DateTime truncatedNow = segmentGranularity.bucketStart(DateTimes.nowUtc());
  final long windowMillis = windowPeriod.toStandardDuration().getMillis();

  log.info(
      "Expect to run at [%s]",
      DateTimes.nowUtc().plus(
          new Duration(
              System.currentTimeMillis(),
              segmentGranularity.increment(truncatedNow).getMillis() + windowMillis
          )
      )
  );

  String threadName = StringUtils.format(
      "%s-overseer-%d",
      schema.getDataSource(),
      config.getShardSpec().getPartitionNum()
  );

  ThreadRenamingCallable<ScheduledExecutors.Signal> threadRenamingCallable =
      new ThreadRenamingCallable<ScheduledExecutors.Signal>(threadName) {
        @Override
        public ScheduledExecutors.Signal doCall() {
          if (stopped) {
            log.info("Stopping merge-n-push overseer thread");
            return ScheduledExecutors.Signal.STOP;
          }

          mergeAndPush();

          if (stopped) {
            log.info("Stopping merge-n-push overseer thread");
            return ScheduledExecutors.Signal.STOP;
          } else {
            return ScheduledExecutors.Signal.REPEAT;
          }
        }
      };

  Duration initialDelay = new Duration(
      System.currentTimeMillis(),
      segmentGranularity.increment(truncatedNow).getMillis() + windowMillis
  );
  Duration rate = new Duration(truncatedNow, segmentGranularity.increment(truncatedNow));

  ScheduledExecutors.scheduleAtFixedRate(scheduledExecutor, initialDelay, rate, threadRenamingCallable);
}
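The method delays the first persist until the current segment bucket closes plus the window period, using Joda's two-instant Duration constructor. A minimal, self-contained sketch of that arithmetic (hourly buckets and a ten-minute window are hypothetical stand-ins for the real schema and config values):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;
import org.joda.time.Period;

public class InitialDelayDemo {
  public static void main(String[] args) {
    // Hypothetical stand-ins for config.getWindowPeriod() and the segment granularity.
    Period windowPeriod = Period.minutes(10);
    long windowMillis = windowPeriod.toStandardDuration().getMillis();

    DateTime now = DateTime.now(DateTimeZone.UTC);
    DateTime truncatedNow = now.hourOfDay().roundFloorCopy(); // start of the current bucket
    DateTime nextBucket = truncatedNow.plusHours(1);          // stand-in for segmentGranularity.increment

    // Duration(startMillis, endMillis): time remaining until the next bucket closes, plus the window.
    Duration initialDelay = new Duration(now.getMillis(), nextBucket.getMillis() + windowMillis);
    System.out.println("first persist in " + initialDelay.getStandardSeconds() + "s");
  }
}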
Use of org.joda.time.Duration in project druid by druid-io.
In class FileRequestLogger, the method start:
@LifecycleStart
@Override
public void start() {
  try {
    FileUtils.mkdirp(baseDir);

    MutableDateTime mutableDateTime = DateTimes.nowUtc().toMutableDateTime(ISOChronology.getInstanceUTC());
    mutableDateTime.setMillisOfDay(0);
    synchronized (lock) {
      currentDay = mutableDateTime.toDateTime(ISOChronology.getInstanceUTC());
      fileWriter = getFileWriter();
    }
    long nextDay = currentDay.plusDays(1).getMillis();
    Duration initialDelay = new Duration(nextDay - System.currentTimeMillis());

    ScheduledExecutors.scheduleWithFixedDelay(
        exec,
        initialDelay,
        Duration.standardDays(1),
        new Callable<ScheduledExecutors.Signal>() {
          @Override
          public ScheduledExecutors.Signal call() {
            try {
              synchronized (lock) {
                currentDay = currentDay.plusDays(1);
                CloseableUtils.closeAndSuppressExceptions(
                    fileWriter,
                    e -> log.warn("Could not close log file for %s. Creating new log file anyway.", currentDay)
                );
                fileWriter = getFileWriter();
              }
            } catch (Exception e) {
              throw new RuntimeException(e);
            }
            return ScheduledExecutors.Signal.REPEAT;
          }
        }
    );
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
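The initial delay is simply the time remaining until the next UTC midnight, after which the logger rolls its file once per Duration.standardDays(1). A minimal sketch of that computation, assuming the same ISO UTC chronology:

import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.MutableDateTime;
import org.joda.time.chrono.ISOChronology;

public class MidnightDelayDemo {
  public static void main(String[] args) {
    // Truncate "now" to the start of the current UTC day, as the logger does.
    MutableDateTime mutable = new DateTime(ISOChronology.getInstanceUTC()).toMutableDateTime();
    mutable.setMillisOfDay(0);
    DateTime currentDay = mutable.toDateTime();

    // Duration(long millis): how long to wait before the first daily rollover.
    long nextDay = currentDay.plusDays(1).getMillis();
    Duration initialDelay = new Duration(nextDay - System.currentTimeMillis());
    System.out.println("next rollover in " + initialDelay.getStandardHours() + "h");
  }
}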
Use of org.joda.time.Duration in project druid by druid-io.
In class DruidCoordinatorTest, the method testInitializeCompactSegmentsDutyWhenCustomDutyGroupDoesNotContainsCompactSegments:
@Test
public void testInitializeCompactSegmentsDutyWhenCustomDutyGroupDoesNotContainsCompactSegments() {
  CoordinatorCustomDutyGroup group = new CoordinatorCustomDutyGroup(
      "group1",
      Duration.standardSeconds(1),
      ImmutableList.of(new KillSupervisorsCustomDuty(new Duration("PT1S"), null))
  );
  CoordinatorCustomDutyGroups customDutyGroups = new CoordinatorCustomDutyGroups(ImmutableSet.of(group));
  coordinator = new DruidCoordinator(
      druidCoordinatorConfig,
      new ZkPathsConfig() {
        @Override
        public String getBase() {
          return "druid";
        }
      },
      null,
      segmentsMetadataManager,
      serverInventoryView,
      metadataRuleManager,
      () -> curator,
      serviceEmitter,
      scheduledExecutorFactory,
      null,
      null,
      new NoopServiceAnnouncer() {
        @Override
        public void announce(DruidNode node) {
          // count down when this coordinator becomes the leader
          leaderAnnouncerLatch.countDown();
        }

        @Override
        public void unannounce(DruidNode node) {
          leaderUnannouncerLatch.countDown();
        }
      },
      druidNode,
      loadManagementPeons,
      ImmutableSet.of(),
      new HashSet<>(),
      customDutyGroups,
      new CostBalancerStrategyFactory(),
      EasyMock.createNiceMock(LookupCoordinatorManager.class),
      new TestDruidLeaderSelector(),
      null,
      ZkEnablementConfig.ENABLED
  );

  // Since CompactSegments is not enabled in the custom duty group, it must be created among the indexing-service duties.
  List<CoordinatorDuty> indexingDuties = coordinator.makeIndexingServiceDuties();
  Assert.assertTrue(indexingDuties.stream().anyMatch(coordinatorDuty -> coordinatorDuty instanceof CompactSegments));

  // CompactSegments should not exist in any custom duty group.
  List<CompactSegments> compactSegmentsDutyFromCustomGroups = coordinator.getCompactSegmentsDutyFromCustomGroups();
  Assert.assertTrue(compactSegmentsDutyFromCustomGroups.isEmpty());

  // The CompactSegments returned by this method should be created from the DruidCoordinatorConfig held by the DruidCoordinator.
  CompactSegments duty = coordinator.initializeCompactSegmentsDuty();
  Assert.assertNotNull(duty);
  Assert.assertEquals(druidCoordinatorConfig.getCompactionSkipLockedIntervals(), duty.isSkipLockedIntervals());
}
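The test constructs one-second Durations in two ways: the Duration.standardSeconds(1) factory and the Object constructor new Duration("PT1S"), which Joda converts from an ISO-8601 period string. A small sketch showing the two forms are equivalent:

import org.joda.time.Duration;

public class DurationConstructionDemo {
  public static void main(String[] args) {
    // Factory method: an exact number of standard (millisecond-based) seconds.
    Duration viaFactory = Duration.standardSeconds(1);

    // Object constructor: Joda's string converter parses an ISO-8601 duration such as "PT1S".
    Duration viaIsoString = new Duration("PT1S");

    System.out.println(viaFactory.equals(viaIsoString)); // true: both are 1000 ms
  }
}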
Use of org.joda.time.Duration in project beam by apache.
In class WorkerCustomSourcesTest, the method testReadUnboundedReader:
@Test
public void testReadUnboundedReader() throws Exception {
  CounterSet counterSet = new CounterSet();
  StreamingModeExecutionStateRegistry executionStateRegistry = new StreamingModeExecutionStateRegistry(null);
  ReaderCache readerCache = new ReaderCache(Duration.standardMinutes(1), Runnable::run);
  StreamingModeExecutionContext context =
      new StreamingModeExecutionContext(
          counterSet,
          "computationId",
          readerCache,
          /*stateNameMap=*/ ImmutableMap.of(),
          /*stateCache=*/ null,
          StreamingStepMetricsContainer.createRegistry(),
          new DataflowExecutionStateTracker(
              ExecutionStateSampler.newForTest(),
              executionStateRegistry.getState(NameContext.forStage("stageName"), "other", null, NoopProfileScope.NOOP),
              counterSet,
              PipelineOptionsFactory.create(),
              "test-work-item-id"),
          executionStateRegistry,
          Long.MAX_VALUE);

  options.setNumWorkers(5);
  int maxElements = 10;
  DataflowPipelineDebugOptions debugOptions = options.as(DataflowPipelineDebugOptions.class);
  debugOptions.setUnboundedReaderMaxElements(maxElements);

  ByteString state = ByteString.EMPTY;
  for (int i = 0; i < 10 * maxElements; /* incremented in the inner loop */ ) {
    // Initialize the streaming context with state from the previous iteration.
    context.start(
        "key",
        Windmill.WorkItem.newBuilder()
            .setKey(ByteString.copyFromUtf8("0000000000000001")) // key is zero-padded index
            .setWorkToken(i) // must be increasing across activations for the cache to be used
            .setCacheToken(1)
            .setSourceState(Windmill.SourceState.newBuilder().setState(state).build()) // source state
            .build(),
        new Instant(0), // input watermark
        null, // output watermark
        null, // synchronized processing time
        null, // StateReader
        null, // StateFetcher
        Windmill.WorkItemCommitRequest.newBuilder());

    @SuppressWarnings({"unchecked", "rawtypes"})
    NativeReader<WindowedValue<ValueWithRecordId<KV<Integer, Integer>>>> reader =
        (NativeReader)
            WorkerCustomSources.create(
                (CloudObject) serializeToCloudSource(new TestCountingSource(Integer.MAX_VALUE), options).getSpec(),
                options,
                context);

    // Verify data.
    Instant beforeReading = Instant.now();
    int numReadOnThisIteration = 0;
    for (WindowedValue<ValueWithRecordId<KV<Integer, Integer>>> value : ReaderUtils.readAllFromReader(reader)) {
      assertEquals(KV.of(0, i), value.getValue().getValue());
      assertArrayEquals(
          encodeToByteArray(KvCoder.of(VarIntCoder.of(), VarIntCoder.of()), KV.of(0, i)),
          value.getValue().getId());
      assertThat(value.getWindows(), contains((BoundedWindow) GlobalWindow.INSTANCE));
      assertEquals(i, value.getTimestamp().getMillis());
      i++;
      numReadOnThisIteration++;
    }
    Instant afterReading = Instant.now();
    long maxReadSec = debugOptions.getUnboundedReaderMaxReadTimeSec();
    assertThat(new Duration(beforeReading, afterReading).getStandardSeconds(), lessThanOrEqualTo(maxReadSec + 1));
    assertThat(numReadOnThisIteration, lessThanOrEqualTo(debugOptions.getUnboundedReaderMaxElements()));

    // Extract and verify state modifications.
    context.flushState();
    state = context.getOutputBuilder().getSourceStateUpdates().getState();
    // CountingSource's watermark is the last record + 1. i is now one past the last record,
    // so the expected watermark is i millis.
    assertEquals(TimeUnit.MILLISECONDS.toMicros(i), context.getOutputBuilder().getSourceWatermark());
    assertEquals(1, context.getOutputBuilder().getSourceStateUpdates().getFinalizeIdsList().size());
    assertNotNull(
        readerCache.acquireReader(context.getComputationKey(), context.getWork().getCacheToken(), context.getWorkToken() + 1));
    assertEquals(7L, context.getBacklogBytes());
  }
}
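The read-time assertion uses the two-instant constructor, new Duration(start, end), to measure elapsed wall-clock time. A minimal sketch of that idiom (the sleep is just a stand-in for the work being timed):

import org.joda.time.Duration;
import org.joda.time.Instant;

public class ElapsedTimeDemo {
  public static void main(String[] args) throws InterruptedException {
    Instant before = Instant.now();
    Thread.sleep(50); // stand-in for the timed work
    Instant after = Instant.now();

    // Duration(start, end) measures the interval between two instants.
    Duration elapsed = new Duration(before, after);
    System.out.println(elapsed.getMillis() + " ms, " + elapsed.getStandardSeconds() + " s");
  }
}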
Use of org.joda.time.Duration in project beam by apache.
In class CommonCoderTest, the method convertValue:
/**
* Converts from JSON-auto-deserialized types into the proper Java types for the known coders.
*/
private static Object convertValue(Object value, CommonCoder coderSpec, Coder coder) {
  String s = coderSpec.getUrn();
  if (s.equals(getUrn(StandardCoders.Enum.BYTES))) {
    return ((String) value).getBytes(StandardCharsets.ISO_8859_1);
  } else if (s.equals(getUrn(StandardCoders.Enum.BOOL))) {
    return value;
  } else if (s.equals(getUrn(StandardCoders.Enum.STRING_UTF8))) {
    return value;
  } else if (s.equals(getUrn(StandardCoders.Enum.KV))) {
    Coder keyCoder = ((KvCoder) coder).getKeyCoder();
    Coder valueCoder = ((KvCoder) coder).getValueCoder();
    Map<String, Object> kvMap = (Map<String, Object>) value;
    Object k = convertValue(kvMap.get("key"), coderSpec.getComponents().get(0), keyCoder);
    Object v = convertValue(kvMap.get("value"), coderSpec.getComponents().get(1), valueCoder);
    return KV.of(k, v);
  } else if (s.equals(getUrn(StandardCoders.Enum.VARINT))) {
    return ((Number) value).longValue();
  } else if (s.equals(getUrn(StandardCoders.Enum.TIMER))) {
    Map<String, Object> kvMap = (Map<String, Object>) value;
    Coder<?> keyCoder = ((Timer.Coder) coder).getValueCoder();
    Coder<? extends BoundedWindow> windowCoder = ((Timer.Coder) coder).getWindowCoder();
    List<BoundedWindow> windows = new ArrayList<>();
    for (Object window : (List<Object>) kvMap.get("windows")) {
      windows.add((BoundedWindow) convertValue(window, coderSpec.getComponents().get(1), windowCoder));
    }
    if ((boolean) kvMap.get("clearBit")) {
      return Timer.cleared(
          convertValue(kvMap.get("userKey"), coderSpec.getComponents().get(0), keyCoder),
          (String) kvMap.get("dynamicTimerTag"),
          windows);
    }
    Map<String, Object> paneInfoMap = (Map<String, Object>) kvMap.get("pane");
    PaneInfo paneInfo =
        PaneInfo.createPane(
            (boolean) paneInfoMap.get("is_first"),
            (boolean) paneInfoMap.get("is_last"),
            PaneInfo.Timing.valueOf((String) paneInfoMap.get("timing")),
            (int) paneInfoMap.get("index"),
            (int) paneInfoMap.get("on_time_index"));
    return Timer.of(
        convertValue(kvMap.get("userKey"), coderSpec.getComponents().get(0), keyCoder),
        (String) kvMap.get("dynamicTimerTag"),
        windows,
        new Instant(((Number) kvMap.get("fireTimestamp")).longValue()),
        new Instant(((Number) kvMap.get("holdTimestamp")).longValue()),
        paneInfo);
  } else if (s.equals(getUrn(StandardCoders.Enum.INTERVAL_WINDOW))) {
    Map<String, Object> kvMap = (Map<String, Object>) value;
    Instant end = new Instant(((Number) kvMap.get("end")).longValue());
    Duration span = Duration.millis(((Number) kvMap.get("span")).longValue());
    return new IntervalWindow(end.minus(span), span);
  } else if (s.equals(getUrn(StandardCoders.Enum.ITERABLE)) || s.equals(getUrn(StandardCoders.Enum.STATE_BACKED_ITERABLE))) {
    Coder elementCoder = ((IterableLikeCoder) coder).getElemCoder();
    List<Object> elements = (List<Object>) value;
    List<Object> convertedElements = new ArrayList<>();
    for (Object element : elements) {
      convertedElements.add(convertValue(element, coderSpec.getComponents().get(0), elementCoder));
    }
    return convertedElements;
  } else if (s.equals(getUrn(StandardCoders.Enum.GLOBAL_WINDOW))) {
    return GlobalWindow.INSTANCE;
  } else if (s.equals(getUrn(StandardCoders.Enum.WINDOWED_VALUE)) || s.equals(getUrn(StandardCoders.Enum.PARAM_WINDOWED_VALUE))) {
    Map<String, Object> kvMap = (Map<String, Object>) value;
    Coder valueCoder = ((WindowedValue.FullWindowedValueCoder) coder).getValueCoder();
    Coder windowCoder = ((WindowedValue.FullWindowedValueCoder) coder).getWindowCoder();
    Object windowValue = convertValue(kvMap.get("value"), coderSpec.getComponents().get(0), valueCoder);
    Instant timestamp = new Instant(((Number) kvMap.get("timestamp")).longValue());
    List<BoundedWindow> windows = new ArrayList<>();
    for (Object window : (List<Object>) kvMap.get("windows")) {
      windows.add((BoundedWindow) convertValue(window, coderSpec.getComponents().get(1), windowCoder));
    }
    Map<String, Object> paneInfoMap = (Map<String, Object>) kvMap.get("pane");
    PaneInfo paneInfo =
        PaneInfo.createPane(
            (boolean) paneInfoMap.get("is_first"),
            (boolean) paneInfoMap.get("is_last"),
            PaneInfo.Timing.valueOf((String) paneInfoMap.get("timing")),
            (int) paneInfoMap.get("index"),
            (int) paneInfoMap.get("on_time_index"));
    return WindowedValue.of(windowValue, timestamp, windows, paneInfo);
  } else if (s.equals(getUrn(StandardCoders.Enum.DOUBLE))) {
    return Double.parseDouble((String) value);
  } else if (s.equals(getUrn(StandardCoders.Enum.ROW))) {
    Schema schema;
    try {
      schema = SchemaTranslation.schemaFromProto(SchemaApi.Schema.parseFrom(coderSpec.getPayload()));
    } catch (InvalidProtocolBufferException e) {
      throw new RuntimeException("Failed to parse schema payload for row coder", e);
    }
    return parseField(value, Schema.FieldType.row(schema));
  } else if (s.equals(getUrn(StandardCoders.Enum.SHARDED_KEY))) {
    Map<String, Object> kvMap = (Map<String, Object>) value;
    Coder<?> keyCoder = ((ShardedKey.Coder) coder).getKeyCoder();
    byte[] shardId = ((String) kvMap.get("shardId")).getBytes(StandardCharsets.ISO_8859_1);
    return ShardedKey.of(convertValue(kvMap.get("key"), coderSpec.getComponents().get(0), keyCoder), shardId);
  } else if (s.equals(getUrn(StandardCoders.Enum.CUSTOM_WINDOW))) {
    Map<String, Object> kvMap = (Map<String, Object>) value;
    Coder windowCoder = ((TimestampPrefixingWindowCoder) coder).getWindowCoder();
    return convertValue(kvMap.get("window"), coderSpec.getComponents().get(0), windowCoder);
  } else {
    throw new IllegalStateException("Unknown coder URN: " + coderSpec.getUrn());
  }
}
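In the INTERVAL_WINDOW branch, the JSON form carries the window's end instant plus its span in milliseconds, and the start is recovered as end.minus(span). A minimal sketch of that arithmetic using plain Joda types (Beam's IntervalWindow is omitted to keep the example self-contained):

import org.joda.time.Duration;
import org.joda.time.Instant;

public class IntervalFromEndAndSpanDemo {
  public static void main(String[] args) {
    // An interval window encoded as (end, span): start = end - span.
    Instant end = new Instant(10_000L);
    Duration span = Duration.millis(2_500L);
    Instant start = end.minus(span);
    System.out.println(start + " .. " + end); // 1970-01-01T00:00:07.500Z .. 1970-01-01T00:00:10.000Z
  }
}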