use of org.joda.time.Duration in project pinot by linkedin.
the class TimeRetentionStrategy method isPurgeable.
@Override
public boolean isPurgeable(SegmentZKMetadata segmentZKMetadata) {
  if (_retentionDuration == null || _retentionDuration.getMillis() <= 0) {
    return false;
  }
  try {
    TimeUnit segmentTimeUnit = segmentZKMetadata.getTimeUnit();
    if (segmentTimeUnit == null) {
      return false;
    }
    long endsMillis = segmentTimeUnit.toMillis(segmentZKMetadata.getEndTime());
    // Check that the date in the segment is between 1971 and 2071, as a sanity check for misconfigured time units.
    if (!TimeUtils.timeValueInValidRange(endsMillis)) {
      LOGGER.warn("Skipping purge check for segment {}, timestamp {} {} fails sanity check.",
          segmentZKMetadata.getSegmentName(), segmentZKMetadata.getEndTime(), segmentZKMetadata.getTimeUnit());
      return false;
    }
    Duration segmentTimeUntilNow = new Duration(endsMillis, System.currentTimeMillis());
    if (_retentionDuration.isShorterThan(segmentTimeUntilNow)) {
      return true;
    }
  } catch (Exception e) {
    LOGGER.warn("Caught exception while checking if a segment is purgeable", e);
    return false;
  }
  return false;
}
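The purge check above hinges on two pieces of the Joda API: the two-instant constructor Duration(startMillis, endMillis), which measures the elapsed time between the segment's end time and now, and isShorterThan, which compares that age against the configured retention window. A minimal standalone sketch of the same comparison (class name, retention value, and segment age below are illustrative, not taken from Pinot):

import java.util.concurrent.TimeUnit;
import org.joda.time.Duration;

public class RetentionCheckSketch {
  public static void main(String[] args) {
    // Hypothetical retention window of 7 days, expressed as a Joda Duration.
    Duration retention = Duration.standardDays(7);

    // Pretend the segment's end time was 10 days ago.
    long segmentEndMillis = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(10);

    // Duration(startInstant, endInstant): elapsed time between the two instants, in millis.
    Duration age = new Duration(segmentEndMillis, System.currentTimeMillis());

    // The segment is purgeable when the retention window is shorter than its age.
    System.out.println(retention.isShorterThan(age)); // true for a 10-day-old segment
  }
}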
use of org.joda.time.Duration in project pinot by linkedin.
the class ValidationManagerTest method testExtremeSenario.
@Test
public void testExtremeSenario() {
  List<Interval> intervals = new ArrayList<Interval>();
  intervals.add(new Interval(1, 2));
  intervals.add(new Interval(Integer.MAX_VALUE - 5, Integer.MAX_VALUE));
  intervals.add(new Interval(Integer.MAX_VALUE / 2 - 5, Integer.MAX_VALUE / 2));
  Duration frequency = new Duration(1);
  List<Interval> computeMissingIntervals = ValidationManager.computeMissingIntervals(intervals, frequency);
  Assert.assertEquals(computeMissingIntervals.size(), 22);
}
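The test relies on two Joda conventions that are easy to misread: Duration(long) takes a length in milliseconds (so new Duration(1) is a one-millisecond frequency, not one second), and Interval(long, long) takes start and end instants in epoch milliseconds. A small sketch illustrating both (the class name is hypothetical):

import org.joda.time.Duration;
import org.joda.time.Interval;

public class MillisConstructorSketch {
  public static void main(String[] args) {
    // Duration(long) is a length in milliseconds: the test's frequency is 1 ms.
    Duration frequency = new Duration(1);
    System.out.println(frequency.getMillis()); // 1

    // Interval(long, long) takes start and end instants in epoch milliseconds.
    Interval first = new Interval(1, 2);
    System.out.println(first.toDuration().getMillis()); // 1
  }
}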
use of org.joda.time.Duration in project pinot by linkedin.
the class RetentionManagerTest method getTimeSegmentMetadataImpl.
private SegmentMetadata getTimeSegmentMetadataImpl(final String startTime, final String endTime, final String timeUnit) {
  if (startTime == null || endTime == null || timeUnit == null) {
    long startTimeValue = System.currentTimeMillis();
    return getTimeSegmentMetadataImpl(startTimeValue + "", startTimeValue + "", TimeUnit.MILLISECONDS.toString());
  }
  final long creationTime = System.currentTimeMillis();
  final String segmentName = _testTableName + creationTime;
  SegmentMetadata segmentMetadata = new SegmentMetadata() {

    TimeUnit segmentTimeUnit = TimeUtils.timeUnitFromString(timeUnit);
    Duration _timeGranularity = new Duration(segmentTimeUnit.toMillis(1));
    Interval _timeInterval = new Interval(segmentTimeUnit.toMillis(Long.parseLong(startTime)), segmentTimeUnit.toMillis(Long.parseLong(endTime)));

    @Override
    public Map<String, String> toMap() {
      final Map<String, String> ret = new HashMap<String, String>();
      ret.put(V1Constants.MetadataKeys.Segment.TABLE_NAME, getTableName());
      ret.put(V1Constants.MetadataKeys.Segment.SEGMENT_TOTAL_DOCS, String.valueOf(getTotalDocs()));
      ret.put(V1Constants.MetadataKeys.Segment.SEGMENT_VERSION, getVersion());
      ret.put(V1Constants.MetadataKeys.Segment.SEGMENT_NAME, getName());
      ret.put(V1Constants.MetadataKeys.Segment.SEGMENT_CRC, getCrc());
      ret.put(V1Constants.MetadataKeys.Segment.SEGMENT_CREATION_TIME, getIndexCreationTime() + "");
      ret.put(V1Constants.MetadataKeys.Segment.SEGMENT_START_TIME, startTime);
      ret.put(V1Constants.MetadataKeys.Segment.SEGMENT_END_TIME, endTime);
      ret.put(V1Constants.MetadataKeys.Segment.TIME_UNIT, timeUnit);
      return ret;
    }

    @Override
    public String getVersion() {
      return SegmentVersion.v1.toString();
    }

    @Override
    public int getTotalDocs() {
      return 0;
    }

    @Override
    public int getTotalRawDocs() {
      return 0;
    }

    @Override
    public Interval getTimeInterval() {
      return _timeInterval;
    }

    @Override
    public Duration getTimeGranularity() {
      return _timeGranularity;
    }

    @Override
    public String getShardingKey() {
      return null;
    }

    @Override
    public Schema getSchema() {
      return null;
    }

    @Override
    public String getTableName() {
      return _testTableName;
    }

    @Override
    public String getName() {
      return segmentName;
    }

    @Override
    public String getIndexType() {
      return "offline";
    }

    @Override
    public String getTimeColumn() {
      return null;
    }

    @Override
    public long getStartTime() {
      return Long.valueOf(startTime);
    }

    @Override
    public long getEndTime() {
      return Long.valueOf(endTime);
    }

    @Override
    public TimeUnit getTimeUnit() {
      return segmentTimeUnit;
    }

    @Override
    public String getIndexDir() {
      return null;
    }

    @Override
    public long getIndexCreationTime() {
      return creationTime;
    }

    @Override
    public String getCrc() {
      return creationTime + "";
    }

    @Override
    public long getPushTime() {
      return Long.MIN_VALUE;
    }

    @Override
    public long getRefreshTime() {
      return Long.MIN_VALUE;
    }

    @Override
    public boolean hasDictionary(String columnName) {
      return false;
    }

    @Override
    public boolean hasStarTree() {
      return false;
    }

    @Override
    public StarTreeMetadata getStarTreeMetadata() {
      return null;
    }

    @Override
    public boolean close() {
      return false;
    }

    @Override
    public String getForwardIndexFileName(String column, String segmentVersion) {
      throw new UnsupportedOperationException("getForwardIndexFileName not supported in " + this.getClass());
    }

    @Override
    public String getDictionaryFileName(String column, String segmentVersion) {
      throw new UnsupportedOperationException("getDictionaryFileName not supported in " + this.getClass());
    }

    @Override
    public String getBitmapInvertedIndexFileName(String column, String segmentVersion) {
      throw new UnsupportedOperationException("getBitmapInvertedIndexFileName not supported in " + this.getClass());
    }

    @Nullable
    @Override
    public String getCreatorName() {
      return null;
    }

    @Override
    public char getPaddingCharacter() {
      return V1Constants.Str.DEFAULT_STRING_PAD_CHAR;
    }

    @Override
    public int getHllLog2m() {
      return HllConstants.DEFAULT_LOG2M;
    }

    @Nullable
    @Override
    public String getDerivedColumn(String column, MetricFieldSpec.DerivedMetricType derivedMetricType) {
      return null;
    }
  };
  return segmentMetadata;
}
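Within the anonymous SegmentMetadata stub, Joda Duration serves as the segment's time granularity: new Duration(segmentTimeUnit.toMillis(1)) turns one unit of the segment's time column into a fixed-length Duration, and the Interval is built from the start/end values converted to milliseconds. A standalone sketch of that conversion with made-up values (the class name and numbers below are illustrative, not from the Pinot test):

import java.util.concurrent.TimeUnit;
import org.joda.time.Duration;
import org.joda.time.Interval;

public class TimeGranularitySketch {
  public static void main(String[] args) {
    TimeUnit segmentTimeUnit = TimeUnit.HOURS;

    // One unit of the segment's time column as a Joda Duration (here, one hour).
    Duration granularity = new Duration(segmentTimeUnit.toMillis(1));
    System.out.println(granularity.getStandardMinutes()); // 60

    // Start/end values stored in the segment's own unit, converted to epoch millis for the Interval.
    long startHours = 420000L;
    long endHours = 420024L;
    Interval interval = new Interval(segmentTimeUnit.toMillis(startHours), segmentTimeUnit.toMillis(endHours));
    System.out.println(interval.toDuration().getStandardHours()); // 24
  }
}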
use of org.joda.time.Duration in project head by mifos.
the class DateTimeServiceTest method testGetCurrentJavaDateTime.
@Test
public void testGetCurrentJavaDateTime() {
  DateTimeService dateTimeService = new DateTimeService();
  DateTime systemDateTime = new DateTime(System.currentTimeMillis());
  DateTime timeServiceDateTime = new DateTime(dateTimeService.getCurrentJavaDateTime());
  Duration duration = new Duration(systemDateTime, timeServiceDateTime);
  // the dates returned should be equal or very close
  Assert.assertTrue("Expected the java date time returned by DateTimeService to be equal or close to the actual system time",
      duration.getMillis() < ONE_SECOND);
}
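Here Duration is constructed from two DateTime instants to measure how far apart the two clock readings are, and the assertion only checks that the gap stays under ONE_SECOND, a constant defined elsewhere in the test class. A minimal sketch of the same elapsed-time measurement, assuming a one-second threshold:

import org.joda.time.DateTime;
import org.joda.time.Duration;

public class ElapsedTimeSketch {
  // Threshold assumed to mirror the test's ONE_SECOND constant.
  private static final long ONE_SECOND = 1000L;

  public static void main(String[] args) throws InterruptedException {
    DateTime before = new DateTime(System.currentTimeMillis());
    Thread.sleep(5);
    DateTime after = new DateTime(System.currentTimeMillis());

    // Duration(ReadableInstant, ReadableInstant): elapsed time between the two readings.
    Duration elapsed = new Duration(before, after);
    System.out.println(elapsed.getMillis() < ONE_SECOND); // true unless the sleep was badly delayed
  }
}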
use of org.joda.time.Duration in project druid by druid-io.
the class DruidCoordinatorConfigTest method testDeserialization.
@Test
public void testDeserialization() throws Exception {
  ConfigurationObjectFactory factory = Config.createFactory(new Properties());
  // with defaults
  DruidCoordinatorConfig config = factory.build(DruidCoordinatorConfig.class);
  Assert.assertEquals(new Duration("PT300s"), config.getCoordinatorStartDelay());
  Assert.assertEquals(new Duration("PT60s"), config.getCoordinatorPeriod());
  Assert.assertEquals(new Duration("PT1800s"), config.getCoordinatorIndexingPeriod());
  Assert.assertFalse(config.isMergeSegments());
  Assert.assertFalse(config.isConvertSegments());
  Assert.assertFalse(config.isKillSegments());
  Assert.assertEquals(86400000, config.getCoordinatorKillPeriod().getMillis());
  Assert.assertEquals(-1000, config.getCoordinatorKillDurationToRetain().getMillis());
  Assert.assertEquals(0, config.getCoordinatorKillMaxSegments());
  Assert.assertEquals(new Duration(15 * 60 * 1000), config.getLoadTimeoutDelay());
  Assert.assertNull(config.getConsoleStatic());
  Assert.assertEquals(Duration.millis(50), config.getLoadQueuePeonRepeatDelay());
  // with non-defaults
  Properties props = new Properties();
  props.setProperty("druid.coordinator.startDelay", "PT1s");
  props.setProperty("druid.coordinator.period", "PT1s");
  props.setProperty("druid.coordinator.period.indexingPeriod", "PT1s");
  props.setProperty("druid.coordinator.merge.on", "true");
  props.setProperty("druid.coordinator.conversion.on", "true");
  props.setProperty("druid.coordinator.kill.on", "true");
  props.setProperty("druid.coordinator.kill.period", "PT1s");
  props.setProperty("druid.coordinator.kill.durationToRetain", "PT1s");
  props.setProperty("druid.coordinator.kill.maxSegments", "10000");
  props.setProperty("druid.coordinator.load.timeout", "PT1s");
  props.setProperty("druid.coordinator.console.static", "test");
  props.setProperty("druid.coordinator.loadqueuepeon.repeatDelay", "PT0.100s");
  factory = Config.createFactory(props);
  config = factory.build(DruidCoordinatorConfig.class);
  Assert.assertEquals(new Duration("PT1s"), config.getCoordinatorStartDelay());
  Assert.assertEquals(new Duration("PT1s"), config.getCoordinatorPeriod());
  Assert.assertEquals(new Duration("PT1s"), config.getCoordinatorIndexingPeriod());
  Assert.assertTrue(config.isMergeSegments());
  Assert.assertTrue(config.isConvertSegments());
  Assert.assertTrue(config.isKillSegments());
  Assert.assertEquals(new Duration("PT1s"), config.getCoordinatorKillPeriod());
  Assert.assertEquals(new Duration("PT1s"), config.getCoordinatorKillDurationToRetain());
  Assert.assertEquals(10000, config.getCoordinatorKillMaxSegments());
  Assert.assertEquals(new Duration("PT1s"), config.getLoadTimeoutDelay());
  Assert.assertEquals("test", config.getConsoleStatic());
  Assert.assertEquals(Duration.millis(100), config.getLoadQueuePeonRepeatDelay());
}
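The Druid test mixes three ways of obtaining a Joda Duration: the Object constructor, which parses an ISO-8601 seconds string such as "PT300s"; the long constructor, which takes a length in milliseconds; and the static factory Duration.millis. A short sketch showing that the three forms produce equal values (class and variable names are illustrative):

import org.joda.time.Duration;

public class DurationFactoriesSketch {
  public static void main(String[] args) {
    // Object constructor: parses an ISO-8601 "PT...S" string into a fixed length.
    Duration fromIso = new Duration("PT300s");

    // long constructor: length in milliseconds.
    Duration fromMillis = new Duration(300 * 1000L);

    // Static factory: also a length in milliseconds.
    Duration fromFactory = Duration.millis(300000L);

    // All three represent the same fixed number of milliseconds, so equals() holds.
    System.out.println(fromIso.equals(fromMillis) && fromMillis.equals(fromFactory)); // true
  }
}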