use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError in project hadoop by apache.
the class RollingLevelDBTimelineStore method putEntities.
/**
* Put a single entity. If there is an error, add a TimelinePutError to the
* given response.
*
* @param entityUpdates
* a map containing all the scheduled writes for this put to the
* entity db
* @param indexUpdates
* a map containing all the scheduled writes for this put to the
* index db
* @param entity
* the timeline entity to put
* @param response
* the response to which any put errors are added
* @return the number of writes scheduled by this put
*/
private long putEntities(TreeMap<Long, RollingWriteBatch> entityUpdates, TreeMap<Long, RollingWriteBatch> indexUpdates, TimelineEntity entity, TimelinePutResponse response) {
long putCount = 0;
List<EntityIdentifier> relatedEntitiesWithoutStartTimes = new ArrayList<EntityIdentifier>();
byte[] revStartTime = null;
Map<String, Set<Object>> primaryFilters = null;
try {
List<TimelineEvent> events = entity.getEvents();
// look up the start time for the entity
Long startTime = getAndSetStartTime(entity.getEntityId(), entity.getEntityType(), entity.getStartTime(), events);
if (startTime == null) {
// if no start time is found, add an error and return
TimelinePutError error = new TimelinePutError();
error.setEntityId(entity.getEntityId());
error.setEntityType(entity.getEntityType());
error.setErrorCode(TimelinePutError.NO_START_TIME);
response.addError(error);
return putCount;
}
// Must have a domain
if (StringUtils.isEmpty(entity.getDomainId())) {
TimelinePutError error = new TimelinePutError();
error.setEntityId(entity.getEntityId());
error.setEntityType(entity.getEntityType());
error.setErrorCode(TimelinePutError.NO_DOMAIN);
response.addError(error);
return putCount;
}
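// start times are stored in reverse byte order so that leveldb's ascending
// key sort returns the most recently started entities first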
revStartTime = writeReverseOrderedLong(startTime);
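// entities are bucketed into time-rolled leveldb instances; round the start
// time down to the current rolling period to find (or lazily create) the
// write batch for the db that owns this entity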
long roundedStartTime = entitydb.computeCurrentCheckMillis(startTime);
RollingWriteBatch rollingWriteBatch = entityUpdates.get(roundedStartTime);
if (rollingWriteBatch == null) {
DB db = entitydb.getDBForStartTime(startTime);
if (db != null) {
WriteBatch writeBatch = db.createWriteBatch();
rollingWriteBatch = new RollingWriteBatch(db, writeBatch);
entityUpdates.put(roundedStartTime, rollingWriteBatch);
}
}
if (rollingWriteBatch == null) {
// no entity db exists for this start time, so the entity has already expired; add an error and return
TimelinePutError error = new TimelinePutError();
error.setEntityId(entity.getEntityId());
error.setEntityType(entity.getEntityType());
error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
response.addError(error);
return putCount;
}
WriteBatch writeBatch = rollingWriteBatch.getWriteBatch();
// Cache the getBytes() conversions so they are not repeated for every key below
byte[] entityIdBytes = entity.getEntityId().getBytes(UTF_8);
byte[] entityTypeBytes = entity.getEntityType().getBytes(UTF_8);
byte[] domainIdBytes = entity.getDomainId().getBytes(UTF_8);
// write entity marker
byte[] markerKey = KeyBuilder.newInstance(3).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).getBytesForLookup();
writeBatch.put(markerKey, EMPTY_BYTES);
++putCount;
// write domain id entry
byte[] domainkey = KeyBuilder.newInstance(4).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(DOMAIN_ID_COLUMN).getBytes();
writeBatch.put(domainkey, domainIdBytes);
++putCount;
// write event entries
if (events != null) {
for (TimelineEvent event : events) {
byte[] revts = writeReverseOrderedLong(event.getTimestamp());
byte[] key = KeyBuilder.newInstance().add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(EVENTS_COLUMN).add(revts).add(event.getEventType().getBytes(UTF_8)).getBytes();
byte[] value = fstConf.asByteArray(event.getEventInfo());
writeBatch.put(key, value);
++putCount;
}
}
// write primary filter entries
primaryFilters = entity.getPrimaryFilters();
if (primaryFilters != null) {
for (Entry<String, Set<Object>> primaryFilter : primaryFilters.entrySet()) {
for (Object primaryFilterValue : primaryFilter.getValue()) {
byte[] key = KeyBuilder.newInstance(6).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(PRIMARY_FILTERS_COLUMN).add(primaryFilter.getKey()).add(fstConf.asByteArray(primaryFilterValue)).getBytes();
writeBatch.put(key, EMPTY_BYTES);
++putCount;
}
}
}
// write other info entries
Map<String, Object> otherInfo = entity.getOtherInfo();
if (otherInfo != null) {
for (Entry<String, Object> info : otherInfo.entrySet()) {
byte[] key = KeyBuilder.newInstance(5).add(entityTypeBytes, true).add(revStartTime).add(entityIdBytes, true).add(OTHER_INFO_COLUMN).add(info.getKey()).getBytes();
byte[] value = fstConf.asByteArray(info.getValue());
writeBatch.put(key, value);
++putCount;
}
}
// write related entity entries
Map<String, Set<String>> relatedEntities = entity.getRelatedEntities();
if (relatedEntities != null) {
for (Entry<String, Set<String>> relatedEntityList : relatedEntities.entrySet()) {
String relatedEntityType = relatedEntityList.getKey();
for (String relatedEntityId : relatedEntityList.getValue()) {
// look up start time of related entity
Long relatedStartTimeLong = getStartTimeLong(relatedEntityId, relatedEntityType);
// delay writing the related entity if no start time is found
if (relatedStartTimeLong == null) {
relatedEntitiesWithoutStartTimes.add(new EntityIdentifier(relatedEntityId, relatedEntityType));
continue;
}
byte[] relatedEntityStartTime = writeReverseOrderedLong(relatedStartTimeLong);
long relatedRoundedStartTime = entitydb.computeCurrentCheckMillis(relatedStartTimeLong);
RollingWriteBatch relatedRollingWriteBatch = entityUpdates.get(relatedRoundedStartTime);
if (relatedRollingWriteBatch == null) {
DB db = entitydb.getDBForStartTime(relatedStartTimeLong);
if (db != null) {
WriteBatch relatedWriteBatch = db.createWriteBatch();
relatedRollingWriteBatch = new RollingWriteBatch(db, relatedWriteBatch);
entityUpdates.put(relatedRoundedStartTime, relatedRollingWriteBatch);
}
}
if (relatedRollingWriteBatch == null) {
// the db for the related entity's start time has already expired; add an error and skip this relation
TimelinePutError error = new TimelinePutError();
error.setEntityId(entity.getEntityId());
error.setEntityType(entity.getEntityType());
error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
response.addError(error);
continue;
}
// the related entity already exists; read its stored domain id so the relation can be validated
byte[] relatedDomainIdBytes = relatedRollingWriteBatch.getDB().get(createDomainIdKey(relatedEntityId, relatedEntityType, relatedEntityStartTime));
// The timeline data created by the server before 2.6 won't have
// the domain field. We assume this timeline data is in the
// default timeline domain.
String domainId = null;
if (relatedDomainIdBytes == null) {
domainId = TimelineDataManager.DEFAULT_DOMAIN_ID;
} else {
domainId = new String(relatedDomainIdBytes, UTF_8);
}
if (!domainId.equals(entity.getDomainId())) {
// in this case the entity will be put, but the relation will be
// ignored
TimelinePutError error = new TimelinePutError();
error.setEntityId(entity.getEntityId());
error.setEntityType(entity.getEntityType());
error.setErrorCode(TimelinePutError.FORBIDDEN_RELATION);
response.addError(error);
continue;
}
// write "forward" entry (related entity -> entity)
byte[] key = createRelatedEntityKey(relatedEntityId, relatedEntityType, relatedEntityStartTime, entity.getEntityId(), entity.getEntityType());
WriteBatch relatedWriteBatch = relatedRollingWriteBatch.getWriteBatch();
relatedWriteBatch.put(key, EMPTY_BYTES);
++putCount;
}
}
}
// write index entries for this entity
RollingWriteBatch indexRollingWriteBatch = indexUpdates.get(roundedStartTime);
if (indexRollingWriteBatch == null) {
DB db = indexdb.getDBForStartTime(startTime);
if (db != null) {
WriteBatch indexWriteBatch = db.createWriteBatch();
indexRollingWriteBatch = new RollingWriteBatch(db, indexWriteBatch);
indexUpdates.put(roundedStartTime, indexRollingWriteBatch);
}
}
if (indexRollingWriteBatch == null) {
// no index db exists for this start time, so the entity has already expired; add an error and return
TimelinePutError error = new TimelinePutError();
error.setEntityId(entity.getEntityId());
error.setEntityType(entity.getEntityType());
error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
response.addError(error);
return putCount;
}
WriteBatch indexWriteBatch = indexRollingWriteBatch.getWriteBatch();
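// index the entity marker under each of its primary filters so that queries
// on a primary filter can locate the entity directly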
putCount += writePrimaryFilterEntries(indexWriteBatch, primaryFilters, markerKey, EMPTY_BYTES);
} catch (IOException e) {
LOG.error("Error putting entity " + entity.getEntityId() + " of type " + entity.getEntityType(), e);
TimelinePutError error = new TimelinePutError();
error.setEntityId(entity.getEntityId());
error.setEntityType(entity.getEntityType());
error.setErrorCode(TimelinePutError.IO_EXCEPTION);
response.addError(error);
}
for (EntityIdentifier relatedEntity : relatedEntitiesWithoutStartTimes) {
try {
Long relatedEntityStartAndInsertTime = getAndSetStartTime(relatedEntity.getId(), relatedEntity.getType(), readReverseOrderedLong(revStartTime, 0), null);
if (relatedEntityStartAndInsertTime == null) {
throw new IOException("Error setting start time for related entity");
}
long relatedStartTimeLong = relatedEntityStartAndInsertTime;
long relatedRoundedStartTime = entitydb.computeCurrentCheckMillis(relatedStartTimeLong);
RollingWriteBatch relatedRollingWriteBatch = entityUpdates.get(relatedRoundedStartTime);
if (relatedRollingWriteBatch == null) {
DB db = entitydb.getDBForStartTime(relatedStartTimeLong);
if (db != null) {
WriteBatch relatedWriteBatch = db.createWriteBatch();
relatedRollingWriteBatch = new RollingWriteBatch(db, relatedWriteBatch);
entityUpdates.put(relatedRoundedStartTime, relatedRollingWriteBatch);
}
}
if (relatedRollingWriteBatch == null) {
// the db for the related entity's start time has already expired; add an error and skip it
TimelinePutError error = new TimelinePutError();
error.setEntityId(entity.getEntityId());
error.setEntityType(entity.getEntityType());
error.setErrorCode(TimelinePutError.EXPIRED_ENTITY);
response.addError(error);
continue;
}
WriteBatch relatedWriteBatch = relatedRollingWriteBatch.getWriteBatch();
byte[] relatedEntityStartTime = writeReverseOrderedLong(relatedEntityStartAndInsertTime);
// this related entity is created here for the first time, so it inherits the domain of the entity being put
byte[] key = createDomainIdKey(relatedEntity.getId(), relatedEntity.getType(), relatedEntityStartTime);
relatedWriteBatch.put(key, entity.getDomainId().getBytes(UTF_8));
++putCount;
relatedWriteBatch.put(createRelatedEntityKey(relatedEntity.getId(), relatedEntity.getType(), relatedEntityStartTime, entity.getEntityId(), entity.getEntityType()), EMPTY_BYTES);
++putCount;
relatedWriteBatch.put(createEntityMarkerKey(relatedEntity.getId(), relatedEntity.getType(), relatedEntityStartTime), EMPTY_BYTES);
++putCount;
} catch (IOException e) {
LOG.error("Error putting related entity " + relatedEntity.getId() + " of type " + relatedEntity.getType() + " for entity " + entity.getEntityId() + " of type " + entity.getEntityType(), e);
TimelinePutError error = new TimelinePutError();
error.setEntityId(entity.getEntityId());
error.setEntityType(entity.getEntityType());
error.setErrorCode(TimelinePutError.IO_EXCEPTION);
response.addError(error);
}
}
return putCount;
}
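RollingWriteBatch is used above only through its constructor, getDB() and getWriteBatch(). A minimal sketch of such a holder, with write() and close() added as assumed helpers for how the scheduled batches might eventually be committed (the real inner class in RollingLevelDBTimelineStore may differ):

import java.io.IOException;

import org.iq80.leveldb.DB;
import org.iq80.leveldb.WriteBatch;

// Sketch of a holder that pairs a time-rolled leveldb instance with the
// write batch scheduled against it. Only the constructor, getDB() and
// getWriteBatch() are exercised by putEntities above; write() and close()
// are assumptions about how the caller commits and releases the batch.
class RollingWriteBatch {
  private final DB db;
  private final WriteBatch writeBatch;

  RollingWriteBatch(DB db, WriteBatch writeBatch) {
    this.db = db;
    this.writeBatch = writeBatch;
  }

  DB getDB() {
    return db;
  }

  WriteBatch getWriteBatch() {
    return writeBatch;
  }

  // assumed helper: atomically commit every write scheduled in the batch
  void write() {
    db.write(writeBatch);
  }

  // assumed helper: release the batch once it has been committed
  void close() throws IOException {
    writeBatch.close();
  }
}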
use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError in project hadoop by apache.
the class TestTimelineRecords method testTimelinePutErrors.
@Test
public void testTimelinePutErrors() throws Exception {
TimelinePutResponse putResponse = new TimelinePutResponse();
TimelinePutError error1 = new TimelinePutError();
error1.setEntityId("entity id 1");
error1.setEntityType("entity type 1");
error1.setErrorCode(TimelinePutError.NO_START_TIME);
putResponse.addError(error1);
List<TimelinePutError> errors = new ArrayList<TimelinePutError>();
errors.add(error1);
TimelinePutError error2 = new TimelinePutError();
error2.setEntityId("entity id 2");
error2.setEntityType("entity type 2");
error2.setErrorCode(TimelinePutError.IO_EXCEPTION);
errors.add(error2);
putResponse.addErrors(errors);
LOG.info("Errors in JSON:");
LOG.info(TimelineUtils.dumpTimelineRecordtoJSON(putResponse, true));
Assert.assertEquals(3, putResponse.getErrors().size());
TimelinePutError e = putResponse.getErrors().get(0);
Assert.assertEquals(error1.getEntityId(), e.getEntityId());
Assert.assertEquals(error1.getEntityType(), e.getEntityType());
Assert.assertEquals(error1.getErrorCode(), e.getErrorCode());
e = putResponse.getErrors().get(1);
Assert.assertEquals(error1.getEntityId(), e.getEntityId());
Assert.assertEquals(error1.getEntityType(), e.getEntityType());
Assert.assertEquals(error1.getErrorCode(), e.getErrorCode());
e = putResponse.getErrors().get(2);
Assert.assertEquals(error2.getEntityId(), e.getEntityId());
Assert.assertEquals(error2.getEntityType(), e.getEntityType());
Assert.assertEquals(error2.getErrorCode(), e.getErrorCode());
}
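Every error code exercised on this page is a public int constant on TimelinePutError. A small sketch, not part of Hadoop, of a helper that renders a put error into a readable string for logs:

// Sketch (not part of Hadoop) mapping the TimelinePutError constants used
// on this page back to readable names when inspecting a response.
static String describeError(TimelinePutError error) {
  final String reason;
  switch (error.getErrorCode()) {
    case TimelinePutError.NO_START_TIME:
      reason = "no start time could be determined";
      break;
    case TimelinePutError.IO_EXCEPTION:
      reason = "i/o error while writing to the store";
      break;
    case TimelinePutError.NO_DOMAIN:
      reason = "no domain id was supplied";
      break;
    case TimelinePutError.FORBIDDEN_RELATION:
      reason = "related entity lives in a different domain";
      break;
    case TimelinePutError.EXPIRED_ENTITY:
      reason = "the rolling db for the start time has expired";
      break;
    default:
      reason = "error code " + error.getErrorCode();
      break;
  }
  return error.getEntityId() + "/" + error.getEntityType() + ": " + reason;
}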
use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError in project hadoop by apache.
the class KeyValueBasedTimelineStore method put.
@Override
public synchronized TimelinePutResponse put(TimelineEntities data) {
TimelinePutResponse response = new TimelinePutResponse();
if (getServiceStopped()) {
LOG.info("Service stopped, return null for the storage");
TimelinePutError error = new TimelinePutError();
error.setErrorCode(TimelinePutError.IO_EXCEPTION);
response.addError(error);
return response;
}
for (TimelineEntity entity : data.getEntities()) {
EntityIdentifier entityId = new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
// store entity info in memory
TimelineEntity existingEntity = entities.get(entityId);
boolean needsPut = false;
if (existingEntity == null) {
existingEntity = new TimelineEntity();
existingEntity.setEntityId(entity.getEntityId());
existingEntity.setEntityType(entity.getEntityType());
existingEntity.setStartTime(entity.getStartTime());
if (entity.getDomainId() == null || entity.getDomainId().length() == 0) {
TimelinePutError error = new TimelinePutError();
error.setEntityId(entityId.getId());
error.setEntityType(entityId.getType());
error.setErrorCode(TimelinePutError.NO_DOMAIN);
response.addError(error);
continue;
}
existingEntity.setDomainId(entity.getDomainId());
// insert a new entity to the storage, update insert time map
entityInsertTimes.put(entityId, System.currentTimeMillis());
needsPut = true;
}
if (entity.getEvents() != null) {
if (existingEntity.getEvents() == null) {
existingEntity.setEvents(entity.getEvents());
} else {
existingEntity.addEvents(entity.getEvents());
}
Collections.sort(existingEntity.getEvents());
needsPut = true;
}
// check startTime
if (existingEntity.getStartTime() == null) {
if (existingEntity.getEvents() == null || existingEntity.getEvents().isEmpty()) {
TimelinePutError error = new TimelinePutError();
error.setEntityId(entityId.getId());
error.setEntityType(entityId.getType());
error.setErrorCode(TimelinePutError.NO_START_TIME);
response.addError(error);
entities.remove(entityId);
entityInsertTimes.remove(entityId);
continue;
} else {
Long min = Long.MAX_VALUE;
for (TimelineEvent e : entity.getEvents()) {
if (min > e.getTimestamp()) {
min = e.getTimestamp();
}
}
existingEntity.setStartTime(min);
needsPut = true;
}
}
if (entity.getPrimaryFilters() != null) {
if (existingEntity.getPrimaryFilters() == null) {
existingEntity.setPrimaryFilters(new HashMap<String, Set<Object>>());
}
for (Entry<String, Set<Object>> pf : entity.getPrimaryFilters().entrySet()) {
for (Object pfo : pf.getValue()) {
existingEntity.addPrimaryFilter(pf.getKey(), KeyValueBasedTimelineStoreUtils.compactNumber(pfo));
needsPut = true;
}
}
}
if (entity.getOtherInfo() != null) {
if (existingEntity.getOtherInfo() == null) {
existingEntity.setOtherInfo(new HashMap<String, Object>());
}
for (Entry<String, Object> info : entity.getOtherInfo().entrySet()) {
existingEntity.addOtherInfo(info.getKey(), KeyValueBasedTimelineStoreUtils.compactNumber(info.getValue()));
needsPut = true;
}
}
if (needsPut) {
entities.put(entityId, existingEntity);
}
// relate it to other entities
if (entity.getRelatedEntities() == null) {
continue;
}
for (Entry<String, Set<String>> partRelatedEntities : entity.getRelatedEntities().entrySet()) {
if (partRelatedEntities == null) {
continue;
}
for (String idStr : partRelatedEntities.getValue()) {
EntityIdentifier relatedEntityId = new EntityIdentifier(idStr, partRelatedEntities.getKey());
TimelineEntity relatedEntity = entities.get(relatedEntityId);
if (relatedEntity != null) {
if (relatedEntity.getDomainId().equals(existingEntity.getDomainId())) {
relatedEntity.addRelatedEntity(existingEntity.getEntityType(), existingEntity.getEntityId());
entities.put(relatedEntityId, relatedEntity);
} else {
// in this case the entity will be put, but the relation will be
// ignored
TimelinePutError error = new TimelinePutError();
error.setEntityType(existingEntity.getEntityType());
error.setEntityId(existingEntity.getEntityId());
error.setErrorCode(TimelinePutError.FORBIDDEN_RELATION);
response.addError(error);
}
} else {
relatedEntity = new TimelineEntity();
relatedEntity.setEntityId(relatedEntityId.getId());
relatedEntity.setEntityType(relatedEntityId.getType());
relatedEntity.setStartTime(existingEntity.getStartTime());
relatedEntity.addRelatedEntity(existingEntity.getEntityType(), existingEntity.getEntityId());
relatedEntity.setDomainId(existingEntity.getDomainId());
entities.put(relatedEntityId, relatedEntity);
entityInsertTimes.put(relatedEntityId, System.currentTimeMillis());
}
}
}
}
return response;
}
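put(TimelineEntities) reports per-entity failures through the response rather than by throwing, so callers are expected to inspect it. A minimal usage sketch, assuming store and entities are already prepared:

// Inspect the response of a put; entities may fail individually while the
// rest of the batch succeeds.
TimelinePutResponse response = store.put(entities);
for (TimelinePutError error : response.getErrors()) {
  LOG.warn("Failed to put entity " + error.getEntityId() + " of type "
      + error.getEntityType() + ", error code " + error.getErrorCode());
}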
use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError in project hadoop by apache.
the class LeveldbTimelineStore method handleError.
/**
* Create a TimelinePutError with the given error code for the given entity
* and add it to the response.
*/
private static void handleError(TimelineEntity entity, TimelinePutResponse response, final int errorCode) {
TimelinePutError error = new TimelinePutError();
error.setEntityId(entity.getEntityId());
error.setEntityType(entity.getEntityType());
error.setErrorCode(errorCode);
response.addError(error);
}
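With this helper, the repeated five-line error blocks seen in the stores above collapse into single calls, for example:

// instead of constructing and populating a TimelinePutError inline:
if (StringUtils.isEmpty(entity.getDomainId())) {
  handleError(entity, response, TimelinePutError.NO_DOMAIN);
}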
use of org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError in project hadoop by apache.
the class TimelineStoreTestUtils method loadTestEntityData.
/**
* Load test entity data into the given store.
*
* @throws IOException if a put to the store fails
*/
protected void loadTestEntityData() throws IOException {
beforeTs = System.currentTimeMillis() - 1;
TimelineEntities entities = new TimelineEntities();
Map<String, Set<Object>> primaryFilters = new HashMap<String, Set<Object>>();
Set<Object> l1 = new HashSet<Object>();
l1.add("username");
Set<Object> l2 = new HashSet<Object>();
l2.add(Integer.MAX_VALUE);
Set<Object> l3 = new HashSet<Object>();
l3.add("123abc");
Set<Object> l4 = new HashSet<Object>();
l4.add((long) Integer.MAX_VALUE + 1l);
primaryFilters.put("user", l1);
primaryFilters.put("appname", l2);
primaryFilters.put("other", l3);
primaryFilters.put("long", l4);
Map<String, Object> secondaryFilters = new HashMap<String, Object>();
secondaryFilters.put("startTime", 123456);
secondaryFilters.put("status", "RUNNING");
Map<String, Object> otherInfo1 = new HashMap<String, Object>();
otherInfo1.put("info1", "val1");
otherInfo1.putAll(secondaryFilters);
String entityId1 = "id_1";
String entityType1 = "type_1";
String entityId1b = "id_2";
String entityId2 = "id_2";
String entityType2 = "type_2";
String entityId4 = "id_4";
String entityType4 = "type_4";
String entityId5 = "id_5";
String entityType5 = "type_5";
String entityId6 = "id_6";
String entityId7 = "id_7";
String entityType7 = "type_7";
Map<String, Set<String>> relatedEntities = new HashMap<String, Set<String>>();
relatedEntities.put(entityType2, Collections.singleton(entityId2));
TimelineEvent ev3 = createEvent(789l, "launch_event", null);
TimelineEvent ev4 = createEvent(0l, "init_event", null);
List<TimelineEvent> events = new ArrayList<TimelineEvent>();
events.add(ev3);
events.add(ev4);
entities.setEntities(Collections.singletonList(createEntity(entityId2, entityType2, null, events, null, null, null, "domain_id_1")));
TimelinePutResponse response = store.put(entities);
assertEquals(0, response.getErrors().size());
TimelineEvent ev1 = createEvent(123l, "start_event", null);
entities.setEntities(Collections.singletonList(createEntity(entityId1, entityType1, 123l, Collections.singletonList(ev1), relatedEntities, primaryFilters, otherInfo1, "domain_id_1")));
response = store.put(entities);
assertEquals(0, response.getErrors().size());
entities.setEntities(Collections.singletonList(createEntity(entityId1b, entityType1, null, Collections.singletonList(ev1), relatedEntities, primaryFilters, otherInfo1, "domain_id_1")));
response = store.put(entities);
assertEquals(0, response.getErrors().size());
Map<String, Object> eventInfo = new HashMap<String, Object>();
eventInfo.put("event info 1", "val1");
TimelineEvent ev2 = createEvent(456l, "end_event", eventInfo);
Map<String, Object> otherInfo2 = new HashMap<String, Object>();
otherInfo2.put("info2", "val2");
entities.setEntities(Collections.singletonList(createEntity(entityId1, entityType1, null, Collections.singletonList(ev2), null, primaryFilters, otherInfo2, "domain_id_1")));
response = store.put(entities);
assertEquals(0, response.getErrors().size());
entities.setEntities(Collections.singletonList(createEntity(entityId1b, entityType1, 789l, Collections.singletonList(ev2), null, primaryFilters, otherInfo2, "domain_id_1")));
response = store.put(entities);
assertEquals(0, response.getErrors().size());
entities.setEntities(Collections.singletonList(createEntity("badentityid", "badentity", null, null, null, null, otherInfo1, "domain_id_1")));
response = store.put(entities);
assertEquals(1, response.getErrors().size());
TimelinePutError error = response.getErrors().get(0);
assertEquals("badentityid", error.getEntityId());
assertEquals("badentity", error.getEntityType());
assertEquals(TimelinePutError.NO_START_TIME, error.getErrorCode());
relatedEntities.clear();
relatedEntities.put(entityType5, Collections.singleton(entityId5));
entities.setEntities(Collections.singletonList(createEntity(entityId4, entityType4, 42l, null, relatedEntities, null, null, "domain_id_1")));
response = store.put(entities);
relatedEntities.clear();
otherInfo1.put("info2", "val2");
entities.setEntities(Collections.singletonList(createEntity(entityId6, entityType1, 61l, null, relatedEntities, primaryFilters, otherInfo1, "domain_id_2")));
response = store.put(entities);
relatedEntities.clear();
relatedEntities.put(entityType1, Collections.singleton(entityId1));
entities.setEntities(Collections.singletonList(createEntity(entityId7, entityType7, 62l, null, relatedEntities, null, null, "domain_id_2")));
response = store.put(entities);
assertEquals(1, response.getErrors().size());
assertEquals(entityType7, response.getErrors().get(0).getEntityType());
assertEquals(entityId7, response.getErrors().get(0).getEntityId());
assertEquals(TimelinePutError.FORBIDDEN_RELATION, response.getErrors().get(0).getErrorCode());
if (store instanceof LeveldbTimelineStore) {
LeveldbTimelineStore leveldb = (LeveldbTimelineStore) store;
entities.setEntities(Collections.singletonList(createEntity("OLD_ENTITY_ID_1", "OLD_ENTITY_TYPE_1", 63l, null, null, null, null, null)));
leveldb.putWithNoDomainId(entities);
entities.setEntities(Collections.singletonList(createEntity("OLD_ENTITY_ID_2", "OLD_ENTITY_TYPE_1", 64l, null, null, null, null, null)));
leveldb.putWithNoDomainId(entities);
}
}
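The createEntity helper called throughout this method is not shown in this excerpt; reconstructed from its call sites, a plausible shape (an assumption, not the actual TimelineStoreTestUtils code) is:

// Assumed reconstruction of createEntity from its call sites above; the
// real helper in TimelineStoreTestUtils may differ in details.
protected static TimelineEntity createEntity(String entityId, String entityType,
    Long startTime, List<TimelineEvent> events,
    Map<String, Set<String>> relatedEntities,
    Map<String, Set<Object>> primaryFilters,
    Map<String, Object> otherInfo, String domainId) {
  TimelineEntity entity = new TimelineEntity();
  entity.setEntityId(entityId);
  entity.setEntityType(entityType);
  entity.setStartTime(startTime);
  entity.setEvents(events);
  if (relatedEntities != null) {
    // copy entry by entry so the test's later relatedEntities.clear()
    // calls cannot mutate the entity that was just built
    for (Entry<String, Set<String>> rel : relatedEntities.entrySet()) {
      for (String id : rel.getValue()) {
        entity.addRelatedEntity(rel.getKey(), id);
      }
    }
  }
  entity.setPrimaryFilters(primaryFilters);
  entity.setOtherInfo(otherInfo);
  entity.setDomainId(domainId);
  return entity;
}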