Use of io.cdap.cdap.spi.data.StructuredRow in project cdap (by caskdata): class LineageTable, method getAccessTimesForRun.
/**
 * Returns the access times recorded for a program run (one entry per program/data access row).
 *
 * @param run the program run to look up
 * @return a list of access times (for the program and the data it accesses) associated with the run
 * @throws IOException if the underlying table scan fails
 */
@VisibleForTesting
public List<Long> getAccessTimesForRun(ProgramRunId run) throws IOException {
ImmutableList.Builder<Long> accessTimes = ImmutableList.builder();
List<Field<?>> scanPrefix = getRunScanStartKey(run);
String runId = run.getRun();
try (CloseableIterator<StructuredRow> rows = getProgramTable().scan(Range.singleton(scanPrefix), Integer.MAX_VALUE)) {
while (rows.hasNext()) {
StructuredRow current = rows.next();
// The prefix scan can return rows for other runs; keep only rows matching this run id.
if (runId.equals(current.getString(StoreDefinition.LineageStore.RUN_FIELD))) {
accessTimes.add(current.getLong(StoreDefinition.LineageStore.ACCESS_TIME_FIELD));
}
}
}
return accessTimes.build();
}
Use of io.cdap.cdap.spi.data.StructuredRow in project cdap (by caskdata): class LineageTable, method scanRelations.
/**
 * Scans the given table between the two keys (both bounds inclusive) and collects every
 * relation that passes the supplied filter.
 *
 * @param table the structured table to scan
 * @param startKey inclusive lower bound of the scan
 * @param endKey inclusive upper bound of the scan
 * @param filter predicate deciding which relations are kept
 * @return the set of relations accepted by the filter
 * @throws IOException if the table scan fails
 */
private Set<Relation> scanRelations(StructuredTable table, List<Field<?>> startKey, List<Field<?>> endKey, Predicate<Relation> filter) throws IOException {
ImmutableSet.Builder<Relation> matched = ImmutableSet.builder();
Range scanRange = Range.create(startKey, Range.Bound.INCLUSIVE, endKey, Range.Bound.INCLUSIVE);
try (CloseableIterator<StructuredRow> rows = table.scan(scanRange, Integer.MAX_VALUE)) {
while (rows.hasNext()) {
Relation candidate = toRelation(rows.next());
if (filter.test(candidate)) {
matched.add(candidate);
}
}
}
return matched.build();
}
Use of io.cdap.cdap.spi.data.StructuredRow in project cdap (by caskdata): class LineageTable, method getEntitiesForRun.
/**
 * Returns all entities (the program and every dataset it accessed) associated with a program run.
 *
 * @param run the program run to look up
 * @return a set of entities associated with the run
 * @throws IOException if the underlying table scan fails
 */
public Set<NamespacedEntityId> getEntitiesForRun(ProgramRunId run) throws IOException {
ImmutableSet.Builder<NamespacedEntityId> entities = ImmutableSet.builder();
List<Field<?>> scanPrefix = getRunScanStartKey(run);
String runId = run.getRun();
try (CloseableIterator<StructuredRow> rows = getProgramTable().scan(Range.singleton(scanPrefix), Integer.MAX_VALUE)) {
while (rows.hasNext()) {
StructuredRow current = rows.next();
// The prefix scan can return rows for other runs; keep only rows matching this run id.
if (runId.equals(current.getString(StoreDefinition.LineageStore.RUN_FIELD))) {
entities.add(getProgramFromRow(current));
entities.add(getDatasetFromRow(current));
}
}
}
return entities.build();
}
Use of io.cdap.cdap.spi.data.StructuredRow in project cdap (by caskdata): class JobQueueTable, method toJob.
/**
 * Construct a Job object from the rows in iterator.
 * A job may need up to three rows from the iterator for its construction -
 * <ul>
 * <li>Row JOB - the row containing the job data, required</li>
 * <li>Row DELETE - the row containing the time when the job was marked for deletion, optional</li>
 * <li>Row OBSOLETE - the row containing the time when the job was marked as obsolete, optional</li>
 * </ul>
 * The above three rows will always be next to each other due to sorting.
 *
 * @param peekingIterator should have at least one element
 * @return a Job object for the first schedule in the iterator
 * @throws IllegalStateException if an unknown row type is encountered, or if no JOB row
 *         was found for the schedule (both indicate store corruption or a coding error)
 */
private Job toJob(PeekingIterator<StructuredRow> peekingIterator) {
SimpleJob job = null;
Long toBeDeletedTime = null;
Long isObsoleteTime = null;
// Get the schedule id for the job from the first element
String scheduleId = getScheduleId(peekingIterator.peek());
// Also get the generationId to only read the rows for the current job
int generationId = getGenerationId(peekingIterator.peek());
// Consume all rows belonging to this (scheduleId, generationId) pair; they are adjacent by sort order.
while (peekingIterator.hasNext() && generationId == getGenerationId(peekingIterator.peek()) && scheduleId.equals(getScheduleId(peekingIterator.peek()))) {
StructuredRow row = peekingIterator.next();
StoreDefinition.JobQueueStore.RowType rowType = getRowType(row);
switch (rowType) {
case JOB:
job = fromStructuredRow(row);
break;
case DELETE:
toBeDeletedTime = row.getLong(StoreDefinition.JobQueueStore.DELETE_TIME);
break;
case OBSOLETE:
isObsoleteTime = row.getLong(StoreDefinition.JobQueueStore.OBSOLETE_TIME);
break;
default:
// Should not happen unless a new value is added to the RowType enum
throw new IllegalStateException(String.format("Unknown row type encountered in job queue: %s", rowType));
}
}
if (job == null) {
// Should not happen since we always write delete time or obsolete time only after reading the job from store
throw new IllegalStateException(String.format("Cannot find job for schedule id: %s", scheduleId));
}
// Use whichever marker time is present; if both are present, the earlier one wins.
// (Replaces the deprecated `new Long(...)` boxing constructor — autoboxing suffices.)
Long timeToSet;
if (toBeDeletedTime == null) {
timeToSet = isObsoleteTime;
} else if (isObsoleteTime == null) {
timeToSet = toBeDeletedTime;
} else {
timeToSet = Math.min(isObsoleteTime, toBeDeletedTime);
}
if (timeToSet != null) {
job.setToBeDeleted(timeToSet);
}
return job;
}
Use of io.cdap.cdap.spi.data.StructuredRow in project cdap (by caskdata): class ProgramScheduleStoreDataset, method addScheduleWithStatus.
/**
 * Add a schedule to the store.
 *
 * @param schedule the schedule to add
 * @param status the status of the schedule to add
 * @param currentTime the current time in milliseconds when adding the schedule
 * @throws AlreadyExistsException if the schedule already exists
 * @throws IOException if reading from or writing to the store fails
 */
private void addScheduleWithStatus(ProgramSchedule schedule, ProgramScheduleStatus status, long currentTime) throws AlreadyExistsException, IOException {
Collection<Field<?>> scheduleKeys = getScheduleKeys(schedule.getScheduleId());
// A row may exist for the key with a null SCHEDULE column; only a non-null schedule counts as existing.
Optional<StructuredRow> existingRow = scheduleStore.read(scheduleKeys);
if (existingRow.isPresent() && existingRow.get().getString(StoreDefinition.ProgramScheduleStore.SCHEDULE) != null) {
throw new AlreadyExistsException(schedule.getScheduleId());
}
// Write the schedule row: key fields plus serialized schedule, update time, and status.
Collection<Field<?>> scheduleRow = new ArrayList<>(scheduleKeys);
scheduleRow.add(Fields.stringField(StoreDefinition.ProgramScheduleStore.SCHEDULE, GSON.toJson(schedule)));
scheduleRow.add(Fields.longField(StoreDefinition.ProgramScheduleStore.UPDATE_TIME, currentTime));
scheduleRow.add(Fields.stringField(StoreDefinition.ProgramScheduleStore.STATUS, status.toString()));
scheduleStore.upsert(scheduleRow);
// Index every trigger key of the schedule, numbering them sequentially from zero.
int triggerIndex = 0;
for (String triggerKey : extractTriggerKeys(schedule)) {
Collection<Field<?>> triggerRow = getTriggerKeys(scheduleKeys, triggerIndex++);
triggerRow.add(Fields.stringField(StoreDefinition.ProgramScheduleStore.TRIGGER_KEY, triggerKey));
triggerStore.upsert(triggerRow);
}
}
Aggregations