Use of co.cask.cdap.internal.app.runtime.schedule.StreamSizeScheduleState in the cdap project by caskdata.
The following example shows the list method of the DatasetBasedStreamSizeScheduleStore class.
/**
* @return a list of all the schedules and their states present in the store
*/
public synchronized List<StreamSizeScheduleState> list() throws InterruptedException, TransactionFailureException {
  final List<StreamSizeScheduleState> scheduleStates = Lists.newArrayList();
  factory.createExecutor(ImmutableList.of((TransactionAware) table)).execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      try (Scanner scan = getScannerWithPrefix(table, KEY_PREFIX)) {
        Row row;
        while ((row = scan.next()) != null) {
          byte[] scheduleBytes = row.get(SCHEDULE_COL);
          byte[] baseSizeBytes = row.get(BASE_SIZE_COL);
          byte[] baseTsBytes = row.get(BASE_TS_COL);
          byte[] lastRunSizeBytes = row.get(LAST_RUN_SIZE_COL);
          byte[] lastRunTsBytes = row.get(LAST_RUN_TS_COL);
          byte[] activeBytes = row.get(ACTIVE_COL);
          byte[] propertyBytes = row.get(PROPERTIES_COL);
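          // Skip rows that are missing any of the required columns.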
          if (isInvalidRow(row)) {
            LIMITED_LOG.debug("Stream Sized Schedule entry with Row key {} does not have all columns.",
                              Bytes.toString(row.getRow()));
            continue;
          }
          String rowKey = Bytes.toString(row.getRow());
          String[] splits = rowKey.split(":");
          ProgramId program;
          if (splits.length == 7) {
            // New Row key for the trigger should be of the form -
            // streamSizeSchedule:namespace:application:version:type:program:schedule
            program = new ApplicationId(splits[1], splits[2], splits[3])
              .program(ProgramType.valueOf(splits[4]), splits[5]);
          } else if (splits.length == 6) {
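            // Older row key without an application version:
            // streamSizeSchedule:namespace:application:type:program:schedule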
            program = new ApplicationId(splits[1], splits[2])
              .program(ProgramType.valueOf(splits[3]), splits[4]);
          } else {
            continue;
          }
          SchedulableProgramType programType = program.getType().getSchedulableType();
          StreamSizeSchedule schedule = GSON.fromJson(Bytes.toString(scheduleBytes), StreamSizeSchedule.class);
          long baseSize = Bytes.toLong(baseSizeBytes);
          long baseTs = Bytes.toLong(baseTsBytes);
          long lastRunSize = Bytes.toLong(lastRunSizeBytes);
          long lastRunTs = Bytes.toLong(lastRunTsBytes);
          boolean active = Bytes.toBoolean(activeBytes);
          Map<String, String> properties = Maps.newHashMap();
          if (propertyBytes != null) {
            properties = GSON.fromJson(Bytes.toString(propertyBytes), STRING_MAP_TYPE);
          }
          StreamSizeScheduleState scheduleState =
            new StreamSizeScheduleState(program, programType, schedule, properties,
                                        baseSize, baseTs, lastRunSize, lastRunTs, active);
          scheduleStates.add(scheduleState);
          LOG.debug("StreamSizeSchedule found in store: {}", scheduleState);
        }
      }
    }
  });
  return scheduleStates;
}
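
For context, here is a minimal sketch of how a caller could consume list(), assuming an already-initialized DatasetBasedStreamSizeScheduleStore instance named scheduleStore and an SLF4J logger named LOG (both names are illustrative and not taken from the source):

// Hypothetical caller: fetch every persisted schedule state and log it.
// list() declares InterruptedException and TransactionFailureException,
// so the enclosing method must handle or declare them.
List<StreamSizeScheduleState> states = scheduleStore.list();
for (StreamSizeScheduleState state : states) {
  // Relies on StreamSizeScheduleState's toString(), just as the store's own debug logging does.
  LOG.info("StreamSizeSchedule state in store: {}", state);
}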