Use of com.twitter.ambrose.model.Job in the project ambrose by twitter.
From class JSONUtil, method newMapper:
/**
 * Creates an ObjectMapper configured for Ambrose JSON handling: pretty-printed
 * output, nulls omitted, stream lifecycle left to the caller, and all known
 * Job subtypes registered for polymorphic (de)serialization.
 */
private static ObjectMapper newMapper() {
ObjectMapper result = new ObjectMapper();
result.setSerializationInclusion(JsonInclude.Include.NON_NULL);
result.enable(SerializationFeature.INDENT_OUTPUT);
// The caller owns the underlying stream; never close or flush it implicitly.
result.configure(JsonGenerator.Feature.AUTO_CLOSE_TARGET, false);
result.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
result.disable(SerializationFeature.FLUSH_AFTER_WRITE_VALUE);
result.disable(SerializationFeature.CLOSE_CLOSEABLE);
result.disable(SerializationFeature.FAIL_ON_EMPTY_BEANS);
result.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
// Scan the Ambrose packages so every concrete Job subtype can be resolved
// when deserializing polymorphic Job payloads.
Reflections reflections = new Reflections("com.twitter.ambrose");
Set<Class<? extends Job>> subTypes = reflections.getSubTypesOf(Job.class);
result.registerSubtypes(subTypes.toArray(new Class<?>[0]));
return result;
}
Use of com.twitter.ambrose.model.Job in the project ambrose by twitter.
From class AmbroseHivePreHook, method sendFilteredJobsStatus:
/**
 * Pushes "finished" events for DAG nodes whose jobs were filtered out (they
 * have no assigned job id), then emits an overall workflow-progress event.
 *
 * @param queryId id of the Hive query whose nodes are being reported
 * @param reporter progress reporter that receives the events
 * @param nodeIdToDAGNode node-id to DAGNode mapping; may be null (no-op)
 */
private void sendFilteredJobsStatus(String queryId, EmbeddedAmbroseHiveProgressReporter reporter, Map<String, DAGNode<Job>> nodeIdToDAGNode) {
if (nodeIdToDAGNode == null) {
return;
}
Map<WorkflowProgressField, String> eventData = new HashMap<Event.WorkflowProgressField, String>(1);
int skipped = 0;
for (DAGNode<Job> dagNode : nodeIdToDAGNode.values()) {
Job job = dagNode.getJob();
// filtered jobs don't have assigned jobId
if (job.getId() != null) {
continue;
}
String nodeId = dagNode.getName();
job.setId(AmbroseHiveUtil.asDisplayId(queryId, "filtered out", nodeId));
reporter.addJobIdToProgress(nodeId, 100);
reporter.pushEvent(queryId, new Event.JobFinishedEvent(dagNode));
skipped++;
}
// sleep so that all these events will be visible on GUI before going on
try {
Thread.sleep(skipped * 1000L);
} catch (InterruptedException e) {
// Restore the interrupt status so callers up the stack can observe it.
Thread.currentThread().interrupt();
LOG.warn("Sleep interrupted", e);
}
eventData.put(WorkflowProgressField.workflowProgress, Integer.toString(reporter.getOverallProgress()));
reporter.pushEvent(queryId, new Event.WorkflowProgressEvent(eventData));
}
Use of com.twitter.ambrose.model.Job in the project ambrose by twitter.
From class HiveDAGTransformer, method asDAGNode:
/**
 * Converts job properties to a DAGNode representation.
 *
 * @param task the Hive task to wrap
 * @return a DAGNode holding a HiveJob built from the task's aliases and features
 */
private DAGNode<Job> asDAGNode(Task<? extends Serializable> task) {
MapredWork work = (MapredWork) task.getWork();
List<String> tableAliases = getAllJobAliases(getPathToAliases(work));
String[] jobFeatures = getFeatures(work.getAllOperators(), task.getTaskTag());
String[] aliasesForDisplay = getDisplayAliases(tableAliases);
// DAGNode's name of a workflow is unique among all workflows
String nodeName = AmbroseHiveUtil.getNodeIdFromNodeName(conf, task.getId());
DAGNode<Job> node = new DAGNode<Job>(nodeName, new HiveJob(aliasesForDisplay, jobFeatures));
// init empty successors
node.setSuccessors(new ArrayList<DAGNode<? extends Job>>());
return node;
}
Use of com.twitter.ambrose.model.Job in the project ambrose by twitter.
From class CascadingJobTest, method doTestRoundTrip:
// Round-trips a CascadingJob through JSON and verifies the deserialized
// object is equal to the original.
private void doTestRoundTrip(CascadingJob expected) throws IOException {
String asJson = expected.toJson();
Job asJobAgain = Job.fromJson(asJson);
// assert that we get a CascadingJob back without having to ask for it explicitly
assertTrue(asJobAgain instanceof CascadingJob);
assertJobEquals(expected, (CascadingJob) asJobAgain);
}
Use of com.twitter.ambrose.model.Job in the project ambrose by twitter.
From class HiveJobTest, method doTestRoundTrip:
/**
 * Serializes a HiveJob to JSON, deserializes it back, and checks that the
 * result is a HiveJob equal to the original.
 */
private void doTestRoundTrip(HiveJob expected) throws IOException {
String json = expected.toJson();
Job roundTripped = Job.fromJson(json);
// Deserialization should yield a HiveJob without an explicit type request.
assertTrue(roundTripped instanceof HiveJob);
assertJobEquals(expected, (HiveJob) roundTripped);
}
Aggregations