Use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.
The class JobSpecificationActivityClusterGraphGeneratorFactory, method createActivityClusterGraphGenerator.
@Override
public IActivityClusterGraphGenerator createActivityClusterGraphGenerator(JobId jobId,
        final ICCServiceContext ccServiceCtx, Set<JobFlag> jobFlags) throws HyracksException {
    // Build the job activity graph by visiting all connectors and operators of the spec.
    final JobActivityGraphBuilder builder = new JobActivityGraphBuilder(spec, jobFlags);
    PlanUtils.visit(spec, new IConnectorDescriptorVisitor() {
        @Override
        public void visit(IConnectorDescriptor conn) throws HyracksException {
            builder.addConnector(conn);
        }
    });
    PlanUtils.visit(spec, new IOperatorDescriptorVisitor() {
        @Override
        public void visit(IOperatorDescriptor op) {
            op.contributeActivities(builder);
        }
    });
    builder.finish();
    final JobActivityGraph jag = builder.getActivityGraph();

    // Infer activity clusters from the activity graph and copy job-level settings.
    ActivityClusterGraphBuilder acgb = new ActivityClusterGraphBuilder();
    final ActivityClusterGraph acg = acgb.inferActivityClusters(jobId, jag);
    acg.setFrameSize(spec.getFrameSize());
    acg.setMaxReattempts(spec.getMaxReattempts());
    acg.setJobletEventListenerFactory(spec.getJobletEventListenerFactory());
    acg.setGlobalJobDataFactory(spec.getGlobalJobDataFactory());
    acg.setConnectorPolicyAssignmentPolicy(spec.getConnectorPolicyAssignmentPolicy());
    acg.setUseConnectorPolicyForScheduling(spec.isUseConnectorPolicyForScheduling());

    // Gather scheduling constraints contributed by operators, connectors, and the user.
    final Set<Constraint> constraints = new HashSet<>();
    final IConstraintAcceptor acceptor = new IConstraintAcceptor() {
        @Override
        public void addConstraint(Constraint constraint) {
            constraints.add(constraint);
        }
    };
    PlanUtils.visit(spec, new IOperatorDescriptorVisitor() {
        @Override
        public void visit(IOperatorDescriptor op) {
            op.contributeSchedulingConstraints(acceptor, ccServiceCtx);
        }
    });
    PlanUtils.visit(spec, new IConnectorDescriptorVisitor() {
        @Override
        public void visit(IConnectorDescriptor conn) {
            conn.contributeSchedulingConstraints(acceptor, acg.getConnectorMap().get(conn.getConnectorId()),
                    ccServiceCtx);
        }
    });
    constraints.addAll(spec.getUserConstraints());

    // The returned generator applies the rewriter lazily when initialize() is called.
    return new IActivityClusterGraphGenerator() {
        @Override
        public ActivityClusterGraph initialize() {
            ActivityClusterGraphRewriter rewriter = new ActivityClusterGraphRewriter();
            rewriter.rewrite(acg);
            return acg;
        }

        @Override
        public Set<Constraint> getConstraints() {
            return constraints;
        }
    };
}
Use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.
The class HyracksConnection, method deployBinary.
@Override
public DeploymentId deployBinary(List<String> jars) throws Exception {
    /** generate a deployment id */
    DeploymentId deploymentId = new DeploymentId(UUID.randomUUID().toString());
    List<URL> binaryURLs = new ArrayList<>();
    if (jars != null && !jars.isEmpty()) {
        CloseableHttpClient hc = new DefaultHttpClient();
        try {
            /** upload jars through a http client one-by-one to the CC server */
            for (String jar : jars) {
                int slashIndex = jar.lastIndexOf('/');
                String fileName = jar.substring(slashIndex + 1);
                String url = "http://" + ccHost + ":" + ccInfo.getWebPort() + "/applications/"
                        + deploymentId.toString() + "&" + fileName;
                HttpPut put = new HttpPut(url);
                put.setEntity(new FileEntity(new File(jar), "application/octet-stream"));
                HttpResponse response = hc.execute(put);
                response.getEntity().consumeContent();
                if (response.getStatusLine().getStatusCode() != 200) {
                    hci.unDeployBinary(deploymentId);
                    throw new HyracksException(response.getStatusLine().toString());
                }
                /** add the uploaded URL address into the URLs of jars to be deployed at NCs */
                binaryURLs.add(new URL(url));
            }
        } finally {
            hc.close();
        }
    }
    /** deploy the URLs to the CC and NCs */
    hci.deployBinary(binaryURLs, deploymentId);
    return deploymentId;
}
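A minimal caller sketch for this API follows; the host, port, and jar path are illustrative assumptions, and the enclosing method is assumed to declare "throws Exception" to match deployBinary's signature.

    // Hypothetical usage; "localhost", 1098, and the jar path are assumptions.
    IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);
    try {
        DeploymentId did = hcc.deployBinary(Collections.singletonList("/tmp/my-udf.jar"));
        System.out.println("Deployed as " + did);
    } catch (HyracksException e) {
        // Raised above when the CC answers a jar upload with a non-200 status;
        // unDeployBinary has already been called for cleanup at that point.
        System.err.println("Deployment failed: " + e.getMessage());
        throw e;
    }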
Use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.
The class QueryServiceServlet, method handleRequest.
private void handleRequest(RequestParameters param, IServletResponse response) throws IOException {
    LOGGER.info(param.toString());
    long elapsedStart = System.nanoTime();
    final StringWriter stringWriter = new StringWriter();
    final PrintWriter resultWriter = new PrintWriter(stringWriter);
    ResultDelivery delivery = parseResultDelivery(param.mode);
    String handleUrl = getHandleUrl(param.host, param.path, delivery);
    SessionOutput sessionOutput = createSessionOutput(param, handleUrl, resultWriter);
    SessionConfig sessionConfig = sessionOutput.config();
    HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
    HttpResponseStatus status = HttpResponseStatus.OK;
    Stats stats = new Stats();
    long[] execStartEnd = new long[] { -1, -1 };
    resultWriter.print("{\n");
    printRequestId(resultWriter);
    printClientContextID(resultWriter, param);
    printSignature(resultWriter);
    printType(resultWriter, sessionConfig);
    try {
        if (param.statement == null || param.statement.isEmpty()) {
            throw new AsterixException("Empty request, no statement provided");
        }
        String statementsText = param.statement + ";";
        executeStatement(statementsText, sessionOutput, delivery, stats, param, handleUrl, execStartEnd);
        if (ResultDelivery.IMMEDIATE == delivery || ResultDelivery.DEFERRED == delivery) {
            ResultUtil.printStatus(sessionOutput, ResultStatus.SUCCESS);
        }
    } catch (AlgebricksException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.INFO, pe.getMessage(), pe);
        ResultUtil.printError(resultWriter, pe);
        ResultUtil.printStatus(sessionOutput, ResultStatus.FATAL);
        status = HttpResponseStatus.BAD_REQUEST;
    } catch (HyracksException pe) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.WARNING, pe.getMessage(), pe);
        ResultUtil.printError(resultWriter, pe);
        ResultUtil.printStatus(sessionOutput, ResultStatus.FATAL);
        status = HttpResponseStatus.INTERNAL_SERVER_ERROR;
    } catch (Exception e) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Unexpected exception", e);
        ResultUtil.printError(resultWriter, e);
        ResultUtil.printStatus(sessionOutput, ResultStatus.FATAL);
        status = HttpResponseStatus.INTERNAL_SERVER_ERROR;
    } finally {
        if (execStartEnd[0] == -1) {
            execStartEnd[1] = -1;
        } else if (execStartEnd[1] == -1) {
            execStartEnd[1] = System.nanoTime();
        }
    }
    printMetrics(resultWriter, System.nanoTime() - elapsedStart, execStartEnd[1] - execStartEnd[0],
            stats.getCount(), stats.getSize());
    resultWriter.print("}\n");
    resultWriter.flush();
    String result = stringWriter.toString();
    GlobalConfig.ASTERIX_LOGGER.log(Level.FINE, result);
    response.setStatus(status);
    response.writer().print(result);
    if (response.writer().checkError()) {
        LOGGER.warning("Error flushing output writer");
    }
}
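The catch blocks above effectively implement an exception-to-HTTP-status mapping; a condensed sketch of that mapping is shown below (the helper name is hypothetical and not part of the servlet).

    // Hypothetical helper summarizing the mapping used in the catch blocks above.
    private static HttpResponseStatus statusFor(Throwable t) {
        if (t instanceof AlgebricksException || t instanceof TokenMgrError) {
            return HttpResponseStatus.BAD_REQUEST; // compilation/parse errors are the client's fault
        }
        // HyracksException and anything unexpected are reported as server-side failures.
        return HttpResponseStatus.INTERNAL_SERVER_ERROR;
    }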
Use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.
The class JobManagerTest, method test.
@Test
public void test() throws IOException, CmdLineException {
    IJobCapacityController jobCapacityController = mock(IJobCapacityController.class);
    IJobManager jobManager = spy(new JobManager(ccConfig, mockClusterControllerService(), jobCapacityController));

    // Submits runnable jobs.
    List<JobRun> acceptedRuns = new ArrayList<>();
    for (int id = 0; id < 4096; ++id) {
        // Mocks an immediately executable job.
        JobRun run = mockJobRun(id);
        JobSpecification job = mock(JobSpecification.class);
        when(run.getJobSpecification()).thenReturn(job);
        when(jobCapacityController.allocate(job)).thenReturn(IJobCapacityController.JobSubmissionStatus.EXECUTE);

        // Submits the job.
        acceptedRuns.add(run);
        jobManager.add(run);
        Assert.assertTrue(jobManager.getRunningJobs().size() == id + 1);
        Assert.assertTrue(jobManager.getPendingJobs().isEmpty());
    }

    // Submits jobs that will be deferred due to the capacity limitation.
    List<JobRun> deferredRuns = new ArrayList<>();
    for (int id = 4096; id < 8192; ++id) {
        // Mocks a deferred job.
        JobRun run = mockJobRun(id);
        JobSpecification job = mock(JobSpecification.class);
        when(run.getJobSpecification()).thenReturn(job);
        when(jobCapacityController.allocate(job)).thenReturn(IJobCapacityController.JobSubmissionStatus.QUEUE)
                .thenReturn(IJobCapacityController.JobSubmissionStatus.EXECUTE);

        // Submits the job.
        deferredRuns.add(run);
        jobManager.add(run);
        Assert.assertTrue(jobManager.getRunningJobs().size() == 4096);
        Assert.assertTrue(jobManager.getPendingJobs().size() == id + 1 - 4096);
    }

    // Further jobs will be denied because the job queue is full.
    boolean jobQueueFull = false;
    try {
        JobRun run = mockJobRun(8193);
        JobSpecification job = mock(JobSpecification.class);
        when(run.getJobSpecification()).thenReturn(job);
        when(jobCapacityController.allocate(job)).thenReturn(IJobCapacityController.JobSubmissionStatus.QUEUE)
                .thenReturn(IJobCapacityController.JobSubmissionStatus.EXECUTE);
        jobManager.add(run);
    } catch (HyracksException e) {
        // Verifies the error code.
        jobQueueFull = e.getErrorCode() == ErrorCode.JOB_QUEUE_FULL;
    }
    Assert.assertTrue(jobQueueFull);

    // Completes runnable jobs.
    for (JobRun run : acceptedRuns) {
        jobManager.prepareComplete(run, JobStatus.TERMINATED, Collections.emptyList());
        jobManager.finalComplete(run);
    }
    Assert.assertTrue(jobManager.getRunningJobs().size() == 4096);
    Assert.assertTrue(jobManager.getPendingJobs().isEmpty());
    Assert.assertTrue(jobManager.getArchivedJobs().size() == ccConfig.getJobHistorySize());

    // Completes deferred jobs.
    for (JobRun run : deferredRuns) {
        jobManager.prepareComplete(run, JobStatus.TERMINATED, Collections.emptyList());
        jobManager.finalComplete(run);
    }
    Assert.assertTrue(jobManager.getRunningJobs().isEmpty());
    Assert.assertTrue(jobManager.getPendingJobs().isEmpty());
    Assert.assertTrue(jobManager.getArchivedJobs().size() == ccConfig.getJobHistorySize());
    verify(jobManager, times(8192)).prepareComplete(any(), any(), any());
    verify(jobManager, times(8192)).finalComplete(any());
}
Use of org.apache.hyracks.api.exceptions.HyracksException in project asterixdb by apache.
The class JobManagerTest, method testExceedMax.
@Test
public void testExceedMax() throws HyracksException {
    IJobCapacityController jobCapacityController = mock(IJobCapacityController.class);
    IJobManager jobManager = spy(new JobManager(ccConfig, mockClusterControllerService(), jobCapacityController));
    boolean rejected = false;
    // A job should be rejected immediately if its requirement exceeds the maximum capacity of the cluster.
    try {
        JobRun run = mockJobRun(1);
        JobSpecification job = mock(JobSpecification.class);
        when(run.getJobSpecification()).thenReturn(job);
        when(jobCapacityController.allocate(job))
                .thenThrow(HyracksException.create(ErrorCode.JOB_REQUIREMENTS_EXCEED_CAPACITY, "1", "0"));
        jobManager.add(run);
    } catch (HyracksException e) {
        // Verifies the error code.
        rejected = e.getErrorCode() == ErrorCode.JOB_REQUIREMENTS_EXCEED_CAPACITY;
    }
    Assert.assertTrue(rejected);
    Assert.assertTrue(jobManager.getRunningJobs().isEmpty());
    Assert.assertTrue(jobManager.getPendingJobs().isEmpty());
    Assert.assertTrue(jobManager.getArchivedJobs().size() == 0);
}
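Both tests rely on the error-code mechanism carried by HyracksException; a minimal sketch of that pattern follows (the message parameter passed to create is illustrative, and the same imports as the tests above are assumed).

    // HyracksException.create attaches a Hyracks ErrorCode that callers can match on,
    // as JobManager does when it rejects a submission.
    try {
        throw HyracksException.create(ErrorCode.JOB_QUEUE_FULL, 4096);
    } catch (HyracksException e) {
        if (e.getErrorCode() == ErrorCode.JOB_QUEUE_FULL) {
            // e.g. report "cluster is busy, retry later" instead of a raw stack trace
        }
    }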