Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
The class RestApiServlet, method doHandle.
private void doHandle(IServletResponse response, String query, SessionOutput sessionOutput,
        ResultDelivery resultDelivery) throws JsonProcessingException {
    try {
        response.setStatus(HttpResponseStatus.OK);
        IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
        IHyracksDataset hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
        if (hds == null) {
            // Double-checked locking: create the shared dataset client lazily and
            // re-read the attribute inside the lock so only one instance is created.
            synchronized (ctx) {
                hds = (IHyracksDataset) ctx.get(HYRACKS_DATASET_ATTR);
                if (hds == null) {
                    hds = new HyracksDataset(hcc, appCtx.getCompilerProperties().getFrameSize(),
                            ResultReader.NUM_READERS);
                    ctx.put(HYRACKS_DATASET_ATTR, hds);
                }
            }
        }
        IParser parser = parserFactory.createParser(query);
        List<Statement> aqlStatements = parser.parse();
        validate(aqlStatements);
        MetadataManager.INSTANCE.init();
        IStatementExecutor translator = statementExecutorFactory.create(appCtx, aqlStatements, sessionOutput,
                compilationProvider, componentProvider);
        translator.compileAndExecute(hcc, hds, resultDelivery, null, new IStatementExecutor.Stats());
    } catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
        // Parse errors: log, then write a structured error response to the session output.
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, pe.getMessage(), pe);
        String errorMessage = ResultUtil.buildParseExceptionMessage(pe, query);
        ObjectNode errorResp =
                ResultUtil.getErrorResponse(2, errorMessage, "", ResultUtil.extractFullStackTrace(pe));
        sessionOutput.out().write(new ObjectMapper().writeValueAsString(errorResp));
    } catch (Exception e) {
        GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, e.getMessage(), e);
        response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
        ResultUtil.apiErrorHandler(sessionOutput.out(), e);
    }
}
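The null-check, lock, re-check sequence above is classic double-checked locking for lazily creating one shared HyracksDataset per servlet context. A minimal standalone sketch of the same pattern, with a plain ConcurrentHashMap standing in for the servlet context and a placeholder Object in place of the dataset client (both are illustrative assumptions, not asterixdb code):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class LazyAttribute {
    private static final String ATTR = "shared.client";
    private final Map<String, Object> ctx = new ConcurrentHashMap<>();

    Object getOrCreate() {
        Object client = ctx.get(ATTR);
        if (client == null) {
            synchronized (ctx) {
                // Re-check inside the lock so only one thread creates the instance.
                client = ctx.get(ATTR);
                if (client == null) {
                    client = new Object(); // stand-in for new HyracksDataset(...)
                    ctx.put(ATTR, client);
                }
            }
        }
        return client;
    }
}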
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
The class ClusterControllerDetailsApiServlet, method get.
@Override
protected void get(IServletRequest request, IServletResponse response) throws IOException {
PrintWriter responseWriter = response.writer();
IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
try {
ObjectNode json;
response.setStatus(HttpResponseStatus.OK);
if ("".equals(localPath(request))) {
json = (ObjectNode) getClusterStateJSON(request, "../").get("cc");
} else {
json = processNode(request, hcc);
}
HttpUtil.setContentType(response, HttpUtil.ContentType.APPLICATION_JSON, HttpUtil.Encoding.UTF8);
responseWriter.write(new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(json));
} catch (IllegalArgumentException e) {
// NOSONAR - exception not logged or rethrown
response.setStatus(HttpResponseStatus.NOT_FOUND);
} catch (Exception e) {
LOGGER.log(Level.INFO, "exception thrown for " + request, e);
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
responseWriter.write(e.toString());
}
responseWriter.flush();
}
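The response body is produced with Jackson's default pretty printer, so clients receive indented, human-readable JSON. A self-contained sketch of that call, where the keys and values are made up for illustration:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class PrettyJson {
    public static void main(String[] args) throws Exception {
        ObjectMapper om = new ObjectMapper();
        ObjectNode node = om.createObjectNode();
        node.put("node_id", "nc1");   // illustrative keys and values
        node.put("state", "ACTIVE");
        // writerWithDefaultPrettyPrinter() yields indented multi-line JSON
        // instead of the compact single-line default.
        System.out.println(om.writerWithDefaultPrettyPrinter().writeValueAsString(node));
    }
}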
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
The class DiagnosticsApiServlet, method getClusterDiagnosticsJSON.
private ObjectNode getClusterDiagnosticsJSON() throws Exception {
    ObjectMapper om = new ObjectMapper();
    IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(HYRACKS_CONNECTION_ATTR);
    ExecutorService executor = (ExecutorService) ctx.get(ServletConstants.EXECUTOR_SERVICE_ATTR);
    // Gather cluster controller diagnostics in parallel; a null node id targets the CC.
    Map<String, Future<ObjectNode>> ccFutureData = new HashMap<>();
    ccFutureData.put("threaddump",
            executor.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getThreadDump(null)))));
    ccFutureData.put("config",
            executor.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getNodeDetailsJSON(null, false, true)))));
    ccFutureData.put("stats",
            executor.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getNodeDetailsJSON(null, true, false)))));
    // Collect the same three sections for every node controller.
    Map<String, Map<String, Future<ObjectNode>>> ncDataMap = new HashMap<>();
    for (String nc : appCtx.getMetadataProperties().getNodeNames()) {
        Map<String, Future<ObjectNode>> ncData = new HashMap<>();
        ncData.put("threaddump",
                executor.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getThreadDump(nc)))));
        ncData.put("config",
                executor.submit(() -> fixupKeys((ObjectNode) om.readTree(hcc.getNodeDetailsJSON(nc, false, true)))));
        ncData.put("stats", executor.submit(() -> fixupKeys(processNodeStats(hcc, nc))));
        ncDataMap.put(nc, ncData);
    }
    // Resolve all futures and assemble the final report.
    ObjectNode result = om.createObjectNode();
    result.putPOJO("cc", resolveFutures(ccFutureData));
    List<Map<String, ?>> ncList = new ArrayList<>();
    for (Map.Entry<String, Map<String, Future<ObjectNode>>> entry : ncDataMap.entrySet()) {
        final Map<String, Object> ncMap = resolveFutures(entry.getValue());
        ncMap.put("node_id", entry.getKey());
        ncList.add(ncMap);
    }
    result.putPOJO("ncs", ncList);
    result.putPOJO("date", new Date());
    return result;
}
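resolveFutures is referenced here but not shown in this listing. A plausible sketch of such a helper, under the assumption that each future yields an ObjectNode and that a failed section should be reported in-line rather than abort the whole report (the actual asterixdb implementation may differ):

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Future;
import com.fasterxml.jackson.databind.node.ObjectNode;

final class FutureResolver {
    static Map<String, Object> resolveFutures(Map<String, Future<ObjectNode>> futures) {
        Map<String, Object> result = new HashMap<>();
        for (Map.Entry<String, Future<ObjectNode>> entry : futures.entrySet()) {
            try {
                // Block until this section of the diagnostics report is ready.
                result.put(entry.getKey(), entry.getValue().get());
            } catch (Exception e) {
                // Keep partial results; surface the failure as an error string.
                result.put(entry.getKey(), "error: " + e);
            }
        }
        return result;
    }
}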
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
The class DataflowTest, method testHDFSReadWriteOperators.
/**
 * Test a job with only HDFS reads and writes.
 *
 * @throws Exception
 */
public void testHDFSReadWriteOperators() throws Exception {
    FileInputFormat.setInputPaths(conf, HDFS_INPUT_PATH);
    FileOutputFormat.setOutputPath(conf, new Path(HDFS_OUTPUT_PATH));
    conf.setInputFormat(TextInputFormat.class);
    // Let the scheduler map HDFS input splits to node controllers for data locality.
    Scheduler scheduler = new Scheduler(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
    InputSplit[] splits = conf.getInputFormat().getSplits(conf, numberOfNC * 4);
    String[] readSchedule = scheduler.getLocationConstraints(splits);
    JobSpecification jobSpec = new JobSpecification();
    RecordDescriptor recordDesc =
            new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
    String[] locations =
            new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID, HyracksUtils.NC2_ID };
    // Pipeline: HDFS read -> external sort -> HDFS write.
    HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
            readSchedule, new TextKeyValueParserFactory());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
    ExternalSortOperatorDescriptor sortOperator = new ExternalSortOperatorDescriptor(jobSpec, 10, new int[] { 0 },
            new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
    HDFSWriteOperatorDescriptor writeOperator =
            new HDFSWriteOperatorDescriptor(jobSpec, conf, new TextTupleWriterFactory());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
    jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
    // Merge the sorted partitions into the single writer, hash-partitioning on field 0.
    jobSpec.connect(
            new MToNPartitioningMergingConnectorDescriptor(jobSpec,
                    new FieldHashPartitionComputerFactory(new int[] { 0 },
                            new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
                    new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
            sortOperator, 0, writeOperator, 0);
    jobSpec.addRoot(writeOperator);
    IHyracksClientConnection client =
            new HyracksConnection(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
    JobId jobId = client.startJob(jobSpec);
    client.waitForCompletion(jobId);
    Assert.assertEquals(true, checkResults());
}
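Stripped of the operator wiring, the client-side lifecycle reduces to three calls: connect, start, wait. A minimal sketch using only the API shown in the snippets above, assuming a cluster controller is already listening at the given host and port:

import org.apache.hyracks.api.client.HyracksConnection;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobSpecification;

public class SubmitJob {
    static void run(JobSpecification jobSpec, String ccHost, int ccPort) throws Exception {
        IHyracksClientConnection client = new HyracksConnection(ccHost, ccPort);
        JobId jobId = client.startJob(jobSpec); // asynchronous submission
        client.waitForCompletion(jobId);        // block until the job finishes
    }
}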
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
The class Groupby, method main.
public static void main(String[] args) throws Exception {
    Options options = new Options();
    CmdLineParser parser = new CmdLineParser(options);
    if (args.length == 0) {
        parser.printUsage(System.err);
        return;
    }
    parser.parseArgument(args);
    IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
    JobSpecification job;
    long start = System.currentTimeMillis();
    job = createJob(parseFileSplits(options.inFileSplits), parseFileSplits(options.outFileSplits),
            options.htSize, options.fileSize, options.frameLimit, options.frameSize, options.algo,
            options.outPlain);
    if (job != null) {
        System.out.print("CreateJobTime:" + (System.currentTimeMillis() - start));
        start = System.currentTimeMillis();
        JobId jobId = hcc.startJob(job);
        hcc.waitForCompletion(jobId);
        System.out.println("JobExecuteTime:" + (System.currentTimeMillis() - start));
    }
}
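The Options bean is not shown in this listing. Since main hands it to an args4j CmdLineParser and reads fields such as host, port, and the file-split strings, it is presumably declared along these lines; the flag names and usage strings below are assumptions, and the remaining fields (htSize, fileSize, frameLimit, frameSize, algo, outPlain) would follow the same pattern:

import org.kohsuke.args4j.Option;

public class Options {
    // Hypothetical flag names; the actual asterixdb declarations may differ.
    @Option(name = "-host", usage = "Cluster controller host", required = true)
    public String host;

    @Option(name = "-port", usage = "Cluster controller client port", required = true)
    public int port;

    @Option(name = "-infile-splits", usage = "Comma-separated input file splits", required = true)
    public String inFileSplits;

    @Option(name = "-outfile-splits", usage = "Comma-separated output file splits", required = true)
    public String outFileSplits;
}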