Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
The class Join, method main.
public static void main(String[] args) throws Exception {
    Options options = new Options();
    CmdLineParser parser = new CmdLineParser(options);
    if (args.length == 0) {
        parser.printUsage(System.err);
        return;
    }
    parser.parseArgument(args);
    IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
    JobSpecification job = createJob(parseFileSplits(options.inFileCustomerSplits),
            parseFileSplits(options.inFileOrderSplits), parseFileSplits(options.outFileSplits),
            options.numJoinPartitions, options.algo, options.graceInputSize, options.graceRecordsPerFrame,
            options.graceFactor, options.memSize, options.tableSize, options.hasGroupBy, options.frameSize);
    if (job == null) {
        return;
    }
    long start = System.currentTimeMillis();
    JobId jobId = hcc.startJob(job,
            options.profile ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
    hcc.waitForCompletion(jobId);
    long end = System.currentTimeMillis();
    System.err.println(start + " " + end + " " + (end - start));
}
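Distilled from the main method above, the Hyracks client pattern is: open a HyracksConnection to the cluster controller, submit a JobSpecification, and block until the job completes. The sketch below shows only that skeleton; the host, port, and buildJob() helper are hypothetical placeholders, and the imports reflect the package names used in this project.

// Minimal sketch of the connect/submit/wait skeleton, assuming the standard Hyracks client API.
// The host, port, and buildJob() helper are placeholders, not values from the project.
import java.util.EnumSet;
import org.apache.hyracks.api.client.HyracksConnection;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.job.JobFlag;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobSpecification;

public class RunJobSketch {
    public static void main(String[] args) throws Exception {
        // Connect to the cluster controller's client port.
        IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);
        // Stand-in for createJob(...) in the examples on this page.
        JobSpecification job = buildJob();
        // Submit with runtime profiling enabled, then block until the job finishes.
        JobId jobId = hcc.startJob(job, EnumSet.of(JobFlag.PROFILE_RUNTIME));
        hcc.waitForCompletion(jobId);
    }

    private static JobSpecification buildJob() {
        // Placeholder: a real job wires operators and connectors into a JobSpecification.
        throw new UnsupportedOperationException("assemble a JobSpecification here");
    }
}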
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
The class ClusterShutdownIT, method runShutdown.
@Test
public void runShutdown() throws Exception {
    IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);
    hcc.stopCluster(false);
    // What happens here: stopping the cluster a second time must fail on the closed handle.
    closeTwice.expect(IPCException.class);
    closeTwice.expectMessage("Cannot send on a closed handle");
    hcc.stopCluster(false);
    ServerSocket c = null;
    ServerSocket s = null;
    try {
        c = new ServerSocket(1098);
        // we should be able to bind to this
        s = new ServerSocket(1099);
        // and we should be able to bind to this too
    } catch (Exception e) {
        LOGGER.severe(e.getMessage());
        throw e;
    } finally {
        // Guard against NPEs when one of the binds failed.
        if (s != null) {
            s.close();
        }
        if (c != null) {
            c.close();
        }
    }
}
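The closeTwice rule used above is not declared in this snippet. In a JUnit 4 test it is typically an ExpectedException rule field; the declaration below is an assumption about how the test class defines it, not code copied from ClusterShutdownIT.

// Assumed declaration of the closeTwice rule (JUnit 4),
// requiring org.junit.Rule and org.junit.rules.ExpectedException.
@Rule
public ExpectedException closeTwice = ExpectedException.none();

With such a rule in place, the second stopCluster(false) call is expected to throw IPCException with that exact message; because the exception ends the test method there, the socket-binding block only runs when the second call does not throw.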
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
The class WordCountMain, method main.
public static void main(String[] args) throws Exception {
    Options options = new Options();
    CmdLineParser parser = new CmdLineParser(options);
    parser.parseArgument(args);
    IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
    JobSpecification job = createJob(parseFileSplits(options.inFileSplits), parseFileSplits(options.outFileSplits),
            options.algo, options.htSize, options.memFrameLimit, options.format, options.frameSize);
    long start = System.currentTimeMillis();
    JobId jobId = hcc.startJob(job,
            options.runtimeProfiling ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
    hcc.waitForCompletion(jobId);
    long end = System.currentTimeMillis();
    System.err.println(start + " " + end + " " + (end - start));
}
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
The class DataflowTest, method testHDFSReadWriteOperators.
/**
 * Test a job with only HDFS reads and writes.
 *
 * @throws Exception
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public void testHDFSReadWriteOperators() throws Exception {
    FileInputFormat.setInputPaths(conf, HDFS_INPUT_PATH);
    FileOutputFormat.setOutputPath(conf, new Path(HDFS_OUTPUT_PATH));
    conf.setInputFormatClass(TextInputFormat.class);
    Scheduler scheduler = new Scheduler(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
    InputFormat inputFormat = ReflectionUtils.newInstance(conf.getInputFormatClass(), getConfiguration());
    List<InputSplit> splits = inputFormat.getSplits(conf);
    String[] readSchedule = scheduler.getLocationConstraints(splits);
    JobSpecification jobSpec = new JobSpecification();
    RecordDescriptor recordDesc =
            new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
    String[] locations =
            new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID, HyracksUtils.NC2_ID };
    HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
            readSchedule, new TextKeyValueParserFactory());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
    ExternalSortOperatorDescriptor sortOperator = new ExternalSortOperatorDescriptor(jobSpec, 10, new int[] { 0 },
            new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
    HDFSWriteOperatorDescriptor writeOperator =
            new HDFSWriteOperatorDescriptor(jobSpec, conf, new TextTupleWriterFactory());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
    jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
    jobSpec.connect(new MToNPartitioningMergingConnectorDescriptor(jobSpec,
            new FieldHashPartitionComputerFactory(new int[] { 0 },
                    new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
            new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
            sortOperator, 0, writeOperator, 0);
    jobSpec.addRoot(writeOperator);
    IHyracksClientConnection client =
            new HyracksConnection(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
    JobId jobId = client.startJob(jobSpec);
    client.waitForCompletion(jobId);
    Assert.assertEquals(true, checkResults());
}
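The conf object above is used with both the mapreduce-style setInputFormatClass/getInputFormatClass calls and the FileInputFormat/FileOutputFormat helpers, which points to a Hadoop Job configured in the test fixture. The sketch below is an assumption about what that setup could look like; the paths and the buildConf() helper are placeholders, not taken from DataflowTest.

// Assumed fixture-style setup for the Hadoop job configuration used above.
// The buildConf() helper and the paths are placeholders.
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class HdfsJobConfSketch {
    public static Job buildConf() throws Exception {
        Job conf = Job.getInstance();
        // Same input format the test installs before reading splits.
        conf.setInputFormatClass(TextInputFormat.class);
        // Placeholder paths standing in for HDFS_INPUT_PATH / HDFS_OUTPUT_PATH.
        FileInputFormat.setInputPaths(conf, new Path("/tmp/input"));
        FileOutputFormat.setOutputPath(conf, new Path("/tmp/output"));
        return conf;
    }
}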
Use of org.apache.hyracks.api.client.IHyracksClientConnection in project asterixdb by apache.
The class OptimizerTest, method test.
@Test
public void test() throws Exception {
    try {
        String queryFileShort =
                queryFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0), '/');
        if (!only.isEmpty()) {
            boolean toRun = TestHelper.isInPrefixList(only, queryFileShort);
            if (!toRun) {
                LOGGER.info("SKIP TEST: \"" + queryFile.getPath()
                        + "\" \"only.txt\" not empty and not in \"only.txt\".");
            }
            Assume.assumeTrue(toRun);
        }
        boolean skipped = TestHelper.isInPrefixList(ignore, queryFileShort);
        if (skipped) {
            LOGGER.info("SKIP TEST: \"" + queryFile.getPath() + "\" in \"ignore.txt\".");
        }
        Assume.assumeTrue(!skipped);
        LOGGER.info("RUN TEST: \"" + queryFile.getPath() + "\"");
        Reader query = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
        // Forces the creation of actualFile.
        actualFile.getParentFile().mkdirs();
        PrintWriter plan = new PrintWriter(actualFile);
        ILangCompilationProvider provider =
                queryFile.getName().endsWith("aql") ? aqlCompilationProvider : sqlppCompilationProvider;
        if (extensionLangCompilationProvider != null) {
            provider = extensionLangCompilationProvider;
        }
        IHyracksClientConnection hcc = integrationUtil.getHyracksClientConnection();
        AsterixJavaClient asterix = new AsterixJavaClient(
                (ICcApplicationContext) integrationUtil.cc.getApplicationContext(), hcc, query, plan, provider,
                statementExecutorFactory, storageComponentProvider);
        try {
            asterix.compile(true, false, false, true, true, false, false);
        } catch (AsterixException e) {
            plan.close();
            query.close();
            throw new Exception("Compile ERROR for " + queryFile + ": " + e.getMessage(), e);
        }
        plan.close();
        query.close();
        BufferedReader readerExpected =
                new BufferedReader(new InputStreamReader(new FileInputStream(expectedFile), "UTF-8"));
        BufferedReader readerActual =
                new BufferedReader(new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
        String lineExpected, lineActual;
        int num = 1;
        try {
            while ((lineExpected = readerExpected.readLine()) != null) {
                lineActual = readerActual.readLine();
                if (lineActual == null) {
                    throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< "
                            + lineExpected + "\n> ");
                }
                if (!lineExpected.equals(lineActual)) {
                    throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< "
                            + lineExpected + "\n> " + lineActual);
                }
                ++num;
            }
            lineActual = readerActual.readLine();
            if (lineActual != null) {
                throw new Exception("Result for " + queryFile + " changed at line " + num + ":\n< \n> " + lineActual);
            }
            LOGGER.info("Test \"" + queryFile.getPath() + "\" PASSED!");
            actualFile.delete();
        } finally {
            readerExpected.close();
            readerActual.close();
        }
    } catch (Exception e) {
        if (!(e instanceof AssumptionViolatedException)) {
            LOGGER.severe("Test \"" + queryFile.getPath() + "\" FAILED!");
            throw new Exception("Test \"" + queryFile.getPath() + "\" FAILED!", e);
        } else {
            throw e;
        }
    }
}
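The middle of the test is a plain line-by-line diff of the expected and actual plan files. Extracted into a standalone helper (hypothetical, not part of OptimizerTest), the same logic looks roughly like this:

// Hypothetical helper equivalent to the comparison loop above: fails on the first
// line where the expected and actual files differ, or when the actual file has extra lines.
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public final class LineDiff {
    public static void assertSameLines(File expected, File actual) throws Exception {
        try (BufferedReader readerExpected = new BufferedReader(
                new InputStreamReader(new FileInputStream(expected), StandardCharsets.UTF_8));
                BufferedReader readerActual = new BufferedReader(
                        new InputStreamReader(new FileInputStream(actual), StandardCharsets.UTF_8))) {
            String lineExpected;
            int num = 1;
            while ((lineExpected = readerExpected.readLine()) != null) {
                String lineActual = readerActual.readLine();
                if (!lineExpected.equals(lineActual)) {
                    throw new Exception("Changed at line " + num + ":\n< " + lineExpected + "\n> "
                            + (lineActual == null ? "" : lineActual));
                }
                ++num;
            }
            String extra = readerActual.readLine();
            if (extra != null) {
                throw new Exception("Changed at line " + num + ":\n< \n> " + extra);
            }
        }
    }
}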