Use of org.apache.hyracks.api.job.JobId in project asterixdb by apache.
The class Join, method main:
public static void main(String[] args) throws Exception {
    Options options = new Options();
    CmdLineParser parser = new CmdLineParser(options);
    if (args.length == 0) {
        parser.printUsage(System.err);
        return;
    }
    parser.parseArgument(args);
    IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
    JobSpecification job = createJob(parseFileSplits(options.inFileCustomerSplits),
            parseFileSplits(options.inFileOrderSplits), parseFileSplits(options.outFileSplits),
            options.numJoinPartitions, options.algo, options.graceInputSize, options.graceRecordsPerFrame,
            options.graceFactor, options.memSize, options.tableSize, options.hasGroupBy, options.frameSize);
    if (job == null) {
        return;
    }
    long start = System.currentTimeMillis();
    // Submit the job, optionally with runtime profiling, and block on the returned JobId.
    JobId jobId = hcc.startJob(job,
            options.profile ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
    hcc.waitForCompletion(jobId);
    long end = System.currentTimeMillis();
    // Report start time, end time, and elapsed milliseconds.
    System.err.println(start + " " + end + " " + (end - start));
}
Use of org.apache.hyracks.api.job.JobId in project asterixdb by apache.
The class WordCountMain, method main:
public static void main(String[] args) throws Exception {
    Options options = new Options();
    CmdLineParser parser = new CmdLineParser(options);
    parser.parseArgument(args);
    IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
    JobSpecification job = createJob(parseFileSplits(options.inFileSplits), parseFileSplits(options.outFileSplits),
            options.algo, options.htSize, options.memFrameLimit, options.format, options.frameSize);
    long start = System.currentTimeMillis();
    // Submit the job, optionally with runtime profiling, and block on the returned JobId.
    JobId jobId = hcc.startJob(job,
            options.runtimeProfiling ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
    hcc.waitForCompletion(jobId);
    long end = System.currentTimeMillis();
    // Report start time, end time, and elapsed milliseconds.
    System.err.println(start + " " + end + " " + (end - start));
}
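Both command-line clients above share the same JobId lifecycle: connect to the cluster controller, start the job (optionally with the PROFILE_RUNTIME flag), and block on the returned JobId while timing the run. A minimal sketch of that pattern factored into a helper; the class and method names here are illustrative, not part of asterixdb:

import java.util.EnumSet;

import org.apache.hyracks.api.client.HyracksConnection;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.job.JobFlag;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobSpecification;

public final class SubmitAndWait {
    /**
     * Submits jobSpec to the cluster controller at host:port and blocks until
     * it finishes; returns elapsed wall-clock milliseconds. jobSpec must
     * already be fully built (operators, connectors, roots).
     */
    static long runJob(String host, int port, JobSpecification jobSpec, boolean profile) throws Exception {
        IHyracksClientConnection hcc = new HyracksConnection(host, port);
        long start = System.currentTimeMillis();
        JobId jobId = hcc.startJob(jobSpec,
                profile ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
        hcc.waitForCompletion(jobId); // blocks until the job succeeds or fails
        return System.currentTimeMillis() - start;
    }
}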
Use of org.apache.hyracks.api.job.JobId in project asterixdb by apache.
The class DataflowTest, method testHDFSReadWriteOperators:
/**
 * Test a job with only HDFS reads and writes.
 *
 * @throws Exception
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public void testHDFSReadWriteOperators() throws Exception {
    FileInputFormat.setInputPaths(conf, HDFS_INPUT_PATH);
    FileOutputFormat.setOutputPath(conf, new Path(HDFS_OUTPUT_PATH));
    conf.setInputFormatClass(TextInputFormat.class);
    Scheduler scheduler = new Scheduler(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
    InputFormat inputFormat = ReflectionUtils.newInstance(conf.getInputFormatClass(), getConfiguration());
    List<InputSplit> splits = inputFormat.getSplits(conf);
    String[] readSchedule = scheduler.getLocationConstraints(splits);

    JobSpecification jobSpec = new JobSpecification();
    RecordDescriptor recordDesc =
            new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
    String[] locations =
            new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID, HyracksUtils.NC2_ID };

    // Dataflow: HDFS read (4 partitions on NC1/NC2) -> external sort -> HDFS write (1 partition on NC1).
    HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
            readSchedule, new TextKeyValueParserFactory());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
    ExternalSortOperatorDescriptor sortOperator = new ExternalSortOperatorDescriptor(jobSpec, 10, new int[] { 0 },
            new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
    HDFSWriteOperatorDescriptor writeOperator =
            new HDFSWriteOperatorDescriptor(jobSpec, conf, new TextTupleWriterFactory());
    PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
    jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
    // Hash-partition on field 0 and merge the sorted streams into the single writer.
    jobSpec.connect(new MToNPartitioningMergingConnectorDescriptor(jobSpec,
            new FieldHashPartitionComputerFactory(new int[] { 0 },
                    new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
            new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
            sortOperator, 0, writeOperator, 0);
    jobSpec.addRoot(writeOperator);

    IHyracksClientConnection client =
            new HyracksConnection(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
    JobId jobId = client.startJob(jobSpec);
    client.waitForCompletion(jobId);
    Assert.assertTrue(checkResults());
}
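A JobId is a thin wrapper around a numeric id, so in principle a client other than the submitter can reconstruct it and wait on the same job. A hedged sketch; the host, port, and the idea of passing the id on the command line are assumptions for illustration, and waiting on an id the cluster controller no longer knows about is expected to fail:

import org.apache.hyracks.api.client.HyracksConnection;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.hyracks.api.job.JobId;

public final class WaitOnExistingJob {
    public static void main(String[] args) throws Exception {
        // Assumed cluster controller address for illustration.
        IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);
        // Reconstruct the JobId from a numeric id of a job started elsewhere.
        JobId jobId = new JobId(Long.parseLong(args[0]));
        hcc.waitForCompletion(jobId);
        System.err.println("job " + jobId + " completed");
    }
}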
Use of org.apache.hyracks.api.job.JobId in project asterixdb by apache.
The class BuildJobProfilesWork, method doRun:
@Override
protected void doRun() throws Exception {
    // One JobProfile per joblet currently active on this node controller.
    List<JobProfile> profiles = new ArrayList<JobProfile>();
    Map<JobId, Joblet> jobletMap = ncs.getJobletMap();
    for (Joblet ji : jobletMap.values()) {
        profiles.add(new JobProfile(ji.getJobId()));
    }
    for (JobProfile jProfile : profiles) {
        Joblet ji = jobletMap.get(jProfile.getJobId());
        if (ji != null) {
            // Dump this node's counters into a JobletProfile and attach it under the NC id.
            JobletProfile jobletProfile = new JobletProfile(ncs.getId());
            ji.dumpProfile(jobletProfile);
            jProfile.getJobletProfiles().put(ncs.getId(), jobletProfile);
        }
    }
    fv.setValue(profiles);
}
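doRun first materializes one JobProfile per joblet and then re-resolves each joblet to attach this node's JobletProfile. The two passes can be collapsed into one; a minimal equivalent sketch, where the class and method names are illustrative, the import paths follow the Hyracks control modules, and the joblet map is assumed not to be mutated while iterating:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.control.common.job.profiling.om.JobProfile;
import org.apache.hyracks.control.common.job.profiling.om.JobletProfile;
import org.apache.hyracks.control.nc.Joblet;

final class ProfileCollector {
    /** Single-pass equivalent of doRun()'s two loops. */
    static List<JobProfile> collect(Map<JobId, Joblet> jobletMap, String ncId) {
        List<JobProfile> profiles = new ArrayList<JobProfile>();
        for (Joblet ji : jobletMap.values()) {
            JobProfile jProfile = new JobProfile(ji.getJobId());
            JobletProfile jobletProfile = new JobletProfile(ncId);
            ji.dumpProfile(jobletProfile);
            jProfile.getJobletProfiles().put(ncId, jobletProfile);
            profiles.add(jProfile);
        }
        return profiles;
    }
}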
Use of org.apache.hyracks.api.job.JobId in project asterixdb by apache.
The class ReportPartitionAvailabilityWork, method run:
@Override
public void run() {
    try {
        // Resolve the joblet for this partition's job; a null result means the
        // job has no tasks on this node (or has already completed).
        Map<JobId, Joblet> jobletMap = ncs.getJobletMap();
        Joblet ji = jobletMap.get(pid.getJobId());
        if (ji != null) {
            // Open a network channel to the node holding the partition and hand
            // it to the joblet so consumers can start reading.
            PartitionChannel channel = new PartitionChannel(pid,
                    new NetworkInputChannel(ncs.getNetworkManager(),
                            new InetSocketAddress(InetAddress.getByAddress(networkAddress.lookupIpAddress()),
                                    networkAddress.getPort()),
                            pid, 5));
            ji.reportPartitionAvailability(channel);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}