Use of uk.gov.gchq.gaffer.operation.OperationException in the project Gaffer by gchq.
The class SummariseGroupOverRangesHandler, method doOperation.
/**
 * Runs a summarise-over-ranges query against the Accumulo store.
 * <p>
 * Exactly one group (edge or entity) must be selected in the operation's view;
 * that group is used as the Accumulo column family for the row-ID aggregator.
 *
 * @param operation the summarise-group-over-ranges operation to execute
 * @param user      the user running the operation
 * @param store     the Accumulo store to query
 * @return a closeable iterable over the summarised elements
 * @throws OperationException if the view does not select exactly one group,
 *                            or if the retriever cannot be constructed
 */
public CloseableIterable<Element> doOperation(final SummariseGroupOverRanges<Pair<ElementSeed>, Element> operation, final User user, final AccumuloStore store) throws OperationException {
    final int edgeGroupCount = operation.getView().getEdgeGroups().size();
    final int entityGroupCount = operation.getView().getEntityGroups().size();
    if (edgeGroupCount + entityGroupCount != 1) {
        throw new OperationException("You may only set one Group in your view for this operation.");
    }

    // The single selected group doubles as the column family to aggregate over.
    final String columnFamily = (1 == edgeGroupCount)
            ? (String) operation.getView().getEdgeGroups().toArray()[0]
            : (String) operation.getView().getEntityGroups().toArray()[0];

    final IteratorSettingFactory iteratorFactory = store.getKeyPackage().getIteratorFactory();
    try {
        return new AccumuloRangeIDRetriever(store, operation, user,
                iteratorFactory.getElementPreAggregationFilterIteratorSetting(operation.getView(), store),
                iteratorFactory.getElementPostAggregationFilterIteratorSetting(operation.getView(), store),
                iteratorFactory.getEdgeEntityDirectionFilterIteratorSetting(operation),
                iteratorFactory.getElementPropertyRangeQueryFilter(operation),
                iteratorFactory.getRowIDAggregatorIteratorSetting(store, columnFamily));
    } catch (final IteratorSettingException | StoreException e) {
        throw new OperationException("Failed to get elements", e);
    }
}
Use of uk.gov.gchq.gaffer.operation.OperationException in the project Gaffer by gchq.
The class AddElementsFromHdfsHandler, method importElements.
/**
 * Bulk-imports the files produced by the add-elements job into Accumulo.
 * <p>
 * Delegates to {@link ImportElementsToAccumuloTool} via Hadoop's
 * {@code ToolRunner} and fails the operation if the tool either throws or
 * reports a non-success response code.
 *
 * @param operation the add-elements-from-HDFS operation (supplies the output
 *                  and failure paths)
 * @param store     the Accumulo store to import into
 * @throws OperationException if the import tool throws or returns a
 *                            non-success response code
 */
private void importElements(final AddElementsFromHdfs operation, final AccumuloStore store) throws OperationException {
    final ImportElementsToAccumuloTool importTool =
            new ImportElementsToAccumuloTool(operation.getOutputPath(), operation.getFailurePath(), store);
    final int response;
    try {
        LOGGER.info("Running import job");
        response = ToolRunner.run(importTool, new String[0]);
        LOGGER.info("Finished running import job");
    } catch (final Exception e) {
        // ToolRunner declares a broad checked Exception; translate it into the
        // operation failure type while preserving the cause.
        LOGGER.error("Failed to import elements into Accumulo: {}", e.getMessage());
        throw new OperationException("Failed to import elements into Accumulo", e);
    }
    // The tool reports some failures via its exit code rather than by throwing.
    if (response != ImportElementsToAccumuloTool.SUCCESS_RESPONSE) {
        LOGGER.error("Failed to import elements into Accumulo. Response code was {}", response);
        throw new OperationException("Failed to import elements into Accumulo. Response code was: " + response);
    }
}
Use of uk.gov.gchq.gaffer.operation.OperationException in the project Gaffer by gchq.
The class SplitTableTool, method run.
/**
 * Reads split points (one per line, UTF-8) from the operation's input path and
 * adds them to the store's Accumulo table.
 *
 * @param arg0 unused command-line arguments (required by the Tool interface)
 * @return {@code SUCCESS_RESPONSE} on success
 * @throws OperationException if the file system cannot be obtained, the input
 *                            file cannot be read, or the splits cannot be added
 */
@Override
public int run(final String[] arg0) throws OperationException {
    LOGGER.info("Running SplitTableTool");
    final Configuration conf = getConf();

    // Resolve the Hadoop file system holding the split-points file.
    final FileSystem fs;
    try {
        fs = FileSystem.get(conf);
    } catch (final IOException e) {
        throw new OperationException("Failed to get Filesystem from configuration: " + e.getMessage(), e);
    }

    // Collect one split point per line; a sorted set is required by
    // Accumulo's addSplits API.
    final SortedSet<Text> splits = new TreeSet<>();
    try (final BufferedReader reader = new BufferedReader(
            new InputStreamReader(fs.open(new Path(operation.getInputPath())), CommonConstants.UTF_8))) {
        for (String line = reader.readLine(); line != null; line = reader.readLine()) {
            splits.add(new Text(line));
        }
    } catch (final IOException e) {
        throw new OperationException(e.getMessage(), e);
    }

    try {
        store.getConnection().tableOperations().addSplits(store.getProperties().getTable(), splits);
        LOGGER.info("Added {} splits to table {}", splits.size(), store.getProperties().getTable());
    } catch (final TableNotFoundException | AccumuloException | AccumuloSecurityException | StoreException e) {
        LOGGER.error("Failed to add {} split points to table {}", splits.size(), store.getProperties().getTable());
        throw new OperationException("Failed to add split points to the table specified: " + e.getMessage(), e);
    }
    return SUCCESS_RESPONSE;
}
Use of uk.gov.gchq.gaffer.operation.OperationException in the project Gaffer by gchq.
The class AddElementsFromHdfsIT, method shouldThrowExceptionWhenAddElementsFromHdfsWhenOutputDirectoryContainsFiles.
/**
 * Verifies that AddElementsFromHdfs is rejected for both key packages when the
 * output directory already contains a file.
 */
@Test
public void shouldThrowExceptionWhenAddElementsFromHdfsWhenOutputDirectoryContainsFiles() throws Exception {
    // Given - an output directory that already contains a file.
    final FileSystem fs = FileSystem.getLocal(createLocalConf());
    fs.mkdirs(new Path(outputDir));
    try (final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fs.create(new Path(outputDir + "/someFile.txt"), true)))) {
        writer.write("Some content");
    }

    // When / Then - both key packages must refuse the non-empty directory.
    // (Helper removes the previously duplicated try/fail/catch/assert block.)
    assertAddElementsFailsForNonEmptyOutputDir(ByteEntityKeyPackage.class);
    assertAddElementsFailsForNonEmptyOutputDir(ClassicKeyPackage.class);
}

/**
 * Runs addElementsFromHdfs with the given key package and asserts it fails
 * because the output directory is not empty.
 *
 * @param keyPackage the Accumulo key package class to run the import with
 */
private void assertAddElementsFailsForNonEmptyOutputDir(final Class<? extends AccumuloKeyPackage> keyPackage) throws Exception {
    try {
        addElementsFromHdfs(keyPackage);
        fail("Exception expected");
    } catch (final OperationException e) {
        assertEquals("Output directory exists and is not empty: " + outputDir, e.getCause().getMessage());
    }
}
Use of uk.gov.gchq.gaffer.operation.OperationException in the project Gaffer by gchq.
The class Store, method executeJob.
/**
 * Executes a given operation chain as a job and returns the job detail.
 * <p>
 * The chain is executed asynchronously on a new thread: the returned
 * {@code JobDetail} reflects the initial {@code RUNNING} state, and the job
 * tracker is updated to {@code FINISHED} or {@code FAILED} when the chain
 * completes. If the store supports {@code ExportToGafferResultCache} and the
 * chain does not already contain one, an export is appended so the job's
 * results can be retrieved later.
 *
 * @param operationChain the operation chain to execute.
 * @param user the user executing the job
 * @return the job detail
 * @throws OperationException thrown if jobs are not configured.
 */
public JobDetail executeJob(final OperationChain<?> operationChain, final User user) throws OperationException {
    if (null == jobTracker) {
        throw new OperationException("Running jobs has not been configured.");
    }
    final Context context = createContext(user);

    // Append a result-cache export (if supported and not already present) so
    // the job's results are retrievable via the job tracker.
    if (isSupported(ExportToGafferResultCache.class)) {
        boolean hasExport = false;
        for (final Operation operation : operationChain.getOperations()) {
            if (operation instanceof ExportToGafferResultCache) {
                hasExport = true;
                break;
            }
        }
        if (!hasExport) {
            operationChain.getOperations().add(new ExportToGafferResultCache());
        }
    }

    final JobDetail initialJobDetail = addOrUpdateJobDetail(operationChain, context, null, JobStatus.RUNNING);
    // Execute asynchronously; status transitions are recorded in the tracker.
    // Throwable is caught deliberately so the job is always marked FAILED
    // rather than dying silently on the background thread.
    new Thread(() -> {
        try {
            _execute(operationChain, context);
            addOrUpdateJobDetail(operationChain, context, null, JobStatus.FINISHED);
        } catch (final Throwable t) {
            LOGGER.warn("Operation chain job failed to execute", t);
            addOrUpdateJobDetail(operationChain, context, t.getMessage(), JobStatus.FAILED);
        }
    }).start();
    return initialJobDetail;
}
Aggregations