
Example 31 with OperationException

Use of uk.gov.gchq.gaffer.operation.OperationException in project Gaffer by gchq.

The class SummariseGroupOverRangesHandler, method doOperation:

public CloseableIterable<Element> doOperation(final SummariseGroupOverRanges<Pair<ElementSeed>, Element> operation, final User user, final AccumuloStore store) throws OperationException {
    final int numEdgeGroups = operation.getView().getEdgeGroups().size();
    final int numEntityGroups = operation.getView().getEntityGroups().size();
    if ((numEdgeGroups + numEntityGroups) != 1) {
        throw new OperationException("You may only set one Group in your view for this operation.");
    }
    final String columnFamily;
    if (numEdgeGroups == 1) {
        columnFamily = (String) operation.getView().getEdgeGroups().toArray()[0];
    } else {
        columnFamily = (String) operation.getView().getEntityGroups().toArray()[0];
    }
    final IteratorSettingFactory itrFactory = store.getKeyPackage().getIteratorFactory();
    try {
        return new AccumuloRangeIDRetriever(store, operation, user,
                itrFactory.getElementPreAggregationFilterIteratorSetting(operation.getView(), store),
                itrFactory.getElementPostAggregationFilterIteratorSetting(operation.getView(), store),
                itrFactory.getEdgeEntityDirectionFilterIteratorSetting(operation),
                itrFactory.getElementPropertyRangeQueryFilter(operation),
                itrFactory.getRowIDAggregatorIteratorSetting(store, columnFamily));
    } catch (IteratorSettingException | StoreException e) {
        throw new OperationException("Failed to get elements", e);
    }
}
Also used : IteratorSettingFactory(uk.gov.gchq.gaffer.accumulostore.key.IteratorSettingFactory) AccumuloRangeIDRetriever(uk.gov.gchq.gaffer.accumulostore.retriever.impl.AccumuloRangeIDRetriever) IteratorSettingException(uk.gov.gchq.gaffer.accumulostore.key.exception.IteratorSettingException) OperationException(uk.gov.gchq.gaffer.operation.OperationException) StoreException(uk.gov.gchq.gaffer.store.StoreException)
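
For context, a minimal caller-side sketch of how the OperationException raised by this handler surfaces. It assumes a graph, a built SummariseGroupOverRanges operation and a user already exist; those names and the logging are illustrative, not taken from the Gaffer source.

// Hedged sketch: "graph", "operation" and "user" are assumed to be constructed elsewhere.
// If the operation's view names more than one group, the handler above fails the execution
// with the "You may only set one Group in your view for this operation." message.
try {
    final CloseableIterable<Element> results = graph.execute(operation, user);
    for (final Element element : results) {
        LOGGER.info("Summarised element: {}", element);
    }
} catch (final OperationException e) {
    LOGGER.error("Summarise over ranges failed: {}", e.getMessage(), e);
}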

Example 32 with OperationException

Use of uk.gov.gchq.gaffer.operation.OperationException in project Gaffer by gchq.

The class AddElementsFromHdfsHandler, method importElements:

private void importElements(final AddElementsFromHdfs operation, final AccumuloStore store) throws OperationException {
    final ImportElementsToAccumuloTool importTool;
    final int response;
    importTool = new ImportElementsToAccumuloTool(operation.getOutputPath(), operation.getFailurePath(), store);
    try {
        LOGGER.info("Running import job");
        response = ToolRunner.run(importTool, new String[0]);
        LOGGER.info("Finished running import job");
    } catch (final Exception e) {
        LOGGER.error("Failed to import elements into Accumulo: {}", e.getMessage());
        throw new OperationException("Failed to import elements into Accumulo", e);
    }
    if (ImportElementsToAccumuloTool.SUCCESS_RESPONSE != response) {
        LOGGER.error("Failed to import elements into Accumulo. Response code was {}", response);
        throw new OperationException("Failed to import elements into Accumulo. Response code was: " + response);
    }
}
Also used : ImportElementsToAccumuloTool(uk.gov.gchq.gaffer.accumulostore.operation.hdfs.handler.job.tool.ImportElementsToAccumuloTool) OperationException(uk.gov.gchq.gaffer.operation.OperationException)
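
The shape of this method is a reusable pattern: run a Hadoop Tool via ToolRunner, wrap anything it throws in an OperationException, and also translate a non-success return code into one. A minimal sketch under those assumptions follows; MyImportTool, conf and SUCCESS are hypothetical placeholders, not Gaffer classes.

// Hedged sketch of the ToolRunner pattern above. "MyImportTool", "conf" and SUCCESS are
// hypothetical placeholders; ToolRunner.run and OperationException are the only real APIs used.
final Tool importTool = new MyImportTool(conf);
final int response;
try {
    response = ToolRunner.run(importTool, new String[0]);
} catch (final Exception e) {
    throw new OperationException("Import tool failed to run", e);
}
if (SUCCESS != response) {
    throw new OperationException("Import tool returned failure code: " + response);
}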

Example 33 with OperationException

Use of uk.gov.gchq.gaffer.operation.OperationException in project Gaffer by gchq.

The class SplitTableTool, method run:

@Override
public int run(final String[] arg0) throws OperationException {
    LOGGER.info("Running SplitTableTool");
    final Configuration conf = getConf();
    FileSystem fs;
    try {
        fs = FileSystem.get(conf);
    } catch (final IOException e) {
        throw new OperationException("Failed to get Filesystem from configuration: " + e.getMessage(), e);
    }
    final SortedSet<Text> splits = new TreeSet<>();
    try (final BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(new Path(operation.getInputPath())), CommonConstants.UTF_8))) {
        String line = br.readLine();
        while (line != null) {
            splits.add(new Text(line));
            line = br.readLine();
        }
    } catch (final IOException e) {
        throw new OperationException(e.getMessage(), e);
    }
    try {
        store.getConnection().tableOperations().addSplits(store.getProperties().getTable(), splits);
        LOGGER.info("Added {} splits to table {}", splits.size(), store.getProperties().getTable());
    } catch (final TableNotFoundException | AccumuloException | AccumuloSecurityException | StoreException e) {
        LOGGER.error("Failed to add {} split points to table {}", splits.size(), store.getProperties().getTable());
        throw new OperationException("Failed to add split points to the table specified: " + e.getMessage(), e);
    }
    return SUCCESS_RESPONSE;
}
Also used : Path(org.apache.hadoop.fs.Path) AccumuloException(org.apache.accumulo.core.client.AccumuloException) Configuration(org.apache.hadoop.conf.Configuration) InputStreamReader(java.io.InputStreamReader) Text(org.apache.hadoop.io.Text) IOException(java.io.IOException) StoreException(uk.gov.gchq.gaffer.store.StoreException) TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) TreeSet(java.util.TreeSet) FileSystem(org.apache.hadoop.fs.FileSystem) BufferedReader(java.io.BufferedReader) AccumuloSecurityException(org.apache.accumulo.core.client.AccumuloSecurityException) OperationException(uk.gov.gchq.gaffer.operation.OperationException)
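
The tool expects one split point per line in a UTF-8 encoded file on the configured FileSystem. A hedged sketch of producing such a file is below; the path and split values are illustrative, and fs is assumed to be a Hadoop FileSystem obtained as in the run method above.

// Hedged sketch: write a splits file that a tool like the one above could read,
// one split point per line, UTF-8 encoded. The path and the split values are assumptions.
final Path splitsPath = new Path("/tmp/splits.txt");
try (final BufferedWriter writer = new BufferedWriter(
        new OutputStreamWriter(fs.create(splitsPath, true), CommonConstants.UTF_8))) {
    for (final String split : Arrays.asList("row_m", "row_s")) {
        writer.write(split);
        writer.newLine();
    }
}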

Example 34 with OperationException

Use of uk.gov.gchq.gaffer.operation.OperationException in project Gaffer by gchq.

The class AddElementsFromHdfsIT, method shouldThrowExceptionWhenAddElementsFromHdfsWhenOutputDirectoryContainsFiles:

@Test
public void shouldThrowExceptionWhenAddElementsFromHdfsWhenOutputDirectoryContainsFiles() throws Exception {
    final FileSystem fs = FileSystem.getLocal(createLocalConf());
    fs.mkdirs(new Path(outputDir));
    try (final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fs.create(new Path(outputDir + "/someFile.txt"), true)))) {
        writer.write("Some content");
    }
    try {
        addElementsFromHdfs(ByteEntityKeyPackage.class);
        fail("Exception expected");
    } catch (final OperationException e) {
        assertEquals("Output directory exists and is not empty: " + outputDir, e.getCause().getMessage());
    }
    try {
        addElementsFromHdfs(ClassicKeyPackage.class);
        fail("Exception expected");
    } catch (final OperationException e) {
        assertEquals("Output directory exists and is not empty: " + outputDir, e.getCause().getMessage());
    }
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) OutputStreamWriter(java.io.OutputStreamWriter) OperationException(uk.gov.gchq.gaffer.operation.OperationException) BufferedWriter(java.io.BufferedWriter) Test(org.junit.Test)
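
The try/fail/catch pattern here can be written more compactly with JUnit's assertThrows, which was added in JUnit 4.13; the sketch below is a hedged equivalent for one of the two key packages, assuming that newer JUnit version is on the classpath.

// Hedged alternative, assuming JUnit 4.13+ (org.junit.Assert.assertThrows is not available earlier):
final OperationException e = assertThrows(OperationException.class,
        () -> addElementsFromHdfs(ByteEntityKeyPackage.class));
assertEquals("Output directory exists and is not empty: " + outputDir, e.getCause().getMessage());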

Example 35 with OperationException

Use of uk.gov.gchq.gaffer.operation.OperationException in project Gaffer by gchq.

The class Store, method executeJob:

/**
 * Executes a given operation chain job and returns the job detail.
 *
 * @param operationChain the operation chain to execute.
 * @param user           the user executing the job
 * @return the job detail
 * @throws OperationException thrown if jobs are not configured.
 */
public JobDetail executeJob(final OperationChain<?> operationChain, final User user) throws OperationException {
    if (null == jobTracker) {
        throw new OperationException("Running jobs has not configured.");
    }
    final Context context = createContext(user);
    if (isSupported(ExportToGafferResultCache.class)) {
        boolean hasExport = false;
        for (final Operation operation : operationChain.getOperations()) {
            if (operation instanceof ExportToGafferResultCache) {
                hasExport = true;
                break;
            }
        }
        if (!hasExport) {
            operationChain.getOperations().add(new ExportToGafferResultCache());
        }
    }
    final JobDetail initialJobDetail = addOrUpdateJobDetail(operationChain, context, null, JobStatus.RUNNING);
    new Thread(() -> {
        try {
            _execute(operationChain, context);
            addOrUpdateJobDetail(operationChain, context, null, JobStatus.FINISHED);
        } catch (final Throwable t) {
            LOGGER.warn("Operation chain job failed to execute", t);
            addOrUpdateJobDetail(operationChain, context, t.getMessage(), JobStatus.FAILED);
        }
    }).start();
    return initialJobDetail;
}
Also used : JobDetail(uk.gov.gchq.gaffer.jobtracker.JobDetail) ExportToGafferResultCache(uk.gov.gchq.gaffer.operation.impl.export.resultcache.ExportToGafferResultCache) Operation(uk.gov.gchq.gaffer.operation.Operation) OperationException(uk.gov.gchq.gaffer.operation.OperationException)
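
From the caller's side, the returned JobDetail is the handle used to track the asynchronous chain. A minimal hedged sketch follows; it assumes the Graph exposes this method as graph.executeJob and that operationChain and user are already built, with illustrative logging.

// Hedged sketch: submit a chain as a job and keep the JobDetail for later status checks.
// If no job tracker is configured, this surfaces the OperationException thrown above.
try {
    final JobDetail jobDetail = graph.executeJob(operationChain, user);
    LOGGER.info("Submitted job {} with initial status {}", jobDetail.getJobId(), jobDetail.getStatus());
} catch (final OperationException e) {
    LOGGER.error("Could not submit job: {}", e.getMessage(), e);
}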

Aggregations

OperationException (uk.gov.gchq.gaffer.operation.OperationException): 38 usages
User (uk.gov.gchq.gaffer.user.User): 9 usages
StoreException (uk.gov.gchq.gaffer.store.StoreException): 7 usages
Element (uk.gov.gchq.gaffer.data.element.Element): 6 usages
IOException (java.io.IOException): 4 usages
ArrayList (java.util.ArrayList): 4 usages
Configuration (org.apache.hadoop.conf.Configuration): 4 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 4 usages
Path (org.apache.hadoop.fs.Path): 4 usages
Test (org.junit.Test): 4 usages
Graph (uk.gov.gchq.gaffer.graph.Graph): 4 usages
SparkConf (org.apache.spark.SparkConf): 3 usages
IteratorSettingException (uk.gov.gchq.gaffer.accumulostore.key.exception.IteratorSettingException): 3 usages
Edge (uk.gov.gchq.gaffer.data.element.Edge): 3 usages
JobDetail (uk.gov.gchq.gaffer.jobtracker.JobDetail): 3 usages
AddElements (uk.gov.gchq.gaffer.operation.impl.add.AddElements): 3 usages
BufferedWriter (java.io.BufferedWriter): 2 usages
ByteArrayInputStream (java.io.ByteArrayInputStream): 2 usages
DataInputStream (java.io.DataInputStream): 2 usages
OutputStreamWriter (java.io.OutputStreamWriter): 2 usages