Example 1 with OperationException

Use of uk.gov.gchq.gaffer.operation.OperationException in project Gaffer by gchq.

From the class FetchElementsFromHdfsTool, the run method:

@Override
public int run(final String[] strings) throws Exception {
    checkHdfsDirectories(operation);
    LOGGER.info("Ensuring table {} exists", store.getProperties().getTable());
    TableUtils.ensureTableExists(store);
    LOGGER.info("Adding elements from HDFS");
    final Job job = new AccumuloAddElementsFromHdfsJobFactory().createJob(operation, store);
    job.waitForCompletion(true);
    if (!job.isSuccessful()) {
        LOGGER.error("Error running job");
        throw new OperationException("Error running job");
    }
    LOGGER.info("Finished adding elements from HDFS");
    return SUCCESS_RESPONSE;
}
Also used: AccumuloAddElementsFromHdfsJobFactory (uk.gov.gchq.gaffer.accumulostore.operation.hdfs.handler.job.factory.AccumuloAddElementsFromHdfsJobFactory), Job (org.apache.hadoop.mapreduce.Job), OperationException (uk.gov.gchq.gaffer.operation.OperationException)
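
Because the tool implements Hadoop's Tool contract, run is normally driven through ToolRunner rather than called directly. A minimal sketch of that invocation follows; the two-argument FetchElementsFromHdfsTool constructor is an assumption inferred from the fields used above, not confirmed by this snippet.

// Illustrative sketch only: drive the tool through Hadoop's ToolRunner so
// that generic Hadoop options are parsed before run() is invoked.
// The (operation, store) constructor is assumed from the fields used above.
final FetchElementsFromHdfsTool fetchTool = new FetchElementsFromHdfsTool(operation, store);
final int status = ToolRunner.run(new Configuration(), fetchTool, new String[0]);
if (status != SUCCESS_RESPONSE) {
    throw new OperationException("FetchElementsFromHdfsTool exited with status " + status);
}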

Example 2 with OperationException

Use of uk.gov.gchq.gaffer.operation.OperationException in project Gaffer by gchq.

From the class SampleDataAndCreateSplitsFileTool, the run method:

@Override
public int run(final String[] strings) throws OperationException {
    try {
        LOGGER.info("Creating job using SampleDataForSplitPointsJobFactory");
        job = new SampleDataForSplitPointsJobFactory().createJob(operation, store);
    } catch (final IOException e) {
        LOGGER.error("Failed to create Hadoop job: {}", e.getMessage());
        throw new OperationException("Failed to create the Hadoop job: " + e.getMessage(), e);
    }
    try {
        LOGGER.info("Running SampleDataForSplitPoints job (job name is {})", job.getJobName());
        job.waitForCompletion(true);
    } catch (final IOException | InterruptedException | ClassNotFoundException e) {
        LOGGER.error("Exception running job: {}", e.getMessage());
        throw new OperationException("Error while waiting for job to complete: " + e.getMessage(), e);
    }
    try {
        if (!job.isSuccessful()) {
            LOGGER.error("Job was not successful (job name is {})", job.getJobName());
            throw new OperationException("Error running job");
        }
    } catch (final IOException e) {
        LOGGER.error("Exception running job: {}", e.getMessage());
        throw new OperationException("Error running job: " + e.getMessage(), e);
    }
    // Find the number of records output
    // NB In the following line use mapred.Task.Counter.REDUCE_OUTPUT_RECORDS rather than
    // mapreduce.TaskCounter.REDUCE_OUTPUT_RECORDS as this is more compatible with earlier
    // versions of Hadoop.
    Counter counter;
    try {
        counter = job.getCounters().findCounter(Task.Counter.REDUCE_OUTPUT_RECORDS);
        LOGGER.info("Number of records output = {}", counter);
    } catch (final IOException e) {
        LOGGER.error("Failed to get counter org.apache.hadoop.mapred.Task.Counter.REDUCE_OUTPUT_RECORDS from job: {}", e.getMessage());
        throw new OperationException("Failed to get counter: " + Task.Counter.REDUCE_OUTPUT_RECORDS, e);
    }
    int numberTabletServers;
    try {
        numberTabletServers = store.getConnection().instanceOperations().getTabletServers().size();
        LOGGER.info("Number of tablet servers is {}", numberTabletServers);
    } catch (final StoreException e) {
        LOGGER.error("Exception thrown getting number of tablet servers: {}", e.getMessage());
        throw new OperationException(e.getMessage(), e);
    }
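    // With N tablet servers we want N - 1 split points, so emit every
    // (records / (N - 1))-th record; note the divisor is zero when only
    // one tablet server exists, which this snippet does not guard against.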
    long outputEveryNthRecord = counter.getValue() / (numberTabletServers - 1);
    final Path resultsFile = new Path(operation.getOutputPath(), "part-r-00000");
    LOGGER.info("Will output every {}-th record from {}", outputEveryNthRecord, resultsFile);
    // Read through resulting file, pick out the split points and write to file.
    final Configuration conf = getConf();
    final FileSystem fs;
    try {
        fs = FileSystem.get(conf);
    } catch (final IOException e) {
        LOGGER.error("Exception getting filesystem: {}", e.getMessage());
        throw new OperationException("Failed to get filesystem from configuration: " + e.getMessage(), e);
    }
    LOGGER.info("Writing splits to {}", operation.getResultingSplitsFilePath());
    final Key key = new Key();
    final Value value = new Value();
    long count = 0;
    int numberSplitPointsOutput = 0;
    try (final SequenceFile.Reader reader = new SequenceFile.Reader(fs, resultsFile, conf);
        final PrintStream splitsWriter = new PrintStream(new BufferedOutputStream(fs.create(new Path(operation.getResultingSplitsFilePath()), true)), false, CommonConstants.UTF_8)) {
        while (reader.next(key, value) && numberSplitPointsOutput < numberTabletServers - 1) {
            count++;
            if (count % outputEveryNthRecord == 0) {
                LOGGER.debug("Outputting split point number {} ({})", numberSplitPointsOutput, Base64.encodeBase64(key.getRow().getBytes()));
                numberSplitPointsOutput++;
                splitsWriter.println(new String(Base64.encodeBase64(key.getRow().getBytes()), CommonConstants.UTF_8));
            }
        }
        LOGGER.info("Total number of records read was {}", count);
    } catch (final IOException e) {
        LOGGER.error("Exception reading results file and outputting split points: {}", e.getMessage());
        throw new OperationException(e.getMessage(), e);
    }
    try {
        fs.delete(resultsFile, true);
        LOGGER.info("Deleted the results file {}", resultsFile);
    } catch (final IOException e) {
        LOGGER.error("Failed to delete the results file {}", resultsFile);
        throw new OperationException("Failed to delete the results file: " + e.getMessage(), e);
    }
    return SUCCESS_RESPONSE;
}
Also used: SampleDataForSplitPointsJobFactory (uk.gov.gchq.gaffer.accumulostore.operation.hdfs.handler.job.factory.SampleDataForSplitPointsJobFactory), Path (org.apache.hadoop.fs.Path), PrintStream (java.io.PrintStream), Configuration (org.apache.hadoop.conf.Configuration), IOException (java.io.IOException), StoreException (uk.gov.gchq.gaffer.store.StoreException), Counter (org.apache.hadoop.mapreduce.Counter), SequenceFile (org.apache.hadoop.io.SequenceFile), FileSystem (org.apache.hadoop.fs.FileSystem), Value (org.apache.accumulo.core.data.Value), BufferedOutputStream (java.io.BufferedOutputStream), OperationException (uk.gov.gchq.gaffer.operation.OperationException), Key (org.apache.accumulo.core.data.Key)
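
The splits file written above only has an effect once its Base64-encoded rows are handed to Accumulo. The following is a hedged sketch of that consumption step, assuming one Base64 row per line as produced by splitsWriter; addSplits is Accumulo's standard TableOperations API, but the surrounding reader code is illustrative rather than taken from Gaffer (imports for SortedSet, TreeSet, Text, BufferedReader, InputStreamReader and StandardCharsets are assumed, and checked exceptions are left to the enclosing method).

// Illustrative sketch: decode each Base64 line back into a row key and
// pre-split the table with Accumulo's TableOperations.addSplits.
final SortedSet<Text> splits = new TreeSet<>();
try (final BufferedReader splitsReader = new BufferedReader(new InputStreamReader(
        fs.open(new Path(operation.getResultingSplitsFilePath())), StandardCharsets.UTF_8))) {
    String line;
    while ((line = splitsReader.readLine()) != null) {
        splits.add(new Text(Base64.decodeBase64(line)));
    }
}
store.getConnection().tableOperations().addSplits(store.getProperties().getTable(), splits);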

Example 3 with OperationException

Use of uk.gov.gchq.gaffer.operation.OperationException in project Gaffer by gchq.

From the class AddElementsFromHdfsIT, the shouldThrowExceptionWhenAddElementsFromHdfsWhenFailureDirectoryContainsFiles test:

@Test
public void shouldThrowExceptionWhenAddElementsFromHdfsWhenFailureDirectoryContainsFiles() throws Exception {
    final FileSystem fs = FileSystem.getLocal(createLocalConf());
    fs.mkdirs(new Path(failureDir));
    try (final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fs.create(new Path(failureDir + "/someFile.txt"), true)))) {
        writer.write("Some content");
    }
    try {
        addElementsFromHdfs(ByteEntityKeyPackage.class);
        fail("Exception expected");
    } catch (final OperationException e) {
        assertEquals("Failure directory is not empty: " + failureDir, e.getCause().getMessage());
    }
    // The previous job will output data successfully to the output dir but not load it.
    fs.delete(new Path(outputDir), true);
    try {
        addElementsFromHdfs(ClassicKeyPackage.class);
        fail("Exception expected");
    } catch (final OperationException e) {
        assertEquals("Failure directory is not empty: " + failureDir, e.getCause().getMessage());
    }
}
Also used: Path (org.apache.hadoop.fs.Path), FileSystem (org.apache.hadoop.fs.FileSystem), OutputStreamWriter (java.io.OutputStreamWriter), OperationException (uk.gov.gchq.gaffer.operation.OperationException), BufferedWriter (java.io.BufferedWriter), Test (org.junit.Test)
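
Conversely, the operation succeeds once the failure directory is empty, so the usual remedy on a real cluster is to clear it before loading. A minimal sketch, assuming the same FileSystem and failureDir as the test:

// Minimal sketch: recursively remove any stale failure output, then
// recreate the directory empty so the precondition above is satisfied.
final Path failurePath = new Path(failureDir);
if (fs.exists(failurePath)) {
    fs.delete(failurePath, true);
}
fs.mkdirs(failurePath);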

Example 4 with OperationException

Use of uk.gov.gchq.gaffer.operation.OperationException in project Gaffer by gchq.

From the class AccumuloRangeIDRetrieverTest, the setupGraph method:

private static void setupGraph(final AccumuloStore store, int numEntries) {
    final List<Element> elements = new ArrayList<>();
    for (int i = 0; i < numEntries; i++) {
        final Edge edge = new Edge(TestGroups.EDGE);
        // Zero-pad the vertex to four digits so lexicographic order matches numeric order.
        final String s = String.format("%04d", i);
        edge.setSource(s);
        edge.setDestination("B");
        edge.setDirected(false);
        elements.add(edge);
    }
    try {
        final User user = new User();
        store.execute(new AddElements(elements), user);
    } catch (final OperationException e) {
        fail("Couldn't add element: " + e);
    }
}
Also used: AddElements (uk.gov.gchq.gaffer.operation.impl.add.AddElements), User (uk.gov.gchq.gaffer.user.User), Element (uk.gov.gchq.gaffer.data.element.Element), ArrayList (java.util.ArrayList), Edge (uk.gov.gchq.gaffer.data.element.Edge), OperationException (uk.gov.gchq.gaffer.operation.OperationException)
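
The zero padding above is what makes the vertices sort lexicographically in the same order as their numeric values, which is what a range-based retriever relies on. A small illustrative check of that property in plain Java:

// Without padding, "10" sorts before "2" lexicographically; with four-digit
// padding, "0002" sorts before "0010", matching numeric order.
assert "10".compareTo("2") < 0;
assert String.format("%04d", 2).compareTo(String.format("%04d", 10)) < 0;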

Example 5 with OperationException

Use of uk.gov.gchq.gaffer.operation.OperationException in project Gaffer by gchq.

From the class ProxyStore, the executeOpChainViaUrl method:

protected <OUTPUT> OUTPUT executeOpChainViaUrl(final OperationChain<OUTPUT> operationChain, final Context context) throws OperationException {
    final String opChainJson;
    try {
        opChainJson = new String(jsonSerialiser.serialise(operationChain), CommonConstants.UTF_8);
    } catch (final UnsupportedEncodingException | SerialisationException e) {
        throw new OperationException("Unable to serialise operation chain into JSON.", e);
    }
    final URL url = getProperties().getGafferUrl("graph/doOperation");
    try {
        return doPost(url, opChainJson, operationChain.getOutputTypeReference(), context);
    } catch (final StoreException e) {
        throw new OperationException(e.getMessage(), e);
    }
}
Also used: SerialisationException (uk.gov.gchq.gaffer.exception.SerialisationException), UnsupportedEncodingException (java.io.UnsupportedEncodingException), OperationException (uk.gov.gchq.gaffer.operation.OperationException), URL (java.net.URL), StoreException (uk.gov.gchq.gaffer.store.StoreException)
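
Callers do not invoke executeOpChainViaUrl directly; it sits behind the store's normal execute path. The following is a hedged sketch of exercising it, assuming proxyStore is an already-initialised ProxyStore and elements is a prepared collection; constructor and generic details vary between Gaffer versions.

// Illustrative sketch: executing any chain against a ProxyStore routes
// through executeOpChainViaUrl, which POSTs the JSON-serialised chain to
// the remote graph/doOperation endpoint.
final OperationChain<Void> chain = new OperationChain<>(new AddElements(elements));
proxyStore.execute(chain, new User()); // throws OperationException on serialisation or remote failure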

Aggregations

OperationException (uk.gov.gchq.gaffer.operation.OperationException): 38 uses
User (uk.gov.gchq.gaffer.user.User): 9 uses
StoreException (uk.gov.gchq.gaffer.store.StoreException): 7 uses
Element (uk.gov.gchq.gaffer.data.element.Element): 6 uses
IOException (java.io.IOException): 4 uses
ArrayList (java.util.ArrayList): 4 uses
Configuration (org.apache.hadoop.conf.Configuration): 4 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 4 uses
Path (org.apache.hadoop.fs.Path): 4 uses
Test (org.junit.Test): 4 uses
Graph (uk.gov.gchq.gaffer.graph.Graph): 4 uses
SparkConf (org.apache.spark.SparkConf): 3 uses
IteratorSettingException (uk.gov.gchq.gaffer.accumulostore.key.exception.IteratorSettingException): 3 uses
Edge (uk.gov.gchq.gaffer.data.element.Edge): 3 uses
JobDetail (uk.gov.gchq.gaffer.jobtracker.JobDetail): 3 uses
AddElements (uk.gov.gchq.gaffer.operation.impl.add.AddElements): 3 uses
BufferedWriter (java.io.BufferedWriter): 2 uses
ByteArrayInputStream (java.io.ByteArrayInputStream): 2 uses
DataInputStream (java.io.DataInputStream): 2 uses
OutputStreamWriter (java.io.OutputStreamWriter): 2 uses