Search in sources :

Example 26 with StoreException

use of uk.gov.gchq.gaffer.store.StoreException in project Gaffer by gchq.

the class ParquetStoreTest method shouldNotFailSettingSnapshotWhenSnapshotExists.

// Verifies that setting the latest snapshot is tolerant of the snapshot
// directory already existing on disk: setLatestSnapshot must not throw.
@Test
public void shouldNotFailSettingSnapshotWhenSnapshotExists(@TempDir java.nio.file.Path tempDir) throws IOException {
    // Given
    final ParquetStoreProperties properties = getParquetStoreProperties(tempDir);
    ParquetStore store = (ParquetStore) ParquetStore.createStore("G", TestUtils.gafferSchema("schemaUsingStringVertexType"), properties);
    // Pre-create the directory for snapshot 12345 so that setLatestSnapshot
    // encounters an already-existing path.
    Files.createDirectories(tempDir.resolve("data").resolve(ParquetStore.getSnapshotPath(12345L)));
    // When / Then
    try {
        store.setLatestSnapshot(12345L);
    } catch (StoreException e) {
        // An existing snapshot directory must be tolerated; any StoreException
        // here is a test failure.
        fail("StoreException should not have been thrown. Message is:\n" + e.getMessage());
    }
}
Also used : TestUtils.getParquetStoreProperties(uk.gov.gchq.gaffer.parquetstore.testutils.TestUtils.getParquetStoreProperties) StoreException(uk.gov.gchq.gaffer.store.StoreException) Test(org.junit.jupiter.api.Test)

Example 27 with StoreException

use of uk.gov.gchq.gaffer.store.StoreException in project Gaffer by gchq.

the class SplitStoreFromIterableHandler method doOperation.

/**
 * Adds the operation's Base64-encoded split points to the store's Accumulo table.
 *
 * @param operation the operation supplying the encoded split points; its input is required
 * @param store     the Accumulo store whose table should be split
 * @throws OperationException if the input is missing or the split points cannot be added
 */
private void doOperation(final SplitStoreFromIterable<String> operation, final AccumuloStore store) throws OperationException {
    if (null == operation.getInput()) {
        throw new OperationException("Operation input is required.");
    }
    final SortedSet<Text> splits = new TreeSet<>();
    for (final String split : operation.getInput()) {
        // Split points are transported Base64-encoded; decode back to raw bytes.
        splits.add(new Text(Base64.decodeBase64(split)));
    }
    try {
        store.getConnection().tableOperations().addSplits(store.getTableName(), splits);
        LOGGER.info("Added {} splits to table {}", splits.size(), store.getTableName());
    } catch (final TableNotFoundException | AccumuloException | AccumuloSecurityException | StoreException e) {
        LOGGER.error("Failed to add {} split points to table {}", splits.size(), store.getTableName());
        // Propagate as the declared checked exception (the method already
        // declares `throws OperationException`) instead of an unchecked
        // RuntimeException, so callers can handle the failure consistently.
        throw new OperationException("Failed to add split points: " + e.getMessage(), e);
    }
}
Also used : TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) AccumuloException(org.apache.accumulo.core.client.AccumuloException) TreeSet(java.util.TreeSet) Text(org.apache.hadoop.io.Text) AccumuloSecurityException(org.apache.accumulo.core.client.AccumuloSecurityException) OperationException(uk.gov.gchq.gaffer.operation.OperationException) StoreException(uk.gov.gchq.gaffer.store.StoreException)

Example 28 with StoreException

use of uk.gov.gchq.gaffer.store.StoreException in project Gaffer by gchq.

the class AccumuloAddElementsFromHdfsJobFactory method setUpPartitionerGenerateSplitsFile.

/**
 * Creates a splits file from the store's table and configures the job's
 * partitioner and reducer count accordingly.
 * <p>
 * If an explicit reduce-task count is set on the operation it pins both the
 * minimum and maximum; otherwise the operation's min/max bounds are used.
 * When Accumulo yields fewer reducers than the requested minimum, the number
 * of sub-bins on the {@link GafferKeyRangePartitioner} is increased to
 * compensate.
 * </p>
 *
 * @param job       the MapReduce job to configure
 * @param operation the AddElementsFromHdfs operation carrying reducer settings and the splits file path
 * @param store     the Accumulo store providing the table and connection
 * @throws IOException if the splits file cannot be written
 */
protected void setUpPartitionerGenerateSplitsFile(final Job job, final AddElementsFromHdfs operation, final AccumuloStore store) throws IOException {
    final String splitsFilePath = operation.getSplitsFilePath();
    LOGGER.info("Creating splits file in location {} from table {}", splitsFilePath, store.getTableName());
    final int minReducers;
    final int maxReducers;
    int numReducers;
    if (validateValue(operation.getNumReduceTasks()) != 0) {
        // An explicit reducer count pins both bounds to the same value.
        minReducers = validateValue(operation.getNumReduceTasks());
        maxReducers = validateValue(operation.getNumReduceTasks());
    } else {
        minReducers = validateValue(operation.getMinReduceTasks());
        maxReducers = validateValue(operation.getMaxReduceTasks());
    }
    // Hoisted loop-invariant work: both createSplitsFile calls use the same
    // file system and destination path.
    final FileSystem fs = FileSystem.get(job.getConfiguration());
    final Path splitsFile = new Path(splitsFilePath);
    try {
        numReducers = 1 + IngestUtils.createSplitsFile(store.getConnection(), store.getTableName(), fs, splitsFile);
        if (maxReducers != 0 && maxReducers < numReducers) {
            // maxReducers set and Accumulo given more reducers than we want
            numReducers = 1 + IngestUtils.createSplitsFile(store.getConnection(), store.getTableName(), fs, splitsFile, maxReducers - 1);
        }
    } catch (final StoreException e) {
        throw new RuntimeException(e.getMessage(), e);
    }
    if (minReducers != 0 && numReducers < minReducers) {
        // minReducers set and Accumulo given less reducers than we want, set the appropriate number of subbins
        LOGGER.info("Number of reducers is {} which is less than the specified minimum number of {}", numReducers, minReducers);
        int factor = (minReducers / numReducers) + 1;
        LOGGER.info("Setting number of subbins on GafferKeyRangePartitioner to {}", factor);
        GafferKeyRangePartitioner.setNumSubBins(job, factor);
        numReducers = numReducers * factor;
        LOGGER.info("Number of reducers is {}", numReducers);
    }
    if (maxReducers != 0 && numReducers > maxReducers) {
        throw new IllegalArgumentException(minReducers + " - " + maxReducers + " is not a valid range, consider increasing the maximum reducers to at least " + numReducers);
    }
    job.setNumReduceTasks(numReducers);
    job.setPartitionerClass(GafferKeyRangePartitioner.class);
    GafferKeyRangePartitioner.setSplitFile(job, splitsFilePath);
}
Also used : Path(org.apache.hadoop.fs.Path) StoreException(uk.gov.gchq.gaffer.store.StoreException)

Example 29 with StoreException

use of uk.gov.gchq.gaffer.store.StoreException in project Gaffer by gchq.

the class AddElementsFromHdfsHandler method needsSplitting.

/**
 * Decides whether the table requires new split points: splitting is needed
 * when the table currently has fewer than 2 split points but more than one
 * tablet server is available.
 *
 * @param store the accumulo store
 * @return true if the table needs splitting
 * @throws OperationException if calculating the number of splits or the number of tablet servers fails.
 */
private boolean needsSplitting(final AccumuloStore store) throws OperationException {
    final int splitCount;
    try {
        splitCount = store.getConnection().tableOperations().listSplits(store.getTableName(), 2).size();
    } catch (final TableNotFoundException | AccumuloSecurityException | StoreException | AccumuloException e) {
        throw new OperationException("Unable to get accumulo's split points", e);
    }
    // Already sufficiently split — nothing to do.
    if (splitCount >= 2) {
        return false;
    }
    final int tabletServerCount;
    try {
        tabletServerCount = store.getTabletServers().size();
    } catch (final StoreException e) {
        throw new OperationException("Unable to get accumulo's tablet servers", e);
    }
    // Only worth splitting when more than one tablet server can share the load.
    return tabletServerCount > 1;
}
Also used : TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) AccumuloException(org.apache.accumulo.core.client.AccumuloException) AccumuloSecurityException(org.apache.accumulo.core.client.AccumuloSecurityException) OperationException(uk.gov.gchq.gaffer.operation.OperationException) StoreException(uk.gov.gchq.gaffer.store.StoreException)

Example 30 with StoreException

use of uk.gov.gchq.gaffer.store.StoreException in project Gaffer by gchq.

the class AddUpdateTableIterator method main.

/**
 * Utility for creating and updating an Accumulo table.
 * Accumulo tables are automatically created when the Gaffer Accumulo store
 * is initialised when an instance of Graph is created.
 * <p>
 * Running this with an existing table will remove the existing iterators
 * and recreate them with the provided schema.
 * </p>
 * <p>
 * A FileGraphLibrary path must be specified as an argument.  If no path is set NoGraphLibrary will be used.
 * </p>
 * <p>
 * Usage: java -cp accumulo-store-[version]-utility.jar uk.gov.gchq.gaffer.accumulostore.utils.AddUpdateTableIterator [graphId] [pathToSchemaDirectory] [pathToStoreProperties] [pathToFileGraphLibrary]
 * </p>
 *
 * @param args [graphId] [schema directory path] [store properties path] [ file graph library path]
 * @throws Exception if the tables fails to be created/updated
 */
public static void main(final String[] args) throws Exception {
    if (args.length < NUM_REQUIRED_ARGS) {
        System.err.println("Wrong number of arguments. \nUsage: " + "<graphId> " + "<comma separated schema paths> <store properties path> " + "<" + ADD_KEY + "," + REMOVE_KEY + " or " + UPDATE_KEY + "> " + "<file graph library path>");
        System.exit(1);
    }
    // Store properties are mandatory: without them no store can be constructed.
    final AccumuloProperties storeProps = AccumuloProperties.loadStoreProperties(getAccumuloPropertiesPath(args));
    if (null == storeProps) {
        throw new IllegalArgumentException("Store properties are required to create a store");
    }
    final Schema schema = Schema.fromJson(getSchemaPaths(args));
    // Fall back to NoGraphLibrary when no file graph library path was supplied.
    GraphLibrary library;
    if (null == getFileGraphLibraryPathString(args)) {
        library = new NoGraphLibrary();
    } else {
        library = new FileGraphLibrary(getFileGraphLibraryPathString(args));
    }
    library.addOrUpdate(getGraphId(args), schema, storeProps);
    final String storeClass = storeProps.getStoreClass();
    if (null == storeClass) {
        throw new IllegalArgumentException("The Store class name was not found in the store properties for key: " + StoreProperties.STORE_CLASS);
    }
    final AccumuloStore store;
    try {
        // getDeclaredConstructor().newInstance() replaces the deprecated
        // Class.newInstance(), which bypasses compile-time exception handling
        // of constructor failures. ReflectiveOperationException covers
        // ClassNotFoundException, NoSuchMethodException, InstantiationException,
        // IllegalAccessException and InvocationTargetException.
        store = Class.forName(storeClass).asSubclass(AccumuloStore.class).getDeclaredConstructor().newInstance();
    } catch (final ReflectiveOperationException e) {
        throw new IllegalArgumentException("Could not create store of type: " + storeClass, e);
    }
    try {
        store.preInitialise(getGraphId(args), schema, storeProps);
    } catch (final StoreException e) {
        throw new IllegalArgumentException("Could not initialise the store with provided arguments.", e);
    }
    // Lazily create the table if it does not yet exist before touching iterators.
    if (!store.getConnection().tableOperations().exists(store.getTableName())) {
        TableUtils.createTable(store);
    }
    final String modifyKey = getModifyKey(args);
    switch(modifyKey) {
        case UPDATE_KEY:
            for (final String iterator : ITERATORS) {
                updateIterator(store, iterator);
            }
            break;
        case ADD_KEY:
            for (final String iterator : ITERATORS) {
                addIterator(store, iterator);
            }
            break;
        case REMOVE_KEY:
            for (final String iterator : ITERATORS) {
                removeIterator(store, iterator);
            }
            break;
        default:
            throw new IllegalArgumentException("Supplied add or update key (" + modifyKey + ") was not valid, it must either be " + ADD_KEY + "," + REMOVE_KEY + " or " + UPDATE_KEY + ".");
    }
}
Also used : AccumuloProperties(uk.gov.gchq.gaffer.accumulostore.AccumuloProperties) Schema(uk.gov.gchq.gaffer.store.schema.Schema) StoreException(uk.gov.gchq.gaffer.store.StoreException) NoGraphLibrary(uk.gov.gchq.gaffer.store.library.NoGraphLibrary) FileGraphLibrary(uk.gov.gchq.gaffer.store.library.FileGraphLibrary) GraphLibrary(uk.gov.gchq.gaffer.store.library.GraphLibrary) NoGraphLibrary(uk.gov.gchq.gaffer.store.library.NoGraphLibrary) FileGraphLibrary(uk.gov.gchq.gaffer.store.library.FileGraphLibrary) AccumuloStore(uk.gov.gchq.gaffer.accumulostore.AccumuloStore)

Aggregations

StoreException (uk.gov.gchq.gaffer.store.StoreException)70 OperationException (uk.gov.gchq.gaffer.operation.OperationException)26 IOException (java.io.IOException)21 Path (org.apache.hadoop.fs.Path)11 Schema (uk.gov.gchq.gaffer.store.schema.Schema)11 HashSet (java.util.HashSet)10 AccumuloSecurityException (org.apache.accumulo.core.client.AccumuloSecurityException)10 Element (uk.gov.gchq.gaffer.data.element.Element)10 UnsupportedEncodingException (java.io.UnsupportedEncodingException)9 TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException)9 IteratorSettingException (uk.gov.gchq.gaffer.accumulostore.key.exception.IteratorSettingException)9 SerialisationException (uk.gov.gchq.gaffer.exception.SerialisationException)9 ArrayList (java.util.ArrayList)8 AccumuloException (org.apache.accumulo.core.client.AccumuloException)8 Configuration (org.apache.hadoop.conf.Configuration)8 Test (org.junit.jupiter.api.Test)8 User (uk.gov.gchq.gaffer.user.User)8 Set (java.util.Set)6 IteratorSetting (org.apache.accumulo.core.client.IteratorSetting)6 FileSystem (org.apache.hadoop.fs.FileSystem)6