Example 56 with StoreException

Use of uk.gov.gchq.gaffer.store.StoreException in project Gaffer by gchq.

From the class ParquetStore, the method loadGraphPartitioner:

private void loadGraphPartitioner() throws StoreException {
    final String dataDir = getDataDir();
    try {
        if (fs.exists(new Path(dataDir))) {
            this.currentSnapshot = getLatestSnapshot(dataDir);
            LOGGER.info("Setting currentSnapshot to {}", this.currentSnapshot);
            final Path path = getGraphPartitionerPath();
            if (!fs.exists(path)) {
                LOGGER.info("Graph partitioner does not exist in {} so creating it", path);
                final GraphPartitioner partitioner = new CalculatePartitioner(new Path(dataDir + "/" + getSnapshotPath(this.currentSnapshot)), getSchema(), fs).call();
                LOGGER.info("Writing graph partitioner to {}", path);
                try (final FSDataOutputStream stream = fs.create(path)) {
                    new GraphPartitionerSerialiser().write(partitioner, stream);
                }
            }
            LOGGER.info("Loading graph partitioner from path {}", path);
            loadGraphPartitioner(path);
        } else {
            throw new StoreException("Data directory " + dataDir + " does not exist - store is in an inconsistent state");
        }
    } catch (final IOException e) {
        throw new StoreException(e.getMessage(), e);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) GraphPartitionerSerialiser(uk.gov.gchq.gaffer.parquetstore.partitioner.serialisation.GraphPartitionerSerialiser) GraphPartitioner(uk.gov.gchq.gaffer.parquetstore.partitioner.GraphPartitioner) CalculatePartitioner(uk.gov.gchq.gaffer.parquetstore.operation.handler.utilities.CalculatePartitioner) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) IOException(java.io.IOException) StoreException(uk.gov.gchq.gaffer.store.StoreException)
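
For reference, a condensed sketch of the same pattern in isolation: write a GraphPartitioner out to HDFS and translate any IOException into a StoreException. Only the serialiser and file-system calls seen above are used; the helper class and method names are illustrative.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import uk.gov.gchq.gaffer.parquetstore.partitioner.GraphPartitioner;
import uk.gov.gchq.gaffer.parquetstore.partitioner.serialisation.GraphPartitionerSerialiser;
import uk.gov.gchq.gaffer.store.StoreException;

public final class PartitionerWriteSketch {

    private PartitionerWriteSketch() {
    }

    // Persists a partitioner to the file system and wraps any I/O failure in a StoreException,
    // mirroring the error handling of loadGraphPartitioner above.
    public static void writePartitioner(final GraphPartitioner partitioner, final String pathStr) throws StoreException {
        try {
            final FileSystem fs = FileSystem.get(new Configuration());
            try (final FSDataOutputStream stream = fs.create(new Path(pathStr))) {
                new GraphPartitionerSerialiser().write(partitioner, stream);
            }
        } catch (final IOException e) {
            throw new StoreException(e.getMessage(), e);
        }
    }
}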

Example 57 with StoreException

Use of uk.gov.gchq.gaffer.store.StoreException in project Gaffer by gchq.

From the class ParquetStore, the method initialise:

@Override
public void initialise(final String graphId, final Schema schema, final StoreProperties properties) throws StoreException {
    if (!(properties instanceof ParquetStoreProperties)) {
        throw new StoreException("ParquetStore must be initialised with properties of class ParquetStoreProperties");
    }
    final ParquetStoreProperties parquetStoreProperties = (ParquetStoreProperties) properties;
    if (null == parquetStoreProperties.getDataDir()) {
        throw new StoreException("The ParquetStoreProperties must contain a non-null data directory (" + ParquetStoreProperties.DATA_DIR + ")");
    }
    if (null == parquetStoreProperties.getTempFilesDir()) {
        throw new StoreException("The ParquetStoreProperties must contain a non-null temporary data directory (" + ParquetStoreProperties.TEMP_FILES_DIR + ")");
    }
    LOGGER.info("Initialising ParquetStore for graph id {}", graphId);
    super.initialise(graphId, schema, parquetStoreProperties);
    try {
        fs = FileSystem.get(new Configuration());
        schemaUtils = new SchemaUtils(getSchema());
        initialise();
        loadGraphPartitioner();
    } catch (final IOException e) {
        throw new StoreException("Could not connect to the file system", e);
    }
}
Also used : SchemaUtils(uk.gov.gchq.gaffer.parquetstore.utils.SchemaUtils) Configuration(org.apache.hadoop.conf.Configuration) IOException(java.io.IOException) StoreException(uk.gov.gchq.gaffer.store.StoreException)
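
A minimal caller-side sketch of initialise, under a few assumptions not confirmed by the snippet above: the no-arg constructors, the setDataDir/setTempFilesDir setter names and the directory values are illustrative, the Schema is empty purely for brevity, and a real application would normally build the store through a Graph rather than instantiating it directly.

import uk.gov.gchq.gaffer.parquetstore.ParquetStore;
import uk.gov.gchq.gaffer.parquetstore.ParquetStoreProperties;
import uk.gov.gchq.gaffer.store.StoreException;
import uk.gov.gchq.gaffer.store.schema.Schema;

public final class ParquetStoreInitSketch {

    public static void main(final String[] args) {
        final ParquetStoreProperties properties = new ParquetStoreProperties();
        // Both directories must be non-null, otherwise initialise throws a StoreException (see above).
        properties.setDataDir("/data/gaffer/parquet");      // assumed setter name
        properties.setTempFilesDir("/tmp/gaffer/parquet");  // assumed setter name

        final ParquetStore store = new ParquetStore();
        try {
            store.initialise("exampleGraph", new Schema.Builder().build(), properties);
        } catch (final StoreException e) {
            // Thrown if the properties are the wrong type, a directory is null,
            // or the file system cannot be reached.
            System.err.println("Failed to initialise ParquetStore: " + e.getMessage());
        }
    }
}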

Example 58 with StoreException

Use of uk.gov.gchq.gaffer.store.StoreException in project Gaffer by gchq.

From the class TableUtils, the method ensureTableExists:

/**
 * Ensures that the table exists; otherwise it creates it and sets it up to
 * receive Gaffer data.
 *
 * @param store the HBase store
 * @throws StoreException if a connection to HBase could not be created or the table could not be created
 */
public static void ensureTableExists(final HBaseStore store) throws StoreException {
    final Connection connection = store.getConnection();
    final TableName tableName = store.getTableName();
    try {
        final Admin admin = connection.getAdmin();
        if (admin.tableExists(tableName)) {
            validateTable(tableName, admin);
        } else {
            try {
                TableUtils.createTable(store);
            } catch (final Exception e) {
                if (!admin.tableExists(tableName)) {
                    if (e instanceof StoreException) {
                        throw e;
                    } else {
                        throw new StoreException("Failed to create table " + tableName, e);
                    }
                }
            // If the table exists then it must have been created in a different thread.
            }
        }
    } catch (final IOException e) {
        throw new StoreException("Failed to check if table " + tableName + " exists", e);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Connection(org.apache.hadoop.hbase.client.Connection) IOException(java.io.IOException) Admin(org.apache.hadoop.hbase.client.Admin) StoreException(uk.gov.gchq.gaffer.store.StoreException)
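
Client code typically only needs to call ensureTableExists and handle the StoreException. A minimal sketch, assuming an already-initialised HBaseStore and that the import paths follow the project's hbasestore package layout:

import uk.gov.gchq.gaffer.hbasestore.HBaseStore;
import uk.gov.gchq.gaffer.hbasestore.utils.TableUtils;
import uk.gov.gchq.gaffer.store.StoreException;

public final class EnsureTableSketch {

    private EnsureTableSketch() {
    }

    // store is assumed to be an already-initialised HBaseStore; how it is built is not shown here.
    public static void ensureTable(final HBaseStore store) {
        try {
            TableUtils.ensureTableExists(store);
        } catch (final StoreException e) {
            // Raised if the existence check, the validation or the table creation fails.
            throw new RuntimeException("Could not ensure the Gaffer table exists", e);
        }
    }
}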

Example 59 with StoreException

Use of uk.gov.gchq.gaffer.store.StoreException in project Gaffer by gchq.

From the class TableUtils, the method createTable:

/**
 * Creates an HBase table for the given HBase store.
 *
 * @param store the HBase store
 * @throws StoreException if a connection to HBase could not be created or the table could not be created
 */
public static synchronized void createTable(final HBaseStore store) throws StoreException {
    final TableName tableName = store.getTableName();
    try {
        final Admin admin = store.getConnection().getAdmin();
        if (admin.tableExists(tableName)) {
            LOGGER.info("Table {} already exists", tableName);
            return;
        }
        LOGGER.info("Creating table {}", tableName);
        final HTableDescriptor htable = new HTableDescriptor(tableName);
        final HColumnDescriptor col = new HColumnDescriptor(HBaseStoreConstants.getColFam());
        // TODO: Currently there is no way to disable versions in HBase.
        // HBase has this note in its code: "Allow maxVersion of 0 to be the way you say 'Keep all versions'."
        // Once HBase makes this update, we can set the max versions number to 0.
        col.setMaxVersions(Integer.MAX_VALUE);
        htable.addFamily(col);
        addCoprocesssor(htable, store);
        admin.createTable(htable);
    } catch (final Exception e) {
        LOGGER.warn("Failed to create table {}", tableName, e);
        throw new StoreException("Failed to create table " + tableName, e);
    }
    ensureTableExists(store);
    LOGGER.info("Table {} created", tableName);
}
Also used : TableName(org.apache.hadoop.hbase.TableName) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Admin(org.apache.hadoop.hbase.client.Admin) StoreException(uk.gov.gchq.gaffer.store.StoreException) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)
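
The catch-all at the end of createTable shows a pattern that recurs throughout these examples: any checked exception from the HBase client is rethrown as a StoreException naming the table, while an existing StoreException is propagated as-is. A stripped-down sketch of that pattern, with a hypothetical Callable standing in for the table operation:

import java.util.concurrent.Callable;

import uk.gov.gchq.gaffer.store.StoreException;

public final class StoreExceptionWrapSketch {

    private StoreExceptionWrapSketch() {
    }

    // Hypothetical helper: runs an arbitrary table operation and rethrows any failure as a
    // StoreException whose message names the table, as createTable does above.
    public static void runOrWrap(final String tableName, final Callable<Void> operation) throws StoreException {
        try {
            operation.call();
        } catch (final StoreException e) {
            // Already the store-level exception type, so propagate it unchanged.
            throw e;
        } catch (final Exception e) {
            throw new StoreException("Failed to create table " + tableName, e);
        }
    }
}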

Example 60 with StoreException

Use of uk.gov.gchq.gaffer.store.StoreException in project Gaffer by gchq.

From the class TableUtils, the method deleteAllRows:

public static void deleteAllRows(final HBaseStore store, final String... auths) throws StoreException {
    final Connection connection = store.getConnection();
    try {
        if (connection.getAdmin().tableExists(store.getTableName())) {
            connection.getAdmin().flush(store.getTableName());
            final Table table = connection.getTable(store.getTableName());
            final Scan scan = new Scan();
            scan.setAuthorizations(new Authorizations(auths));
            try (ResultScanner scanner = table.getScanner(scan)) {
                final List<Delete> deletes = new ArrayList<>();
                for (final Result result : scanner) {
                    deletes.add(new Delete(result.getRow()));
                }
                table.delete(deletes);
                connection.getAdmin().flush(store.getTableName());
            }
            try (ResultScanner scanner = table.getScanner(scan)) {
                if (scanner.iterator().hasNext()) {
                    throw new StoreException("Some rows in table " + store.getTableName() + " failed to delete");
                }
            }
        }
    } catch (final IOException e) {
        throw new StoreException("Failed to delete all rows in table " + store.getTableName(), e);
    }
}
Also used : Delete(org.apache.hadoop.hbase.client.Delete) Authorizations(org.apache.hadoop.hbase.security.visibility.Authorizations) Table(org.apache.hadoop.hbase.client.Table) ResultScanner(org.apache.hadoop.hbase.client.ResultScanner) Connection(org.apache.hadoop.hbase.client.Connection) ArrayList(java.util.ArrayList) Scan(org.apache.hadoop.hbase.client.Scan) IOException(java.io.IOException) Result(org.apache.hadoop.hbase.client.Result) ValidationResult(uk.gov.gchq.koryphe.ValidationResult) StoreException(uk.gov.gchq.gaffer.store.StoreException)
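
A minimal sketch of calling deleteAllRows, typically from test setup or teardown. The HBaseStore instance, the import paths and the authorisation string are assumptions rather than values taken from the snippet above.

import uk.gov.gchq.gaffer.hbasestore.HBaseStore;
import uk.gov.gchq.gaffer.hbasestore.utils.TableUtils;
import uk.gov.gchq.gaffer.store.StoreException;

public final class DeleteAllRowsSketch {

    private DeleteAllRowsSketch() {
    }

    // store is assumed to be an already-initialised HBaseStore;
    // "publicVisibility" is an illustrative authorisation string, not one defined by Gaffer.
    public static void clearTable(final HBaseStore store) throws StoreException {
        // Deletes every row visible under the supplied authorisations; deleteAllRows itself
        // re-scans afterwards and throws StoreException if any rows survived (see above).
        TableUtils.deleteAllRows(store, "publicVisibility");
    }
}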

Aggregations

StoreException (uk.gov.gchq.gaffer.store.StoreException): 70
OperationException (uk.gov.gchq.gaffer.operation.OperationException): 26
IOException (java.io.IOException): 21
Path (org.apache.hadoop.fs.Path): 11
Schema (uk.gov.gchq.gaffer.store.schema.Schema): 11
HashSet (java.util.HashSet): 10
AccumuloSecurityException (org.apache.accumulo.core.client.AccumuloSecurityException): 10
Element (uk.gov.gchq.gaffer.data.element.Element): 10
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 9
TableNotFoundException (org.apache.accumulo.core.client.TableNotFoundException): 9
IteratorSettingException (uk.gov.gchq.gaffer.accumulostore.key.exception.IteratorSettingException): 9
SerialisationException (uk.gov.gchq.gaffer.exception.SerialisationException): 9
ArrayList (java.util.ArrayList): 8
AccumuloException (org.apache.accumulo.core.client.AccumuloException): 8
Configuration (org.apache.hadoop.conf.Configuration): 8
Test (org.junit.jupiter.api.Test): 8
User (uk.gov.gchq.gaffer.user.User): 8
Set (java.util.Set): 6
IteratorSetting (org.apache.accumulo.core.client.IteratorSetting): 6
FileSystem (org.apache.hadoop.fs.FileSystem): 6