Usage of uk.gov.gchq.gaffer.operation.OperationException in the Gaffer project (gchq):
class OperationExample, method createExampleGraph.
/**
 * Creates an example {@link Graph} backed by the mock Accumulo store,
 * loads the example data from {@code /example/operation/data.txt} and
 * adds the generated elements to the graph.
 *
 * @return a populated example graph
 * @throws RuntimeException if executing the add-elements operation chain fails
 */
protected Graph createExampleGraph() {
    final Graph graph = new Graph.Builder()
            .addSchemas(StreamUtil.openStreams(getClass(), "/example/operation/schema"))
            .storeProperties(StreamUtil.openStream(getClass(), "/example/operation/mockaccumulostore.properties"))
            .build();

    // Create data generator
    final DataGenerator dataGenerator = new DataGenerator();

    // Load data into memory
    final List<String> data = DataUtils.loadData(StreamUtil.openStream(getClass(), "/example/operation/data.txt", true));

    // Add the edges to the graph using an operation chain consisting of:
    //   generateElements - generating edges from the data (note these are directed edges)
    //   addElements      - add the edges to the graph
    // Typed as OperationChain<Void> (AddElements has no output) instead of the raw type.
    final OperationChain<Void> addOpChain = new OperationChain.Builder()
            .first(new GenerateElements.Builder<String>()
                    .generator(dataGenerator)
                    .objects(data)
                    .build())
            .then(new AddElements())
            .build();

    try {
        graph.execute(addOpChain, new User());
    } catch (final OperationException e) {
        // The bundled example data should always load cleanly - treat failure as fatal.
        throw new RuntimeException(e);
    }

    return graph;
}
Usage of uk.gov.gchq.gaffer.operation.OperationException in the Gaffer project (gchq):
class GetDataFrameOfElementsExample, method runExamples.
@Override
public void runExamples() {
    // Spark emits some log output even when the log level is set to off, so
    // logging is explicitly disabled for the duration of the examples and
    // restored once they have finished.
    ROOT_LOGGER.setLevel(Level.OFF);

    final SparkConf conf = new SparkConf();
    conf.setMaster("local");
    conf.setAppName("getDataFrameOfElementsWithEntityGroup");
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
    conf.set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator");
    conf.set("spark.driver.allowMultipleContexts", "true");

    final SparkContext sparkContext = new SparkContext(conf);
    sparkContext.setLogLevel("OFF");
    final SQLContext sqlContext = new SQLContext(sparkContext);
    final Graph graph = getGraph();

    try {
        getDataFrameOfElementsWithEntityGroup(sqlContext, graph);
        getDataFrameOfElementsWithEdgeGroup(sqlContext, graph);
    } catch (final OperationException e) {
        throw new RuntimeException(e);
    }

    sparkContext.stop();
    ROOT_LOGGER.setLevel(Level.INFO);
}
Usage of uk.gov.gchq.gaffer.operation.OperationException in the Gaffer project (gchq):
class GetJavaRDDOfAllElementsExample, method runExamples.
@Override
public void runExamples() {
    // Spark produces some logs even when the log level is set to off, so
    // turn logging off for the duration of the run and re-enable it at the end.
    ROOT_LOGGER.setLevel(Level.OFF);

    final SparkConf sparkConfig = new SparkConf()
            .setMaster("local")
            .setAppName("GetJavaRDDOfAllElementsExample")
            .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .set("spark.kryo.registrator", "uk.gov.gchq.gaffer.spark.serialisation.kryo.Registrator")
            .set("spark.driver.allowMultipleContexts", "true");
    final JavaSparkContext javaSparkContext = new JavaSparkContext(sparkConfig);
    javaSparkContext.setLogLevel("OFF");

    final Graph graph = getGraph();
    try {
        getJavaRddOfAllElements(javaSparkContext, graph);
        getJavaRddOfAllElementsReturningEdgesOnly(javaSparkContext, graph);
    } catch (final OperationException e) {
        throw new RuntimeException(e);
    }

    javaSparkContext.stop();
    ROOT_LOGGER.setLevel(Level.INFO);
}
Usage of uk.gov.gchq.gaffer.operation.OperationException in the Gaffer project (gchq):
class AccumuloKeyRangePartitioner, method getSplits.
/**
 * Gets the split points of the Accumulo table backing the given store.
 *
 * @param store the Accumulo store to query
 * @return the table's split points rendered as strings
 * @throws OperationException if the Accumulo connection cannot be created,
 *                            or the split points cannot be retrieved
 */
public static synchronized String[] getSplits(final AccumuloStore store) throws OperationException {
    final Connector connector;
    try {
        connector = store.getConnection();
    } catch (final StoreException e) {
        throw new OperationException("Failed to create accumulo connection", e);
    }
    final String table = store.getProperties().getTable();
    try {
        final Collection<Text> splits = connector.tableOperations().listSplits(table);
        // A sequential stream straight to an array replaces the previous
        // parallelStream + intermediate list + pre-sized array: same result
        // (encounter order is preserved either way), less work and garbage.
        return splits.stream()
                .map(Text::toString)
                .toArray(String[]::new);
    } catch (final TableNotFoundException | AccumuloSecurityException | AccumuloException e) {
        throw new OperationException("Failed to get accumulo split points from table " + table, e);
    }
}
Usage of uk.gov.gchq.gaffer.operation.OperationException in the Gaffer project (gchq):
class AbstractImportKeyValuePairRDDToAccumuloHandler, method getConfiguration.
/**
 * Deserialises the Hadoop {@link Configuration} carried (if present) in the
 * operation's {@code HADOOP_CONFIGURATION_KEY} option.
 *
 * @param operation the operation that may carry a serialised configuration
 * @return the decoded configuration, or a default one when the option is absent
 * @throws OperationException if the serialised configuration cannot be decoded
 */
protected Configuration getConfiguration(final T operation) throws OperationException {
    final Configuration conf = new Configuration();
    final String serialisedConf = operation.getOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY);
    if (serialisedConf != null) {
        try {
            // StandardCharsets.UTF_8 avoids the charset-name lookup and the
            // checked UnsupportedEncodingException of the String-based overload.
            final byte[] bytes = serialisedConf.getBytes(java.nio.charset.StandardCharsets.UTF_8);
            conf.readFields(new DataInputStream(new ByteArrayInputStream(bytes)));
        } catch (final IOException e) {
            throw new OperationException("Exception decoding Configuration from options", e);
        }
    }
    return conf;
}
Aggregations