use of uk.gov.gchq.gaffer.accumulostore.AccumuloStore in project Gaffer by gchq.
the class AddUpdateTableIterator method main.
public static void main(final String[] args) throws StoreException, SchemaException, IOException {
    if (args.length < NUM_REQUIRED_ARGS) {
        System.err.println("Wrong number of arguments. \nUsage: "
                + "<comma separated schema paths> <store properties path> <"
                + ADD_KEY + "," + REMOVE_KEY + " or " + UPDATE_KEY + ">");
        System.exit(1);
    }
    final AccumuloStore store = new AccumuloStore();
    store.initialise(Schema.fromJson(getSchemaPaths(args)),
            AccumuloProperties.loadStoreProperties(getAccumuloPropertiesPath(args)));
    final String modifyKey = getModifyKey(args);
    switch (modifyKey) {
        case UPDATE_KEY:
            for (final String iterator : ITERATORS) {
                updateIterator(store, iterator);
            }
            break;
        case ADD_KEY:
            for (final String iterator : ITERATORS) {
                addIterator(store, iterator);
            }
            break;
        case REMOVE_KEY:
            for (final String iterator : ITERATORS) {
                removeIterator(store, iterator);
            }
            break;
        default:
            throw new IllegalArgumentException("Supplied add or update key (" + modifyKey
                    + ") was not valid, it must either be " + ADD_KEY + "," + REMOVE_KEY
                    + " or " + UPDATE_KEY + ".");
    }
}
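Because the utility is driven entirely by its String[] arguments, it can be invoked programmatically as well as from the command line. The sketch below is a hypothetical caller: the file paths are placeholders, and the assumption that UPDATE_KEY resolves to the literal "update" (and that the class lives in the usual uk.gov.gchq.gaffer.accumulostore.utils package) comes from us, not from the snippet.

import uk.gov.gchq.gaffer.accumulostore.utils.AddUpdateTableIterator;

public class UpdateTableIteratorsExample {
    public static void main(final String[] args) throws Exception {
        // Placeholder paths; supply real schema and store-properties files.
        final String schemaPaths = "/data/gaffer/schema/elements.json,/data/gaffer/schema/types.json";
        final String storePropertiesPath = "/data/gaffer/store.properties";

        // "update" is assumed to be the literal behind UPDATE_KEY.
        AddUpdateTableIterator.main(new String[]{schemaPaths, storePropertiesPath, "update"});
    }
}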
use of uk.gov.gchq.gaffer.accumulostore.AccumuloStore in project Gaffer by gchq.
the class AddElementsFromHdfsIT method createGraph.
private Graph createGraph(final Class<? extends AccumuloKeyPackage> keyPackageClass) throws StoreException {
    final Schema schema = Schema.fromJson(StreamUtil.schemas(getClass()));
    final AccumuloProperties properties = AccumuloProperties.loadStoreProperties(StreamUtil.storeProps(getClass()));
    properties.setKeyPackageClass(keyPackageClass.getName());
    properties.setInstance("instance_" + keyPackageClass.getName());
    final AccumuloStore store = new MockAccumuloStore();
    store.initialise(schema, properties);
    store.updateConfiguration(createLocalConf(), new View(), new User());
    return new Graph.Builder().store(store).build();
}
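A caller passes a concrete AccumuloKeyPackage implementation to choose the key encoding under test. A minimal sketch, assuming the ByteEntityKeyPackage that ships with the Accumulo store (the exact package path is our assumption):

import uk.gov.gchq.gaffer.accumulostore.key.core.impl.byteEntity.ByteEntityKeyPackage;
import uk.gov.gchq.gaffer.graph.Graph;

// Each key package gets its own mock instance name (see setInstance above),
// so graphs built for different key packages stay isolated from each other.
final Graph byteEntityGraph = createGraph(ByteEntityKeyPackage.class);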
use of uk.gov.gchq.gaffer.accumulostore.AccumuloStore in project Gaffer by gchq.
the class AccumuloAddElementsFromHdfsJobFactoryTest method shouldSetupJob.
@Test
public void shouldSetupJob() throws IOException {
    // Given
    final JobConf localConf = createLocalConf();
    final FileSystem fs = FileSystem.getLocal(localConf);
    fs.mkdirs(new Path(outputDir));
    fs.mkdirs(new Path(splitsDir));
    try (final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fs.create(new Path(splitsFile), true)))) {
        writer.write("1");
    }
    final AccumuloAddElementsFromHdfsJobFactory factory = new AccumuloAddElementsFromHdfsJobFactory();
    final Job job = mock(Job.class);
    final AddElementsFromHdfs operation = new AddElementsFromHdfs.Builder()
            .outputPath(outputDir)
            .mapperGenerator(TextMapperGeneratorImpl.class)
            .option(AccumuloStoreConstants.OPERATION_HDFS_USE_PROVIDED_SPLITS_FILE, "true")
            .option(AccumuloStoreConstants.OPERATION_HDFS_SPLITS_FILE_PATH, splitsFile)
            .build();
    final AccumuloStore store = mock(AccumuloStore.class);
    given(job.getConfiguration()).willReturn(localConf);
    // When
    factory.setupJob(job, operation, store);
    // Then
    verify(job).setJarByClass(factory.getClass());
    verify(job).setJobName("Ingest HDFS data: Generator=" + TextMapperGeneratorImpl.class.getName() + ", output=" + outputDir);
    verify(job).setMapperClass(AddElementsFromHdfsMapper.class);
    verify(job).setMapOutputKeyClass(Key.class);
    verify(job).setMapOutputValueClass(Value.class);
    verify(job).setCombinerClass(AccumuloKeyValueReducer.class);
    verify(job).setReducerClass(AccumuloKeyValueReducer.class);
    verify(job).setOutputKeyClass(Key.class);
    verify(job).setOutputValueClass(Value.class);
    verify(job).setOutputFormatClass(AccumuloFileOutputFormat.class);
    assertEquals(fs.makeQualified(new Path(outputDir)).toString(), job.getConfiguration().get("mapreduce.output.fileoutputformat.outputdir"));
    verify(job).setNumReduceTasks(2);
    verify(job).setPartitionerClass(KeyRangePartitioner.class);
    assertEquals(splitsFile, job.getConfiguration().get(RangePartitioner.class.getName() + ".cutFile"));
}
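Both of these tests lean on a createLocalConf() helper that the excerpt does not show. A plausible sketch, assuming it only needs to point Hadoop at the local filesystem and the in-process MapReduce runner (the actual body in Gaffer may differ):

import org.apache.hadoop.mapred.JobConf;

private JobConf createLocalConf() {
    final JobConf conf = new JobConf();
    // Run entirely locally: local filesystem and in-process MapReduce,
    // so the test needs no cluster and writes under a local temp dir.
    conf.set("fs.defaultFS", "file:///");
    conf.set("mapreduce.framework.name", "local");
    return conf;
}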
use of uk.gov.gchq.gaffer.accumulostore.AccumuloStore in project Gaffer by gchq.
the class AccumuloAddElementsFromHdfsJobFactoryTest method shouldSetupAccumuloPartitionerWhenSetupJobForGivenPartitionerFlag.
private void shouldSetupAccumuloPartitionerWhenSetupJobForGivenPartitionerFlag(final String partitionerFlag) throws IOException {
    // Given
    final JobConf localConf = createLocalConf();
    final FileSystem fs = FileSystem.getLocal(localConf);
    fs.mkdirs(new Path(outputDir));
    fs.mkdirs(new Path(splitsDir));
    try (final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(fs.create(new Path(splitsFile), true)))) {
        writer.write("1");
    }
    final AccumuloAddElementsFromHdfsJobFactory factory = new AccumuloAddElementsFromHdfsJobFactory();
    final Job job = mock(Job.class);
    final AddElementsFromHdfs operation = new AddElementsFromHdfs.Builder()
            .outputPath(outputDir)
            .option(AccumuloStoreConstants.OPERATION_HDFS_USE_ACCUMULO_PARTITIONER, partitionerFlag)
            .option(AccumuloStoreConstants.OPERATION_HDFS_USE_PROVIDED_SPLITS_FILE, "true")
            .option(AccumuloStoreConstants.OPERATION_HDFS_SPLITS_FILE_PATH, splitsFile)
            .build();
    final AccumuloStore store = mock(AccumuloStore.class);
    final AccumuloProperties properties = mock(AccumuloProperties.class);
    given(job.getConfiguration()).willReturn(localConf);
    // When
    factory.setupJob(job, operation, store);
    // Then
    if ("false".equals(partitionerFlag)) {
        verify(job, never()).setNumReduceTasks(Mockito.anyInt());
        verify(job, never()).setPartitionerClass(Mockito.any(Class.class));
        assertNull(job.getConfiguration().get(RangePartitioner.class.getName() + ".cutFile"));
    } else {
        verify(job).setNumReduceTasks(2);
        verify(job).setPartitionerClass(KeyRangePartitioner.class);
        assertEquals(splitsFile, job.getConfiguration().get(RangePartitioner.class.getName() + ".cutFile"));
    }
}
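The helper is private and parameterised on the flag, so the test class presumably exposes @Test methods that delegate to it for each flag value. The method names below are hypothetical; only the delegation pattern is implied by the snippet:

@Test
public void shouldSetupAccumuloPartitionerWhenPartitionerFlagIsTrue() throws IOException {
    shouldSetupAccumuloPartitionerWhenSetupJobForGivenPartitionerFlag("true");
}

@Test
public void shouldNotSetupAccumuloPartitionerWhenPartitionerFlagIsFalse() throws IOException {
    shouldSetupAccumuloPartitionerWhenSetupJobForGivenPartitionerFlag("false");
}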
use of uk.gov.gchq.gaffer.accumulostore.AccumuloStore in project Gaffer by gchq.
the class TableUtilsTest method shouldThrowExceptionIfTableNameIsNotSpecifiedWhenCreatingTable.
@Test(expected = AccumuloRuntimeException.class)
public void shouldThrowExceptionIfTableNameIsNotSpecifiedWhenCreatingTable() throws StoreException, TableExistsException {
    // Given
    final Schema schema = new Schema.Builder()
            .type("int", Integer.class)
            .type("string", String.class)
            .type("boolean", Boolean.class)
            .edge("EDGE", new SchemaEdgeDefinition.Builder()
                    .source("string")
                    .destination("string")
                    .directed("boolean")
                    .build())
            .build();
    final AccumuloProperties properties = new AccumuloProperties();
    properties.setStoreClass(SingleUseMockAccumuloStore.class.getName());
    final AccumuloStore store = new AccumuloStore();
    store.initialise(schema, properties);
    // When
    TableUtils.createTable(store);
    fail("The expected exception was not thrown.");
}
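For contrast, the passing case would name the table before initialising the store. A minimal sketch, assuming AccumuloProperties exposes a table-name setter (the setter name and underlying property key vary between Gaffer versions, so treat both as assumptions):

// Hypothetical happy path: with a table name set, createTable should succeed.
final AccumuloProperties properties = new AccumuloProperties();
properties.setStoreClass(SingleUseMockAccumuloStore.class.getName());
properties.setTable("myTable"); // assumed setter; key is typically "accumulo.table"
final AccumuloStore store = new AccumuloStore();
store.initialise(schema, properties);
TableUtils.createTable(store); // no AccumuloRuntimeException expected here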