Use of org.locationtech.geowave.core.store.operations.MetadataType in project geowave by locationtech.
The class GeoWaveMultiProcessIngestIT, method testMultiProcessIngest().
@Test
public void testMultiProcessIngest() throws Exception {
  for (int j = 0; j < 10; j++) {
    final Class<?> clazz = GeoWaveMultiProcessIngestIT.class;
    final String javaHome = System.getProperty("java.home");
    final String javaBin = javaHome + File.separator + "bin" + File.separator + "java";
    final String className = clazz.getName();
    final String jarFile = ClasspathUtils.setupPathingJarClassPath(TEMP_DIR, clazz);
    final Index idx1 = SimpleIngest.createSpatialIndex();
    final Index idx2 = SimpleIngest.createSpatialTemporalIndex();
    final DataStore store = dataStorePluginOptions.createDataStore();
    store.addIndex(idx1);
    store.addIndex(idx2);
    final StringBuilder indexNames = new StringBuilder();
    indexNames.append(idx1.getName()).append(",").append(idx2.getName());
    final Configuration conf = new Configuration();
    conf.set(AbstractMapReduceIngest.INDEX_NAMES_KEY, indexNames.toString());
    for (final MetadataType type : MetadataType.values()) {
      // stats and index metadata writers are created elsewhere
      if (!MetadataType.INDEX.equals(type) && !MetadataType.STATISTIC_VALUES.equals(type)) {
        dataStorePluginOptions.createDataStoreOperations().createMetadataWriter(type).close();
      }
    }
    GeoWaveOutputFormat.addIndex(conf, idx1);
    GeoWaveOutputFormat.addIndex(conf, idx2);
    GeoWaveOutputFormat.setStoreOptions(conf, dataStorePluginOptions);
    Assert.assertTrue(TEMP_DIR.exists() || TEMP_DIR.mkdirs());
    final File configFile = new File(TEMP_DIR, "hadoop-job.conf");
    Assert.assertTrue(!configFile.exists() || configFile.delete());
    Assert.assertTrue(configFile.createNewFile());
    try (DataOutputStream dataOut = new DataOutputStream(new FileOutputStream(configFile))) {
      conf.write(dataOut);
    }
    final List<ProcessBuilder> bldrs = new ArrayList<>();
    for (int i = 0; i < NUM_PROCESSES; i++) {
      final ArrayList<String> argList = new ArrayList<>();
      argList.addAll(
          Arrays.asList(javaBin, "-cp", jarFile, className, Integer.toString(i * 10000)));
      final ProcessBuilder builder = new ProcessBuilder(argList);
      builder.directory(TEMP_DIR);
      builder.inheritIO();
      bldrs.add(builder);
    }
    final List<Process> processes = bldrs.stream().map(b -> {
      try {
        return b.start();
      } catch (final IOException e1) {
        // a process that fails to start maps to null and trips the assertion below
        e1.printStackTrace();
      }
      return null;
    }).collect(Collectors.toList());
    Assert.assertFalse(processes.stream().anyMatch(Objects::isNull));
    processes.forEach(p -> {
      try {
        p.waitFor();
      } catch (final InterruptedException e) {
        // interruption while waiting for a child ingest process is only logged
        e.printStackTrace();
      }
    });
    try (CloseableIterator<Object> it = store.query(QueryBuilder.newBuilder().build())) {
      Assert.assertEquals(2701 * NUM_PROCESSES, Iterators.size(it));
    }
    try (CloseableIterator<SimpleFeature> it =
        store.query(VectorQueryBuilder.newBuilder().indexName(idx1.getName()).build())) {
      Assert.assertEquals(2701 * NUM_PROCESSES, Iterators.size(it));
    }
    try (CloseableIterator<SimpleFeature> it =
        store.query(VectorQueryBuilder.newBuilder().indexName(idx2.getName()).build())) {
      Assert.assertEquals(2701 * NUM_PROCESSES, Iterators.size(it));
    }
    try (CloseableIterator<SimpleFeature> it =
        store.query(
            VectorQueryBuilder.newBuilder().constraints(
                VectorQueryBuilder.newBuilder().constraintsFactory().spatialTemporalConstraints().spatialConstraints(
                    GeometryUtils.GEOMETRY_FACTORY.toGeometry(
                        new Envelope(-172, 172, -82, 82))).build()).build())) {
      Assert.assertEquals(2277 * NUM_PROCESSES, Iterators.size(it));
    }
    final long epochTime = 1609459200000L;
    final long startTime = epochTime + TimeUnit.DAYS.toMillis(15);
    final long endTime = epochTime + TimeUnit.DAYS.toMillis(345);
    try (CloseableIterator<SimpleFeature> it =
        store.query(
            VectorQueryBuilder.newBuilder().constraints(
                VectorQueryBuilder.newBuilder().constraintsFactory().spatialTemporalConstraints().spatialConstraints(
                    GeometryUtils.GEOMETRY_FACTORY.toGeometry(
                        new Envelope(-172, 172, -82, 82))).addTimeRange(
                            new Date(startTime),
                            new Date(endTime)).build()).build())) {
      Assert.assertEquals(2178 * NUM_PROCESSES, Iterators.size(it));
    }
    TestUtils.deleteAll(getDataStorePluginOptions());
  }
}
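
The MetadataType idiom in this test is the loop that opens and immediately closes a metadata writer for each type before the child processes start, so every metadata table already exists and the concurrent processes do not race to create it. Below is a minimal sketch of that idiom in isolation; the helper name and the DataStoreOperations parameter are illustrative, but the calls are the ones used above.

// Illustrative helper, not part of GeoWave: pre-create metadata tables by
// opening and closing a writer for each MetadataType, mirroring the loop in
// testMultiProcessIngest above. Assumes the same imports as that test class.
private static void preCreateMetadataTables(final DataStoreOperations operations)
    throws Exception {
  for (final MetadataType type : MetadataType.values()) {
    // INDEX and STATISTIC_VALUES tables are created elsewhere, as in the test
    if (!MetadataType.INDEX.equals(type) && !MetadataType.STATISTIC_VALUES.equals(type)) {
      // creating and immediately closing the writer is enough to force table creation
      operations.createMetadataWriter(type).close();
    }
  }
}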
Use of org.locationtech.geowave.core.store.operations.MetadataType in project geowave by locationtech.
The class AbstractMapReduceIngest, method run().
@Override
public int run(final String[] args) throws Exception {
  final Configuration conf = getConf();
  conf.set(
      INGEST_PLUGIN_KEY,
      ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(ingestPlugin)));
  final VisibilityHandler visibilityHandler = visibilityOptions.getConfiguredVisibilityHandler();
  if (visibilityHandler != null) {
    conf.set(
        GLOBAL_VISIBILITY_KEY,
        ByteArrayUtils.byteArrayToString(PersistenceUtils.toBinary(visibilityHandler)));
  }
  final Job job = new Job(conf, getJobName());
  final StringBuilder indexNames = new StringBuilder();
  final List<Index> indexes = new ArrayList<>();
  for (final Index primaryIndex : indices) {
    indexes.add(primaryIndex);
    if (primaryIndex != null) {
      // add index
      GeoWaveOutputFormat.addIndex(job.getConfiguration(), primaryIndex);
      if (indexNames.length() != 0) {
        indexNames.append(",");
      }
      indexNames.append(primaryIndex.getName());
    }
  }
  job.getConfiguration().set(INDEX_NAMES_KEY, indexNames.toString());
  job.setJarByClass(AbstractMapReduceIngest.class);
  job.setInputFormatClass(AvroKeyInputFormat.class);
  AvroJob.setInputKeySchema(job, parentPlugin.getAvroSchema());
  FileInputFormat.setInputPaths(job, inputFile);
  setupMapper(job);
  setupReducer(job);
  // set geowave output format
  job.setOutputFormatClass(GeoWaveOutputFormat.class);
  GeoWaveOutputFormat.setStoreOptions(job.getConfiguration(), dataStoreOptions);
  final DataStore store = dataStoreOptions.createDataStore();
  final PersistentAdapterStore adapterStore = dataStoreOptions.createAdapterStore();
  final InternalAdapterStore internalAdapterStore = dataStoreOptions.createInternalAdapterStore();
  final DataTypeAdapter<?>[] dataAdapters = ingestPlugin.getDataAdapters();
  final Index[] indices = indexes.toArray(new Index[indexes.size()]);
  if ((dataAdapters != null) && (dataAdapters.length > 0)) {
    for (final DataTypeAdapter<?> dataAdapter : dataAdapters) {
      // register each type up front, even when there is not yet any data for it
      try {
        store.addType(
            dataAdapter,
            visibilityOptions.getConfiguredVisibilityHandler(),
            Lists.newArrayList(),
            indices);
        final short adapterId = internalAdapterStore.getAdapterId(dataAdapter.getTypeName());
        final InternalDataAdapter<?> internalAdapter = adapterStore.getAdapter(adapterId);
        GeoWaveOutputFormat.addDataAdapter(job.getConfiguration(), internalAdapter);
      } catch (IllegalArgumentException e) {
        // Skip any adapters that can't be mapped to the input indices
      }
    }
  } else {
    // no adapters are known up front; just add the indices provided by the client
    for (final Index index : indices) {
      store.addIndex(index);
    }
    if (indices.length > 0) {
      for (final MetadataType type : MetadataType.values()) {
        // stats and index metadata writers are created elsewhere
        if (!MetadataType.INDEX.equals(type) && !MetadataType.STATISTIC_VALUES.equals(type)) {
          dataStoreOptions.createDataStoreOperations().createMetadataWriter(type).close();
        }
      }
    }
  }
  // ensure the statistics metadata table exists before the distributed ingest runs
  if (dataStoreOptions.getFactoryOptions().getStoreOptions().isPersistDataStatistics()) {
    dataStoreOptions.createDataStoreOperations().createMetadataWriter(
        MetadataType.STATISTIC_VALUES).close();
  }
  job.setSpeculativeExecution(false);
  // add required indices
  final Index[] requiredIndices = parentPlugin.getRequiredIndices();
  if (requiredIndices != null) {
    for (final Index requiredIndex : requiredIndices) {
      GeoWaveOutputFormat.addIndex(job.getConfiguration(), requiredIndex);
    }
  }
  final int retVal = job.waitForCompletion(true) ? 0 : -1;
  // clean up any types that had no successful ingests
  if ((dataAdapters != null) && (dataAdapters.length > 0)) {
    AdapterIndexMappingStore adapterIndexMappingStore = null;
    for (final DataTypeAdapter<?> dataAdapter : dataAdapters) {
      final String typeName = dataAdapter.getTypeName();
      try (CloseableIterator<?> it =
          store.query(QueryBuilder.newBuilder().addTypeName(typeName).limit(1).build())) {
        if (!it.hasNext()) {
          if (adapterIndexMappingStore == null) {
            adapterIndexMappingStore = dataStoreOptions.createAdapterIndexMappingStore();
          }
          final Short adapterId = internalAdapterStore.getAdapterId(typeName);
          if (adapterId != null) {
            internalAdapterStore.remove(adapterId);
            adapterStore.removeAdapter(adapterId);
            adapterIndexMappingStore.remove(adapterId);
          }
        }
      }
    }
  }
  return retVal;
}
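
Two MetadataType-related steps in run() are worth isolating: when no adapters are known up front, the same table pre-creation loop as in the test above is used, and after the job completes, any type that ingested no rows has its adapter metadata removed. Below is a hedged sketch of that cleanup step, written as a helper that could sit next to run(); the helper name is illustrative, but it reuses only calls shown above and assumes the same imports as AbstractMapReduceIngest.

// Illustrative helper: drop adapter metadata for a type that produced no rows
// once the MapReduce job has finished.
private void removeEmptyType(
    final DataStore store,
    final InternalAdapterStore internalAdapterStore,
    final PersistentAdapterStore adapterStore,
    final AdapterIndexMappingStore adapterIndexMappingStore,
    final String typeName) throws Exception {
  // a limit-1 query is enough to tell whether any rows were ingested for the type
  try (CloseableIterator<?> it =
      store.query(QueryBuilder.newBuilder().addTypeName(typeName).limit(1).build())) {
    if (!it.hasNext()) {
      final Short adapterId = internalAdapterStore.getAdapterId(typeName);
      if (adapterId != null) {
        internalAdapterStore.remove(adapterId);
        adapterStore.removeAdapter(adapterId);
        adapterIndexMappingStore.remove(adapterId);
      }
    }
  }
}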
Use of org.locationtech.geowave.core.store.operations.MetadataType in project geowave by locationtech.
The class CassandraMetadataReader, method query().
@Override
public CloseableIterator<GeoWaveMetadata> query(final MetadataQuery query) {
  final String tableName = operations.getMetadataTableName(metadataType);
  final String[] selectedColumns =
      metadataType.isStatValues()
          ? ArrayUtils.add(getSelectedColumns(query), CassandraMetadataWriter.VISIBILITY_KEY)
          : getSelectedColumns(query);
  Predicate<Row> clientFilter = null;
  if (query.isPrefix()) {
    if (query.hasPrimaryId()) {
      clientFilter = new PrimaryIDPrefixFilter(query.getPrimaryId());
    }
  }
  final Iterator<Row> rows;
  if (!query.hasPrimaryIdRanges()) {
    Select select = operations.getSelect(tableName, selectedColumns);
    if (query.hasPrimaryId() && query.isExact()) {
      select =
          select.whereColumn(CassandraMetadataWriter.PRIMARY_ID_KEY).isEqualTo(
              QueryBuilder.literal(ByteBuffer.wrap(query.getPrimaryId())));
      if (query.hasSecondaryId()) {
        select =
            select.whereColumn(CassandraMetadataWriter.SECONDARY_ID_KEY).isEqualTo(
                QueryBuilder.literal(ByteBuffer.wrap(query.getSecondaryId())));
      }
    } else if (query.hasSecondaryId()) {
      select =
          select.allowFiltering().whereColumn(CassandraMetadataWriter.SECONDARY_ID_KEY).isEqualTo(
              QueryBuilder.literal(ByteBuffer.wrap(query.getSecondaryId())));
    }
    final ResultSet rs = operations.getSession().execute(select.build());
    rows = rs.iterator();
  } else {
    rows = Iterators.concat(Arrays.stream(query.getPrimaryIdRanges()).map((r) -> {
      // TODO this is not as efficient as prepared bound statements if there are many
      // ranges, but will work for now
      Select select = operations.getSelect(tableName, selectedColumns);
      if (r.getStart() != null) {
        select =
            select.allowFiltering().whereColumn(
                CassandraMetadataWriter.PRIMARY_ID_KEY).isGreaterThanOrEqualTo(
                    QueryBuilder.literal(ByteBuffer.wrap(r.getStart())));
      }
      if (r.getEnd() != null) {
        select =
            select.allowFiltering().whereColumn(
                CassandraMetadataWriter.PRIMARY_ID_KEY).isLessThan(
                    QueryBuilder.literal(ByteBuffer.wrap(r.getEndAsNextPrefix())));
      }
      final ResultSet rs = operations.getSession().execute(select.build());
      return rs.iterator();
    }).iterator());
  }
  final CloseableIterator<GeoWaveMetadata> retVal =
      new CloseableIterator.Wrapper<>(
          Iterators.transform(
              clientFilter != null ? Iterators.filter(rows, clientFilter) : rows,
              result -> new GeoWaveMetadata(
                  (query.hasPrimaryId() && query.isExact())
                      ? query.getPrimaryId()
                      : result.get(CassandraMetadataWriter.PRIMARY_ID_KEY, ByteBuffer.class).array(),
                  useSecondaryId(query)
                      ? query.getSecondaryId()
                      : result.get(CassandraMetadataWriter.SECONDARY_ID_KEY, ByteBuffer.class).array(),
                  getVisibility(query, result),
                  result.get(CassandraMetadataWriter.VALUE_KEY, ByteBuffer.class).array())));
  return query.getAuthorizations() != null
      ? MetadataIterators.clientVisibilityFilter(retVal, query.getAuthorizations())
      : retVal;
}
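
In practice this reader is obtained through DataStoreOperations.createMetadataReader(MetadataType) rather than constructed directly. Below is a hedged caller-side sketch using only calls that appear in the snippets on this page; the `operations` variable stands for any DataStoreOperations implementation, Cassandra-backed or otherwise, and the choice of STATISTIC_VALUES is illustrative.

// Illustrative usage, not taken from the project: scan all STATISTIC_VALUES
// metadata through the generic operations API. A MetadataQuery with null
// primary and secondary ids is the unconstrained form used by
// GeoWaveStabilityIT below to copy everything.
final MetadataReader reader = operations.createMetadataReader(MetadataType.STATISTIC_VALUES);
try (CloseableIterator<GeoWaveMetadata> it = reader.query(new MetadataQuery(null, null))) {
  while (it.hasNext()) {
    final GeoWaveMetadata metadata = it.next();
    // primary id, secondary id, and value are raw byte arrays
  }
}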
Use of org.locationtech.geowave.core.store.operations.MetadataType in project geowave by locationtech.
The class GeoWaveStabilityIT, method copyBadData().
@SuppressWarnings({ "unchecked", "rawtypes" })
private void copyBadData(final boolean badMetadata) throws Exception {
  final DataStoreOperations badStoreOperations = badDataStore.createDataStoreOperations();
  final DataStoreOperations storeOperations = dataStore.createDataStoreOperations();
  final PersistentAdapterStore adapterStore = dataStore.createAdapterStore();
  final InternalAdapterStore internalAdapterStore = dataStore.createInternalAdapterStore();
  final AdapterIndexMappingStore indexMappingStore = dataStore.createAdapterIndexMappingStore();
  final IndexStore indexStore = dataStore.createIndexStore();
  for (final MetadataType metadataType : MetadataType.values()) {
    try (MetadataWriter writer = badStoreOperations.createMetadataWriter(metadataType)) {
      final MetadataReader reader = storeOperations.createMetadataReader(metadataType);
      try (CloseableIterator<GeoWaveMetadata> it = reader.query(new MetadataQuery(null, null))) {
        while (it.hasNext()) {
          if (badMetadata) {
            writer.write(new BadGeoWaveMetadata(it.next()));
          } else {
            writer.write(it.next());
          }
        }
      }
    } catch (final Exception e) {
      LOGGER.error("Unable to write metadata on copy", e);
    }
  }
  final InternalDataAdapter<?>[] adapters = adapterStore.getAdapters();
  for (final InternalDataAdapter<?> adapter : adapters) {
    for (final AdapterToIndexMapping indexMapping : indexMappingStore.getIndicesForAdapter(
        adapter.getAdapterId())) {
      final boolean rowMerging = BaseDataStoreUtils.isRowMerging(adapter);
      final Index index = indexMapping.getIndex(indexStore);
      final ReaderParamsBuilder bldr =
          new ReaderParamsBuilder(
              index,
              adapterStore,
              indexMappingStore,
              internalAdapterStore,
              GeoWaveRowIteratorTransformer.NO_OP_TRANSFORMER);
      bldr.adapterIds(new short[] { adapter.getAdapterId() });
      bldr.isClientsideRowMerging(rowMerging);
      try (RowReader<GeoWaveRow> reader = storeOperations.createReader(bldr.build())) {
        try (RowWriter writer = badStoreOperations.createWriter(index, adapter)) {
          while (reader.hasNext()) {
            if (!badMetadata) {
              writer.write(new BadGeoWaveRow(reader.next()));
            } else {
              writer.write(reader.next());
            }
          }
        }
      } catch (final Exception e) {
        LOGGER.error("Unable to write data rows on copy", e);
      }
    }
  }
  try {
    badDataStore.createDataStatisticsStore().mergeStats();
  } catch (final Exception e) {
    LOGGER.info("Caught exception while merging bad stats.");
  }
}
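
Stripped of the corruption branch, the metadata half of copyBadData reduces to a generic copy of every metadata table from one store's operations to another's. Here is a minimal sketch of that loop; the helper name and parameters are illustrative, the calls and the per-table error handling mirror the test above, and LOGGER is assumed to be a field as in GeoWaveStabilityIT.

// Illustrative helper: copy every metadata table from source to destination,
// mirroring the first loop of copyBadData without the BadGeoWaveMetadata branch.
private static void copyAllMetadata(
    final DataStoreOperations source,
    final DataStoreOperations destination) {
  for (final MetadataType metadataType : MetadataType.values()) {
    try (MetadataWriter writer = destination.createMetadataWriter(metadataType)) {
      final MetadataReader reader = source.createMetadataReader(metadataType);
      try (CloseableIterator<GeoWaveMetadata> it = reader.query(new MetadataQuery(null, null))) {
        while (it.hasNext()) {
          writer.write(it.next());
        }
      }
    } catch (final Exception e) {
      // as in the test above, a failure on one table does not abort the copy
      LOGGER.error("Unable to write metadata on copy", e);
    }
  }
}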
Use of org.locationtech.geowave.core.store.operations.MetadataType in project geowave by locationtech.
The class RocksDBClient, method getMetadataTable().
public synchronized RocksDBMetadataTable getMetadataTable(final MetadataType type) {
  if (metadataOptions == null) {
    RocksDB.loadLibrary();
    metadataOptions = new Options().setCreateIfMissing(true).optimizeForSmallDb();
  }
  final String directory = subDirectory + "/" + type.id();
  return metadataTableCache.get(keyCache.get(directory, d -> new CacheKey(d, type.isStatValues())));
}
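
Here each MetadataType is backed by its own small RocksDB database whose directory name is the type's id under the client's sub-directory, with the table instances memoized through keyCache and metadataTableCache. A tiny hedged usage sketch follows; the `client` variable and the choice of INDEX are illustrative.

// Illustrative usage: fetch (and lazily create) the metadata table that backs
// index metadata; repeated calls return the cached RocksDBMetadataTable.
final RocksDBMetadataTable indexMetadata = client.getMetadataTable(MetadataType.INDEX);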