Example usage of org.locationtech.geowave.core.ingest.spark.SparkIngestDriver from the GeoWave project by LocationTech:
the method testSparkIngest in the class TestUtils.
/**
 * Ingests a local file (e.g. a GeoTools-readable shapefile) into GeoWave via the Spark ingest
 * driver, using pre-defined command-line style options, then verifies the resulting statistics.
 *
 * @param dataStore the data store plugin options to ingest into
 * @param dimensionalityType supplies the comma-delimited list of index names to ingest under
 * @param s3Url the S3 endpoint URL to configure for the AWS config command
 * @param ingestFilePath path to the file or directory to ingest
 * @param format the local ingest format plugin name (e.g. "geotools-vector")
 * @throws Exception if any configuration, ingest, or verification step fails
 */
public static void testSparkIngest(final DataStorePluginOptions dataStore, final DimensionalityType dimensionalityType, final String s3Url, final String ingestFilePath, final String format) throws Exception {
  // Comma-delimited index names; reused below instead of re-querying dimensionalityType.
  final String indexes = dimensionalityType.getDimensionalityArg();
  // Temp config file backing the operation params; clean it up when the JVM exits.
  final File configFile = File.createTempFile("test_spark_ingest", null);
  configFile.deleteOnExit();
  final ManualOperationParams operationParams = new ManualOperationParams();
  operationParams.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);
  // Point the AWS S3 endpoint at the supplied URL.
  final ConfigAWSCommand configS3 = new ConfigAWSCommand();
  configS3.setS3UrlParameter(s3Url);
  configS3.execute(operationParams);
  final LocalInputCommandLineOptions localOptions = new LocalInputCommandLineOptions();
  localOptions.setFormats(format);
  // Run Spark locally for the test.
  final SparkCommandLineOptions sparkOptions = new SparkCommandLineOptions();
  sparkOptions.setAppName("SparkIngestTest");
  sparkOptions.setMaster("local");
  sparkOptions.setHost("localhost");
  final SparkIngestDriver sparkIngester = new SparkIngestDriver();
  final Properties props = new Properties();
  dataStore.save(props, DataStorePluginOptions.getStoreNamespace("test"));
  // Register the store under the name "test" so the driver can resolve it.
  final AddStoreCommand addStore = new AddStoreCommand();
  addStore.setParameters("test");
  addStore.setPluginOptions(dataStore);
  addStore.execute(operationParams);
  final IndexStore indexStore = dataStore.createIndexStore();
  final org.locationtech.geowave.core.store.api.DataStore geowaveDataStore = dataStore.createDataStore();
  // Create any requested index that does not yet exist in the store.
  // Reuse the already-computed arg rather than calling getDimensionalityArg() again.
  final String[] indexTypes = indexes.split(",");
  for (final String indexType : indexTypes) {
    if (indexStore.getIndex(indexType) == null) {
      final IndexPluginOptions pluginOptions = new IndexPluginOptions();
      pluginOptions.selectPlugin(indexType);
      pluginOptions.setName(indexType);
      pluginOptions.save(props, IndexPluginOptions.getIndexNamespace(indexType));
      geowaveDataStore.addIndex(pluginOptions.createIndex(geowaveDataStore));
    }
  }
  props.setProperty(ConfigAWSCommand.AWS_S3_ENDPOINT_URL, s3Url);
  // Execute the Spark ingest and then verify statistics were written.
  sparkIngester.runOperation(configFile, localOptions, "test", indexes, new VisibilityOptions(), sparkOptions, ingestFilePath, new JCommander().getConsole());
  verifyStats(dataStore);
}
Example usage of org.locationtech.geowave.core.ingest.spark.SparkIngestDriver from the GeoWave project by LocationTech:
the method computeResults in the class SparkToGeoWaveCommand.
/**
 * Validates the positional arguments, then delegates to {@link SparkIngestDriver} to run the
 * ingest. Expects exactly three arguments: the input path, the store name, and a comma-delimited
 * list of index names.
 *
 * @param params the operation params supplying the config file and console
 * @return always {@code null}
 * @throws Exception if the argument count is wrong or the ingest fails
 */
@Override
public Void computeResults(final OperationParams params) throws Exception {
  // Guard: exactly three positional arguments are required.
  if (parameters.size() != 3) {
    throw new ParameterException("Requires arguments: <file or directory> <store name> <comma delimited index list>");
  }
  final String ingestPath = parameters.get(0);
  final String storeName = parameters.get(1);
  final String indexes = parameters.get(2);
  // Resolve the GeoWave config file from the operation params.
  final File config = getGeoWaveConfigFile(params);
  final SparkIngestDriver ingestDriver = new SparkIngestDriver();
  // Kick off the ingest; a false return indicates failure.
  final boolean succeeded = ingestDriver.runOperation(config, localInputOptions, storeName, indexes, ingestOptions, sparkOptions, ingestPath, params.getConsole());
  if (!succeeded) {
    throw new RuntimeException("Ingest failed to execute");
  }
  return null;
}
Example usage of org.locationtech.geowave.core.ingest.spark.SparkIngestDriver from the GeoWave project by LocationTech:
the method testAnonymousAccess in the class DefaultGeoWaveAWSCredentialsProviderTest.
/**
 * Verifies anonymous S3 access through the Spark ingest driver by pointing an S3 filesystem at a
 * local {@code S3Mock} server, creating a bucket with one object, and asserting the listing sees
 * exactly one entry.
 */
@Test
public void testAnonymousAccess() throws NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException, URISyntaxException, IOException {
  // BUG FIX: File.createTempFile creates a regular FILE, so the original mkdirs()
  // call silently failed (the file-backend path was never a directory). Delete the
  // file first so a directory can be created in its place, and clean up on exit.
  final File temp = File.createTempFile("temp", Long.toString(System.nanoTime()));
  temp.delete();
  temp.mkdirs();
  temp.deleteOnExit();
  // NOTE(review): both a file backend and an in-memory backend are configured here;
  // presumably the later withInMemoryBackend() call wins — confirm against S3Mock docs.
  final S3Mock mockS3 = new S3Mock.Builder().withPort(8001).withFileBackend(temp.getAbsolutePath()).withInMemoryBackend().build();
  mockS3.start();
  try {
    URLIngestUtils.setURLStreamHandlerFactory(URLTYPE.S3);
    final SparkIngestDriver sparkDriver = new SparkIngestDriver();
    // Initialize an S3 filesystem and redirect its client to the local mock endpoint.
    final S3FileSystem s3 = sparkDriver.initializeS3FS("s3://s3.amazonaws.com");
    s3.getClient().setEndpoint("http://127.0.0.1:8001");
    s3.getClient().createBucket("testbucket");
    s3.getClient().putObject("testbucket", "test", "content");
    // Listing the bucket root should show exactly the one object we put.
    try (Stream<Path> s = Files.list(URLIngestUtils.setupS3FileSystem("s3://testbucket/", "s3://s3.amazonaws.com"))) {
      Assert.assertEquals(1, s.count());
    }
  } finally {
    // BUG FIX: shut the mock server down even when an assertion fails, so port 8001
    // is released and subsequent tests are not blocked by a stale listener.
    mockS3.shutdown();
  }
}
Aggregations of SparkIngestDriver usages end here.