Usage of org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand in the locationtech/geowave project — class MapReduceTestUtils, method testMapReduceIngest:
/**
 * Ingests the given file into GeoWave through the map-reduce ingest framework,
 * using pre-defined command-line arguments.
 *
 * @param dataStore the data store plugin options to ingest into
 * @param dimensionalityType supplies the comma-separated index types to create
 * @param format the ingest format plugin to use
 * @param ingestFilePath path of the local file (e.g. GPX data) to ingest
 * @throws Exception if any command in the ingest pipeline fails
 */
public static void testMapReduceIngest(final DataStorePluginOptions dataStore, final DimensionalityType dimensionalityType, final String format, final String ingestFilePath) throws Exception {
  LOGGER.warn("Ingesting '" + ingestFilePath + "' - this may take several minutes...");
  final Thread progressLogger = startProgressLogger();
  try {
    // Indexes: the dimensionality arg may name several comma-separated index types
    final String[] indexTypes = dimensionalityType.getDimensionalityArg().split(",");
    final List<IndexPluginOptions> indexOptions = new ArrayList<>(indexTypes.length);
    for (final String indexType : indexTypes) {
      final IndexPluginOptions indexOption = new IndexPluginOptions();
      indexOption.selectPlugin(indexType);
      indexOptions.add(indexOption);
    }
    // Ingest format
    final MapReduceTestEnvironment env = MapReduceTestEnvironment.getInstance();
    final IngestFormatPluginOptions ingestFormatOptions = new IngestFormatPluginOptions();
    ingestFormatOptions.selectPlugin(format);
    // create temporary config file and use it for hdfs FS URL config
    final File configFile = File.createTempFile("test_mr", null);
    // avoid leaking temp config files across test runs
    configFile.deleteOnExit();
    final ManualOperationParams operationParams = new ManualOperationParams();
    operationParams.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);
    final ConfigHDFSCommand configHdfs = new ConfigHDFSCommand();
    configHdfs.setHdfsUrlParameter(env.getHdfs());
    configHdfs.execute(operationParams);
    final LocalToMapReduceToGeoWaveCommand mrGw = new LocalToMapReduceToGeoWaveCommand();
    final AddStoreCommand addStore = new AddStoreCommand();
    addStore.setParameters("test-store");
    addStore.setPluginOptions(dataStore);
    addStore.execute(operationParams);
    final IndexStore indexStore = dataStore.createIndexStore();
    final DataStore geowaveDataStore = dataStore.createDataStore();
    // Ensure each test index exists and build the comma-separated index
    // parameter for the ingest command (trailing comma preserved to match
    // the original behavior).
    final StringBuilder indexParam = new StringBuilder();
    for (int i = 0; i < indexOptions.size(); i++) {
      final String indexName = "testIndex" + i;
      if (indexStore.getIndex(indexName) == null) {
        indexOptions.get(i).setName(indexName);
        geowaveDataStore.addIndex(indexOptions.get(i).createIndex(geowaveDataStore));
      }
      indexParam.append(indexName).append(',');
    }
    mrGw.setPluginFormats(ingestFormatOptions);
    mrGw.setParameters(ingestFilePath, env.getHdfsBaseDirectory(), "test-store", indexParam.toString());
    mrGw.getMapReduceOptions().setJobTrackerHostPort(env.getJobtracker());
    mrGw.execute(operationParams);
  } finally {
    // Always stop the progress-logging thread, even if the ingest failed.
    progressLogger.interrupt();
  }
}
Usage of org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand in the locationtech/geowave project — class GeoWaveGrpcCoreMapreduceService, method configHDFSCommand:
/**
 * Handles the gRPC configHDFS request: maps the request's fields onto a
 * {@link ConfigHDFSCommand}, executes it against the server's GeoWave config
 * file, and reports success (empty void response) or failure through the
 * observer.
 *
 * @param request the proto carrying the command parameters
 * @param responseObserver receives a void response on success, or the
 *        exception via {@code onError} on failure
 */
@Override
public void configHDFSCommand(final org.locationtech.geowave.service.grpc.protobuf.ConfigHDFSCommandParametersProtos request, final StreamObserver<org.locationtech.geowave.service.grpc.protobuf.GeoWaveReturnTypesProtos.VoidResponseProtos> responseObserver) {
  final ConfigHDFSCommand cmd = new ConfigHDFSCommand();
  final Map<FieldDescriptor, Object> m = request.getAllFields();
  GeoWaveGrpcServiceCommandUtil.setGrpcToCommandFields(m, cmd);
  final File configFile = GeoWaveGrpcServiceOptions.geowaveConfigFile;
  final OperationParams params = new ManualOperationParams();
  params.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);
  LOGGER.info("Executing ConfigHDFSCommand...");
  try {
    // prepare() is inside the try so that preparation failures are reported
    // to the client via onError instead of escaping as an uncaught
    // server-side exception.
    cmd.prepare(params);
    cmd.computeResults(params);
    final VoidResponseProtos resp = VoidResponseProtos.newBuilder().build();
    responseObserver.onNext(resp);
    responseObserver.onCompleted();
  } catch (final Exception e) {
    LOGGER.error("Exception encountered executing command", e);
    responseObserver.onError(e);
  }
}
Usage of org.locationtech.geowave.mapreduce.operations.ConfigHDFSCommand in the locationtech/geowave project — class MapReduceTestEnvironment, method setup:
/**
 * Resolves the HDFS and job-tracker endpoints from the {@code hdfs} and
 * {@code jobtracker} system properties, falling back to a local
 * {@code file:///} filesystem and the default job tracker, and optionally
 * bootstraps a Kerberos test environment.
 *
 * @throws Exception if writing the temp config or Kerberos setup fails
 */
@Override
public void setup() throws Exception {
  hdfs = System.getProperty("hdfs");
  jobtracker = System.getProperty("jobtracker");
  if (!TestUtils.isSet(hdfs)) {
    // No HDFS configured: run against the local filesystem instead.
    hdfs = "file:///";
    hdfsBaseDirectory = TestUtils.TEMP_DIR.toURI().toURL().toString() + "/" + HDFS_BASE_DIRECTORY;
    hdfsProtocol = false;
    // create temporary config file and use it for hdfs FS URL config
    configFile = File.createTempFile("test_mr", null);
    // avoid leaking temp config files across test runs
    configFile.deleteOnExit();
    operationParams = new ManualOperationParams();
    operationParams.getContext().put(ConfigOptions.PROPERTIES_FILE_CONTEXT, configFile);
    final ConfigHDFSCommand configHdfs = new ConfigHDFSCommand();
    configHdfs.setHdfsUrlParameter(hdfs);
    configHdfs.execute(operationParams);
  } else {
    hdfsBaseDirectory = HDFS_BASE_DIRECTORY;
    if (!hdfs.contains("://")) {
      // Bare host[:port] supplied; assume the hdfs:// scheme.
      hdfs = "hdfs://" + hdfs;
      hdfsProtocol = true;
    } else {
      hdfsProtocol = hdfs.toLowerCase(Locale.ENGLISH).startsWith("hdfs://");
    }
  }
  if (KerberosTestEnvironment.useKerberos()) {
    if (!KerberosTestEnvironment.getInstance().isRunning()) {
      KerberosTestEnvironment.getInstance().setup();
    }
    // Write a minimal Kerberos Hadoop configuration and register it as a
    // default resource so later Configuration instances pick it up.
    final Configuration kerberosConfig = new Configuration(false);
    kerberosConfigFile = new File("./target/test-classes/kerberos-config.xml");
    kerberosConfig.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    final ClusterUser user = KerberosTestEnvironment.getInstance().getRootUser();
    kerberosConfig.set(YarnConfiguration.RM_PRINCIPAL, user.getPrincipal());
    kerberosConfig.set(YarnConfiguration.RM_KEYTAB, user.getKeytab().getAbsolutePath());
    TestUtils.writeConfigToFile(kerberosConfigFile, kerberosConfig);
    Configuration.addDefaultResource(kerberosConfigFile.getName());
  }
  if (!TestUtils.isSet(jobtracker)) {
    jobtracker = DEFAULT_JOB_TRACKER;
  }
}
Aggregations