Use of org.apache.metron.pcap.config.QueryPcapConfig in project metron by apache.
The class QueryCliParser, method parse.
/**
* Parses query pcap filter options and required parameters common to all filter types.
*
* @param args command line arguments to parse
 * @return Configuration tailored to query pcap jobs
 * @throws ParseException if the command line options cannot be parsed
 * @throws java.text.ParseException if a date option value cannot be parsed
*/
public QueryPcapConfig parse(String[] args) throws ParseException, java.text.ParseException {
  CommandLine commandLine = getParser().parse(queryOptions, args);
  QueryPcapConfig config = new QueryPcapConfig(prefixStrategy);
  super.parse(commandLine, config);
  if (commandLine.hasOption("query")) {
    config.setQuery(commandLine.getOptionValue("query"));
  }
  if (commandLine.hasOption("prefix")) {
    config.setFinalFilenamePrefix(commandLine.getOptionValue("prefix"));
  }
  return config;
}
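
For orientation, here is a minimal, hypothetical sketch of driving this parser directly rather than through PcapCli (shown below). The prefixStrategy variable is assumed to be the same strategy normally passed to the parser's constructor, and getQuery() is assumed to mirror the setQuery() call above; neither is confirmed by this snippet.

QueryCliParser parser = new QueryCliParser(prefixStrategy);
try {
  // args would be the raw command line, e.g. the remaining args after Hadoop option parsing
  QueryPcapConfig config = parser.parse(args);
  if (config.showHelp()) {
    parser.printHelp();
  } else {
    System.out.println("Parsed query filter: " + config.getQuery());  // getQuery() is an assumed accessor
  }
} catch (ParseException | java.text.ParseException e) {
  System.err.println(e.getMessage());
  parser.printHelp();
}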
Use of org.apache.metron.pcap.config.QueryPcapConfig in project metron by apache.
The class PcapCli, method run.
public int run(String[] args) {
  if (args.length < 1) {
    printBasicHelp();
    return -1;
  }
  String jobType = args[0];
  String[] commandArgs = Arrays.copyOfRange(args, 1, args.length);
  Configuration hadoopConf = new Configuration();
  String[] otherArgs = null;
  try {
    otherArgs = new GenericOptionsParser(hadoopConf, commandArgs).getRemainingArgs();
  } catch (IOException e) {
    LOGGER.error("Failed to configure hadoop with provided options: {}", e.getMessage(), e);
    return -1;
  }
  PcapConfig commonConfig = null;
  Pageable<Path> results;
  // write to local FS in the executing directory
  String execDir = System.getProperty("user.dir");
  if ("fixed".equals(jobType)) {
    FixedCliParser fixedParser = new FixedCliParser(prefixStrategy);
    FixedPcapConfig config = null;
    try {
      config = fixedParser.parse(otherArgs);
      commonConfig = config;
      PcapOptions.FINAL_OUTPUT_PATH.put(commonConfig, new Path(execDir));
    } catch (ParseException | java.text.ParseException e) {
      System.err.println(e.getMessage());
      System.err.flush();
      fixedParser.printHelp();
      return -1;
    }
    if (config.showHelp()) {
      fixedParser.printHelp();
      return 0;
    }
    PcapOptions.FILTER_IMPL.put(commonConfig, new FixedPcapFilter.Configurator());
    config.getYarnQueue().ifPresent(s -> hadoopConf.set(MRJobConfig.QUEUE_NAME, s));
    PcapOptions.HADOOP_CONF.put(commonConfig, hadoopConf);
    try {
      PcapOptions.FILESYSTEM.put(commonConfig, FileSystem.get(hadoopConf));
      results = jobRunner.submit(PcapFinalizerStrategies.CLI, commonConfig).get();
    } catch (IOException | InterruptedException | JobException e) {
      LOGGER.error("Failed to execute fixed filter job: {}", e.getMessage(), e);
      return -1;
    }
  } else if ("query".equals(jobType)) {
    QueryCliParser queryParser = new QueryCliParser(prefixStrategy);
    QueryPcapConfig config = null;
    try {
      config = queryParser.parse(otherArgs);
      commonConfig = config;
      PcapOptions.FINAL_OUTPUT_PATH.put(commonConfig, new Path(execDir));
    } catch (ParseException | java.text.ParseException e) {
      System.err.println(e.getMessage());
      System.err.flush();
      queryParser.printHelp();
      return -1;
    }
    if (config.showHelp()) {
      queryParser.printHelp();
      return 0;
    }
    // the "query" job type uses the query filter implementation
    PcapOptions.FILTER_IMPL.put(commonConfig, new QueryPcapFilter.Configurator());
    config.getYarnQueue().ifPresent(s -> hadoopConf.set(MRJobConfig.QUEUE_NAME, s));
    PcapOptions.HADOOP_CONF.put(commonConfig, hadoopConf);
    try {
      PcapOptions.FILESYSTEM.put(commonConfig, FileSystem.get(hadoopConf));
      results = jobRunner.submit(PcapFinalizerStrategies.CLI, commonConfig).get();
    } catch (IOException | InterruptedException | JobException e) {
      LOGGER.error("Failed to execute query filter job: {}", e.getMessage(), e);
      return -1;
    }
  } else {
    printBasicHelp();
    return -1;
  }
  return 0;
}
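
The run method above assigns results but returns before using it; in a fuller driver the output locations would presumably be reported back to the user before the final return 0. A hedged sketch of that step follows, assuming Pageable<Path> exposes getSize() and getPage(int) accessors, which this snippet does not confirm.

// Hedged sketch: report where the finalized pcap files were written,
// placed just before the final return 0. getSize() and getPage(int)
// are assumed accessors on Pageable<Path>.
if (results.getSize() == 0) {
  System.out.println("No results returned for the given filter.");
} else {
  for (int i = 0; i < results.getSize(); i++) {
    System.out.println("Wrote results to " + results.getPage(i));
  }
}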