Use of org.apache.metron.pcap.PcapPages in project metron by apache.
From the class PcapJobTest, method setup:
@BeforeEach
public void setup() throws IOException {
  MockitoAnnotations.initMocks(this);
  basePath = new Path("basepath");
  baseOutPath = new Path("outpath");
  startTime = 100;
  endTime = 200;
  numReducers = 5;
  numRecordsPerFile = 5;
  fixedFields = new HashMap<>();
  fixedFields.put("ip_src_addr", "192.168.1.1");
  hadoopConfig = new Configuration();
  fileSystem = FileSystem.get(hadoopConfig);
  finalOutputPath = new Path("finaloutpath");
  // stub the MapReduce job/status mocks to report a stable job id
  when(jobId.toString()).thenReturn(jobIdVal);
  when(mrStatus.getJobID()).thenReturn(jobId);
  when(mrJob.getJobID()).thenReturn(jobId);
  pageableResult = new PcapPages();
  timer = new TestTimer();
  // handles setting the file name prefix under the hood
  config = new FixedPcapConfig(clock -> "clockprefix");
  PcapOptions.HADOOP_CONF.put(config, hadoopConfig);
  PcapOptions.FILESYSTEM.put(config, FileSystem.get(hadoopConfig));
  PcapOptions.BASE_PATH.put(config, basePath);
  PcapOptions.BASE_INTERIM_RESULT_PATH.put(config, baseOutPath);
  PcapOptions.START_TIME_NS.put(config, startTime);
  PcapOptions.END_TIME_NS.put(config, endTime);
  PcapOptions.NUM_REDUCERS.put(config, numReducers);
  PcapOptions.FIELDS.put(config, fixedFields);
  PcapOptions.FILTER_IMPL.put(config, new FixedPcapFilter.Configurator());
  PcapOptions.NUM_RECORDS_PER_FILE.put(config, numRecordsPerFile);
  PcapOptions.FINAL_OUTPUT_PATH.put(config, finalOutputPath);
  testJob = new TestJob<>(mrJob);
  testJob.setStatusInterval(1);
  testJob.setCompleteCheckInterval(1);
  testJob.setTimer(timer);
}
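The setup above drives all job configuration through PcapOptions keys, which act as typed accessors over the map-backed FixedPcapConfig. A minimal sketch of that put/get pattern; the typed read mirrors the get(config, Class) calls used in PcapFinalizer.finalizeJob below, and the values here are illustrative:

// Typed write: the PcapOptions key names the config entry.
PcapOptions.NUM_REDUCERS.put(config, 5);
// Typed read: the caller states the expected type explicitly.
Integer reducers = PcapOptions.NUM_REDUCERS.get(config, Integer.class);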
Use of org.apache.metron.pcap.PcapPages in project metron by apache.
From the class PcapJobTest, method job_succeeds_synchronously:
@Test
public void job_succeeds_synchronously() throws Exception {
  pageableResult = new PcapPages(Arrays.asList(new Path("1.txt"), new Path("2.txt"), new Path("3.txt")));
  when(finalizer.finalizeJob(any())).thenReturn(pageableResult);
  // the MR job reports complete and SUCCEEDED before the first status poll
  when(mrJob.isComplete()).thenReturn(true);
  when(mrStatus.getState()).thenReturn(org.apache.hadoop.mapreduce.JobStatus.State.SUCCEEDED);
  when(mrJob.getStatus()).thenReturn(mrStatus);
  Statusable<Path> statusable = testJob.submit(finalizer, config);
  timer.updateJobStatus();
  Pageable<Path> results = statusable.get();
  assertThat(results.getSize(), equalTo(3));
  JobStatus status = statusable.getStatus();
  assertThat(status.getState(), equalTo(State.SUCCEEDED));
  assertThat(status.getPercentComplete(), equalTo(100.0));
  assertThat(status.getJobId(), equalTo(jobIdVal));
}
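The success path completes synchronously because mrJob.isComplete() is stubbed to true before submission, so the single timer.updateJobStatus() poll observes a finished job. For contrast, a hedged sketch of a failure path under the same stubbing pattern; FAILED exists in Hadoop's JobStatus.State enum, but whether Metron surfaces it directly as State.FAILED on the Statusable is an assumption here:

when(mrJob.isComplete()).thenReturn(true);
when(mrStatus.getState()).thenReturn(org.apache.hadoop.mapreduce.JobStatus.State.FAILED);
when(mrJob.getStatus()).thenReturn(mrStatus);
Statusable<Path> statusable = testJob.submit(finalizer, config);
timer.updateJobStatus();
// assumed mapping: a FAILED MR state surfaces as State.FAILED on the Statusable
assertThat(statusable.getStatus().getState(), equalTo(State.FAILED));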
Use of org.apache.metron.pcap.PcapPages in project metron by apache.
From the class PcapJobTest, method handles_null_values_with_defaults:
@Test
public void handles_null_values_with_defaults() throws Exception {
  PcapOptions.START_TIME_NS.put(config, null);
  PcapOptions.END_TIME_NS.put(config, null);
  PcapOptions.NUM_REDUCERS.put(config, null);
  PcapOptions.NUM_RECORDS_PER_FILE.put(config, null);
  pageableResult = new PcapPages(Arrays.asList(new Path("1.txt"), new Path("2.txt"), new Path("3.txt")));
  when(finalizer.finalizeJob(any())).thenReturn(pageableResult);
  when(mrJob.isComplete()).thenReturn(true);
  when(mrStatus.getState()).thenReturn(org.apache.hadoop.mapreduce.JobStatus.State.SUCCEEDED);
  when(mrJob.getStatus()).thenReturn(mrStatus);
  Statusable<Path> statusable = testJob.submit(finalizer, config);
  timer.updateJobStatus();
  Pageable<Path> results = statusable.get();
  assertThat(results.getSize(), equalTo(3));
  JobStatus status = statusable.getStatus();
  assertThat(status.getState(), equalTo(State.SUCCEEDED));
  assertThat(status.getPercentComplete(), equalTo(100.0));
  assertThat(status.getJobId(), equalTo(jobIdVal));
}
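This test leaves START_TIME_NS, END_TIME_NS, NUM_REDUCERS, and NUM_RECORDS_PER_FILE null and expects the job to still succeed. The fallback it exercises is visible in finalizeJob below as getOrDefault; a minimal sketch, assuming a null entry falls back the same way as a missing one:

// NUM_RECORDS_PER_FILE was put as null above; the typed read supplies a default.
// 10 is a placeholder here, not Metron's actual NUM_RECORDS_PER_FILE_DEFAULT.
int recPerFile = PcapOptions.NUM_RECORDS_PER_FILE.getOrDefault(config, Integer.class, 10);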
Use of org.apache.metron.pcap.PcapPages in project metron by apache.
From the class PcapControllerIntegrationTest, method testRawDownload:
@Test
public void testRawDownload() throws Exception {
  String pcapFileContents = "pcap file contents";
  FileUtils.write(new File("./target/pcapFile"), pcapFileContents, "UTF8");
  MockPcapJob mockPcapJob = (MockPcapJob) wac.getBean("mockPcapJob");
  mockPcapJob.setStatus(new JobStatus().withJobId("jobId").withState(JobStatus.State.RUNNING));
  // submit a fixed pcap query; the mock job initially reports RUNNING
  this.mockMvc.perform(post(pcapUrl + "/fixed")
          .with(httpBasic(user, password))
          .with(csrf())
          .contentType(MediaType.parseMediaType("application/json;charset=UTF-8"))
          .content(fixedJson))
      .andExpect(status().isOk())
      .andExpect(content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8")))
      .andExpect(jsonPath("$.jobId").value("jobId"))
      .andExpect(jsonPath("$.jobStatus").value("RUNNING"));
  Pageable<Path> pageable = new PcapPages(Arrays.asList(new Path("./target/pcapFile")));
  mockPcapJob.setIsDone(true);
  mockPcapJob.setPageable(pageable);
  // page 1 downloads with the default attachment name pcap_<jobId>_<page>.pcap
  this.mockMvc.perform(get(pcapUrl + "/jobId/raw?page=1").with(httpBasic(user, password)))
      .andExpect(status().isOk())
      .andExpect(header().string("Content-Disposition", "attachment; filename=\"pcap_jobId_1.pcap\""))
      .andExpect(header().string("Content-Length", Integer.toString(pcapFileContents.length())))
      .andExpect(content().contentType(MediaType.parseMediaType("application/octet-stream")))
      .andExpect(content().bytes(pcapFileContents.getBytes(StandardCharsets.UTF_8)));
  // an explicit fileName parameter overrides the default attachment name
  this.mockMvc.perform(get(pcapUrl + "/jobId/raw?page=1&fileName=pcapFile.pcap").with(httpBasic(user, password)))
      .andExpect(status().isOk())
      .andExpect(header().string("Content-Disposition", "attachment; filename=\"pcapFile.pcap\""))
      .andExpect(header().string("Content-Length", Integer.toString(pcapFileContents.length())))
      .andExpect(content().contentType(MediaType.parseMediaType("application/octet-stream")))
      .andExpect(content().bytes(pcapFileContents.getBytes(StandardCharsets.UTF_8)));
  // only one page exists, so page 2 is a 404
  this.mockMvc.perform(get(pcapUrl + "/jobId/raw?page=2").with(httpBasic(user, password)))
      .andExpect(status().isNotFound());
}
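A hypothetical sketch of the page-to-file resolution the raw endpoint performs over the Pageable; the getPage(int) call and zero-based indexing are assumptions, since only getSize appears in this listing:

Pageable<Path> pageable = new PcapPages(Arrays.asList(new Path("./target/pcapFile")));
int page = 2; // the 1-based page query parameter
if (page < 1 || page > pageable.getSize()) {
  // out of range: the endpoint answers 404 Not Found, as asserted above
} else {
  Path file = pageable.getPage(page - 1); // assumed zero-based lookup
  // stream the file back as application/octet-stream
}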
Use of org.apache.metron.pcap.PcapPages in project metron by apache.
From the class PcapFinalizer, method finalizeJob:
@Override
public Pageable<Path> finalizeJob(Map<String, Object> config) throws JobException {
  Configuration hadoopConfig = PcapOptions.HADOOP_CONF.get(config, Configuration.class);
  int recPerFile = PcapOptions.NUM_RECORDS_PER_FILE.getOrDefault(config, Integer.class, NUM_RECORDS_PER_FILE_DEFAULT);
  Path interimResultPath = PcapOptions.INTERIM_RESULT_PATH.get(config, PcapOptions.STRING_TO_PATH, Path.class);
  FileSystem fs = PcapOptions.FILESYSTEM.get(config, FileSystem.class);
  int parallelism = getNumThreads(PcapOptions.FINALIZER_THREADPOOL_SIZE.get(config, String.class));
  LOG.info("Finalizer running with parallelism set to " + parallelism);
  SequenceFileIterable interimResults = null;
  try {
    interimResults = readInterimResults(interimResultPath, hadoopConfig, fs);
  } catch (IOException e) {
    throw new JobException("Unable to read interim job results while finalizing", e);
  }
  List<Path> outFiles = new ArrayList<>();
  try {
    // split the interim records into fixed-size pages, one output file per page
    Iterable<List<byte[]>> partitions = Iterables.partition(interimResults, recPerFile);
    Map<Path, List<byte[]>> toWrite = new HashMap<>();
    int part = 1;
    if (partitions.iterator().hasNext()) {
      for (List<byte[]> data : partitions) {
        Path outputPath = getOutputPath(config, part++);
        toWrite.put(outputPath, data);
      }
      outFiles = writeParallel(hadoopConfig, toWrite, parallelism);
    } else {
      LOG.info("No results returned.");
    }
  } catch (IOException e) {
    throw new JobException("Failed to finalize results", e);
  } finally {
    try {
      interimResults.cleanup();
    } catch (IOException e) {
      LOG.warn("Unable to cleanup files in HDFS", e);
    }
  }
  LOG.info("Done finalizing results");
  return new PcapPages(outFiles);
}
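A minimal sketch of consuming the PcapPages returned by finalizeJob, using getSize (asserted in the tests above) and the assumed getPage(int) accessor:

Pageable<Path> pages = finalizer.finalizeJob(config);
for (int i = 0; i < pages.getSize(); i++) {
  // each page is one written output file holding up to recPerFile records
  Path out = pages.getPage(i); // getPage(int) and zero-based indexing assumed
}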