Use of com.splunk.JobExportArgs in the Apache NiFi project.
From class TestGetSplunk, method testGetWithManagedFromBeginning:
@Test
public void testGetWithManagedFromBeginning() throws ParseException {
    final String query = "search tcp:7879";
    final String outputMode = GetSplunk.ATOM_VALUE.getValue();

    runner.setProperty(GetSplunk.QUERY, query);
    runner.setProperty(GetSplunk.OUTPUT_MODE, outputMode);
    runner.setProperty(GetSplunk.TIME_RANGE_STRATEGY, GetSplunk.MANAGED_BEGINNING_VALUE.getValue());

    final String resultContent = "fake results";
    final ByteArrayInputStream input = new ByteArrayInputStream(resultContent.getBytes(StandardCharsets.UTF_8));
    when(service.export(eq(query), any(JobExportArgs.class))).thenReturn(input);

    // run once and don't shut down
    runner.run(1, false);
    runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, 1);

    // capture the args passed to the export call
    final ArgumentCaptor<JobExportArgs> capture1 = ArgumentCaptor.forClass(JobExportArgs.class);
    verify(service, times(1)).export(eq(query), capture1.capture());

    // on the first execution, with no previous state, "managed from beginning" should set a latest time and no earliest time
    final JobExportArgs actualArgs1 = capture1.getValue();
    Assert.assertNotNull(actualArgs1);
    Assert.assertNull(actualArgs1.get("earliest_time"));
    Assert.assertNotNull(actualArgs1.get("latest_time"));

    // save the latest time from the first run; plus one millisecond it becomes the expected earliest time of the next run
    final String lastLatest = (String) actualArgs1.get("latest_time");
    final SimpleDateFormat format = new SimpleDateFormat(GetSplunk.DATE_TIME_FORMAT);
    format.setTimeZone(TimeZone.getTimeZone("UTC"));

    final Date lastLatestDate = format.parse(lastLatest);
    final String expectedLatest = format.format(new Date(lastLatestDate.getTime() + 1));

    // run again
    runner.run(1, false);
    runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, 2);

    final ArgumentCaptor<JobExportArgs> capture2 = ArgumentCaptor.forClass(JobExportArgs.class);
    verify(service, times(2)).export(eq(query), capture2.capture());

    // on the second execution the earliest time should be the previous latest_time plus one millisecond
    final JobExportArgs actualArgs2 = capture2.getValue();
    Assert.assertNotNull(actualArgs2);
    Assert.assertEquals(expectedLatest, actualArgs2.get("earliest_time"));
    Assert.assertNotNull(actualArgs2.get("latest_time"));
}
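The snippets on this page reference a shared runner and a mocked service that are created in the test class setup rather than in the methods themselves. Below is a minimal sketch of that fixture, assuming Mockito and the NiFi test runner; the TestableGetSplunk subclass and the createSplunkService override are assumptions about how the mock is injected, not text copied from this page.

import com.splunk.Service;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Before;
import org.mockito.Mockito;

// Sketch of the assumed shared fixture used by the snippets on this page.
public class TestGetSplunkFixtureSketch {

    private Service service;   // mock of com.splunk.Service, the object stubbed with when(service.export(...))
    private TestRunner runner; // NiFi test runner wrapping the processor under test

    @Before
    public void setup() {
        service = Mockito.mock(Service.class);
        runner = TestRunners.newTestRunner(new TestableGetSplunk(service));
    }

    // Test-only subclass that hands back the mock instead of opening a real Splunk connection.
    // The overridden method name is an assumption about the processor's extension point.
    private static class TestableGetSplunk extends GetSplunk {
        private final Service mockService;

        TestableGetSplunk(Service mockService) {
            this.mockService = mockService;
        }

        @Override
        protected Service createSplunkService(ProcessContext context) {
            return mockService;
        }
    }
}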
Use of com.splunk.JobExportArgs in the Apache NiFi project.
From class TestGetSplunk, method testGetWithManagedFromBeginningWithDifferentTimeZone:
@Test
public void testGetWithManagedFromBeginningWithDifferentTimeZone() throws ParseException {
    final String query = "search tcp:7879";
    final String outputMode = GetSplunk.ATOM_VALUE.getValue();
    final TimeZone timeZone = TimeZone.getTimeZone("PST");

    runner.setProperty(GetSplunk.QUERY, query);
    runner.setProperty(GetSplunk.OUTPUT_MODE, outputMode);
    runner.setProperty(GetSplunk.TIME_RANGE_STRATEGY, GetSplunk.MANAGED_BEGINNING_VALUE.getValue());
    runner.setProperty(GetSplunk.TIME_ZONE, timeZone.getID());

    final String resultContent = "fake results";
    final ByteArrayInputStream input = new ByteArrayInputStream(resultContent.getBytes(StandardCharsets.UTF_8));
    when(service.export(eq(query), any(JobExportArgs.class))).thenReturn(input);

    // run once and don't shut down
    runner.run(1, false);
    runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, 1);

    // capture the args passed to the export call
    final ArgumentCaptor<JobExportArgs> capture1 = ArgumentCaptor.forClass(JobExportArgs.class);
    verify(service, times(1)).export(eq(query), capture1.capture());

    // on the first execution, with no previous state, "managed from beginning" should set a latest time and no earliest time
    final JobExportArgs actualArgs1 = capture1.getValue();
    Assert.assertNotNull(actualArgs1);
    Assert.assertNull(actualArgs1.get("earliest_time"));
    Assert.assertNotNull(actualArgs1.get("latest_time"));

    // save the latest time from the first run; plus one millisecond it becomes the expected earliest time of the next run,
    // parsed and formatted in the configured time zone rather than UTC
    final String lastLatest = (String) actualArgs1.get("latest_time");
    final SimpleDateFormat format = new SimpleDateFormat(GetSplunk.DATE_TIME_FORMAT);
    format.setTimeZone(timeZone);

    final Date lastLatestDate = format.parse(lastLatest);
    final String expectedLatest = format.format(new Date(lastLatestDate.getTime() + 1));

    // run again
    runner.run(1, false);
    runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, 2);

    final ArgumentCaptor<JobExportArgs> capture2 = ArgumentCaptor.forClass(JobExportArgs.class);
    verify(service, times(2)).export(eq(query), capture2.capture());

    // on the second execution the earliest time should be the previous latest_time plus one millisecond
    final JobExportArgs actualArgs2 = capture2.getValue();
    Assert.assertNotNull(actualArgs2);
    Assert.assertEquals(expectedLatest, actualArgs2.get("earliest_time"));
    Assert.assertNotNull(actualArgs2.get("latest_time"));
}
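The only difference from the previous test is that the window boundaries are formatted and parsed in the configured time zone instead of UTC. A small, self-contained illustration of why that matters follows; the literal pattern string below is an assumption for demonstration only, GetSplunk.DATE_TIME_FORMAT is the authoritative constant.

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

public class TimeZoneFormatSketch {
    public static void main(String[] args) {
        // Assumed pattern for illustration; the processor uses GetSplunk.DATE_TIME_FORMAT.
        final String pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSZ";

        final SimpleDateFormat utc = new SimpleDateFormat(pattern);
        utc.setTimeZone(TimeZone.getTimeZone("UTC"));

        final SimpleDateFormat pst = new SimpleDateFormat(pattern);
        pst.setTimeZone(TimeZone.getTimeZone("PST"));

        final Date now = new Date();
        // The same instant renders as two different strings; because the processor persists,
        // parses, and compares these strings, the configured Time Zone property has to stay
        // consistent between runs for the managed window to line up.
        System.out.println(utc.format(now)); // e.g. 2017-06-01T12:00:00.000+0000
        System.out.println(pst.format(now)); // e.g. 2017-06-01T05:00:00.000-0700
    }
}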
Use of com.splunk.JobExportArgs in the Apache NiFi project.
From class TestGetSplunk, method testGetWithManagedFromBeginningWithShutdown:
@Test
public void testGetWithManagedFromBeginningWithShutdown() throws ParseException {
    final String query = "search tcp:7879";
    final String outputMode = GetSplunk.ATOM_VALUE.getValue();

    runner.setProperty(GetSplunk.QUERY, query);
    runner.setProperty(GetSplunk.OUTPUT_MODE, outputMode);
    runner.setProperty(GetSplunk.TIME_RANGE_STRATEGY, GetSplunk.MANAGED_BEGINNING_VALUE.getValue());

    final String resultContent = "fake results";
    final ByteArrayInputStream input = new ByteArrayInputStream(resultContent.getBytes(StandardCharsets.UTF_8));
    when(service.export(eq(query), any(JobExportArgs.class))).thenReturn(input);

    // run once and shut down
    runner.run(1, true);
    runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, 1);

    // capture the args passed to the export call
    final ArgumentCaptor<JobExportArgs> capture1 = ArgumentCaptor.forClass(JobExportArgs.class);
    verify(service, times(1)).export(eq(query), capture1.capture());

    // on the first execution, with no previous state, "managed from beginning" should set a latest time and no earliest time
    final JobExportArgs actualArgs1 = capture1.getValue();
    Assert.assertNotNull(actualArgs1);
    Assert.assertNull(actualArgs1.get("earliest_time"));
    Assert.assertNotNull(actualArgs1.get("latest_time"));

    // save the latest time from the first run; plus one millisecond it becomes the expected earliest time of the next run
    final String lastLatest = (String) actualArgs1.get("latest_time");
    final SimpleDateFormat format = new SimpleDateFormat(GetSplunk.DATE_TIME_FORMAT);
    format.setTimeZone(TimeZone.getTimeZone("UTC"));

    final Date lastLatestDate = format.parse(lastLatest);
    final String expectedLatest = format.format(new Date(lastLatestDate.getTime() + 1));

    // run again after the shutdown
    runner.run(1, true);
    runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, 2);

    final ArgumentCaptor<JobExportArgs> capture2 = ArgumentCaptor.forClass(JobExportArgs.class);
    verify(service, times(2)).export(eq(query), capture2.capture());

    // on the second execution the earliest time should be the previous latest_time plus one millisecond
    final JobExportArgs actualArgs2 = capture2.getValue();
    Assert.assertNotNull(actualArgs2);
    Assert.assertEquals(expectedLatest, actualArgs2.get("earliest_time"));
    Assert.assertNotNull(actualArgs2.get("latest_time"));
}
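The shutdown variant passes because the end of the managed window is persisted to processor state rather than held in memory, so it survives the stop between the two runner.run(1, true) calls. A minimal fragment that could be dropped into the test above right after the first run to make that visible; it borrows Scope.CLUSTER and GetSplunk.LATEST_TIME_KEY from the next snippet on this page, and the enclosing method would also need to declare IOException.

    // Fragment only, not a standalone test: reuses the runner field and the imports of the
    // surrounding class, and assumes it is placed after the first runner.run(1, true) call.
    final StateMap persisted = runner.getStateManager().getState(Scope.CLUSTER);
    Assert.assertNotNull(persisted);
    Assert.assertNotNull(persisted.get(GetSplunk.LATEST_TIME_KEY)); // the boundary that survives the shutdown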
Use of com.splunk.JobExportArgs in the Apache NiFi project.
From class TestGetSplunk, method testGetWithManagedFromCurrentUsingEventTime:
@Test
public void testGetWithManagedFromCurrentUsingEventTime() throws IOException, ParseException {
    final String query = "search tcp:7879";
    final String outputMode = GetSplunk.ATOM_VALUE.getValue();

    runner.setProperty(GetSplunk.QUERY, query);
    runner.setProperty(GetSplunk.OUTPUT_MODE, outputMode);
    runner.setProperty(GetSplunk.TIME_RANGE_STRATEGY, GetSplunk.MANAGED_CURRENT_VALUE.getValue());

    final String resultContent = "fake results";
    final ByteArrayInputStream input = new ByteArrayInputStream(resultContent.getBytes(StandardCharsets.UTF_8));
    when(service.export(eq(query), any(JobExportArgs.class))).thenReturn(input);

    // run once and don't shut down; "managed from current" only records a starting point,
    // so the first run should not call export or produce any results
    runner.run(1, false);
    runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, 0);
    verify(service, times(0)).export(eq(query), any(JobExportArgs.class));

    // the starting point should have been written to cluster-scoped processor state
    final StateMap state = runner.getStateManager().getState(Scope.CLUSTER);
    Assert.assertNotNull(state);
    Assert.assertTrue(state.getVersion() > 0);

    // save the latest time from the first run; plus one millisecond it becomes the expected earliest time of the next run
    final String lastLatest = state.get(GetSplunk.LATEST_TIME_KEY);
    final SimpleDateFormat format = new SimpleDateFormat(GetSplunk.DATE_TIME_FORMAT);
    format.setTimeZone(TimeZone.getTimeZone("UTC"));

    final Date lastLatestDate = format.parse(lastLatest);
    final String expectedLatest = format.format(new Date(lastLatestDate.getTime() + 1));

    // run again
    runner.run(1, false);
    runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, 1);

    final ArgumentCaptor<JobExportArgs> capture = ArgumentCaptor.forClass(JobExportArgs.class);
    verify(service, times(1)).export(eq(query), capture.capture());

    // on the second execution the earliest time should be the previous latest_time plus one millisecond
    final JobExportArgs actualArgs = capture.getValue();
    Assert.assertNotNull(actualArgs);
    Assert.assertEquals(expectedLatest, actualArgs.get("earliest_time"));
    Assert.assertNotNull(actualArgs.get("latest_time"));
}
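For reference, the "earliest_time" and "latest_time" keys asserted on above are ordinary entries in the JobExportArgs map, which the Splunk Java SDK populates through typed setters. A minimal sketch of that relationship follows; the timestamp values are illustrative, not taken from the tests.

import com.splunk.JobExportArgs;

public class JobExportArgsSketch {
    public static void main(String[] args) {
        // JobExportArgs is a map-backed argument holder; the setters below write the
        // "earliest_time" and "latest_time" entries that the tests read back with get(...).
        final JobExportArgs exportArgs = new JobExportArgs();
        exportArgs.setSearchMode(JobExportArgs.SearchMode.NORMAL);
        exportArgs.setEarliestTime("2017-06-01T12:00:00.000+0000"); // illustrative values
        exportArgs.setLatestTime("2017-06-01T12:01:00.000+0000");

        System.out.println(exportArgs.get("earliest_time")); // 2017-06-01T12:00:00.000+0000
        System.out.println(exportArgs.get("latest_time"));   // 2017-06-01T12:01:00.000+0000
    }
}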