Use of com.splunk.JobExportArgs in the Apache NiFi project:
the GetSplunk class, onTrigger method.
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
final long currentTime = System.currentTimeMillis();
synchronized (isInitialized) {
if (!isInitialized.get()) {
splunkService = createSplunkService(context);
isInitialized.set(true);
}
}
final String query = context.getProperty(QUERY).getValue();
final String outputMode = context.getProperty(OUTPUT_MODE).getValue();
final String timeRangeStrategy = context.getProperty(TIME_RANGE_STRATEGY).getValue();
final String timeZone = context.getProperty(TIME_ZONE).getValue();
final String timeFieldStrategy = context.getProperty(TIME_FIELD_STRATEGY).getValue();
final JobExportArgs exportArgs = new JobExportArgs();
exportArgs.setSearchMode(JobExportArgs.SearchMode.NORMAL);
exportArgs.setOutputMode(JobExportArgs.OutputMode.valueOf(outputMode));
String earliestTime = null;
String latestTime = null;
if (PROVIDED_VALUE.getValue().equals(timeRangeStrategy)) {
// for provided we just use the values of the properties
earliestTime = context.getProperty(EARLIEST_TIME).getValue();
latestTime = context.getProperty(LATEST_TIME).getValue();
} else {
try {
// not provided so we need to check the previous state
final TimeRange previousRange = loadState(context.getStateManager());
final SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_TIME_FORMAT);
dateFormat.setTimeZone(TimeZone.getTimeZone(timeZone));
if (previousRange == null) {
// no previous state so set the earliest time based on the strategy
if (MANAGED_CURRENT_VALUE.getValue().equals(timeRangeStrategy)) {
earliestTime = dateFormat.format(new Date(currentTime));
}
// no previous state so set the latest time to the current time
latestTime = dateFormat.format(new Date(currentTime));
// initial time saved and next execution will be the first real execution
if (latestTime.equals(earliestTime)) {
saveState(context.getStateManager(), new TimeRange(earliestTime, latestTime));
return;
}
} else {
// we have previous state so set earliestTime to (latestTime + 1) of last range
try {
final String previousLastTime = previousRange.getLatestTime();
final Date previousLastDate = dateFormat.parse(previousLastTime);
earliestTime = dateFormat.format(new Date(previousLastDate.getTime() + 1));
latestTime = dateFormat.format(new Date(currentTime));
} catch (ParseException e) {
throw new ProcessException(e);
}
}
} catch (IOException e) {
getLogger().error("Unable to load data from State Manager due to {}", new Object[] { e.getMessage() }, e);
context.yield();
return;
}
}
if (!StringUtils.isBlank(earliestTime)) {
if (EVENT_TIME_VALUE.getValue().equalsIgnoreCase(timeFieldStrategy)) {
exportArgs.setEarliestTime(earliestTime);
} else {
exportArgs.setIndexEarliest(earliestTime);
}
}
if (!StringUtils.isBlank(latestTime)) {
if (EVENT_TIME_VALUE.getValue().equalsIgnoreCase(timeFieldStrategy)) {
exportArgs.setLatestTime(latestTime);
} else {
exportArgs.setIndexLatest(latestTime);
}
}
if (EVENT_TIME_VALUE.getValue().equalsIgnoreCase(timeFieldStrategy)) {
getLogger().debug("Using earliest_time of {} and latest_time of {}", new Object[] { earliestTime, latestTime });
} else {
getLogger().debug("Using index_earliest of {} and index_latest of {}", new Object[] { earliestTime, latestTime });
}
InputStream export;
try {
export = splunkService.export(query, exportArgs);
// Catch Stale connection exception, reinitialize, and retry
} catch (com.splunk.HttpException e) {
getLogger().error("Splunk request status code:" + e.getStatus() + " Retrying the request.");
splunkService.logout();
splunkService = createSplunkService(context);
export = splunkService.export(query, exportArgs);
}
final InputStream exportSearch = export;
FlowFile flowFile = session.create();
flowFile = session.write(flowFile, new OutputStreamCallback() {
@Override
public void process(OutputStream rawOut) throws IOException {
try (BufferedOutputStream out = new BufferedOutputStream(rawOut)) {
IOUtils.copyLarge(exportSearch, out);
}
}
});
final Map<String, String> attributes = new HashMap<>(3);
attributes.put(EARLIEST_TIME_ATTR, earliestTime);
attributes.put(LATEST_TIME_ATTR, latestTime);
attributes.put(QUERY_ATTR, query);
flowFile = session.putAllAttributes(flowFile, attributes);
session.getProvenanceReporter().receive(flowFile, transitUri);
session.transfer(flowFile, REL_SUCCESS);
getLogger().debug("Received {} from Splunk", new Object[] { flowFile });
// only need to do this for the managed time strategies
if (!PROVIDED_VALUE.getValue().equals(timeRangeStrategy)) {
try {
saveState(context.getStateManager(), new TimeRange(earliestTime, latestTime));
} catch (IOException e) {
getLogger().error("Unable to load data from State Manager due to {}", new Object[] { e.getMessage() }, e);
session.rollback();
context.yield();
}
}
}
Use of com.splunk.JobExportArgs in the Apache NiFi project:
the TestGetSplunk class, testMultipleIterationsWithoutShuttingDown method.
@Test
public void testMultipleIterationsWithoutShuttingDown() {
final String query = "search tcp:7879";
final String providedEarliest = "-1h";
final String providedLatest = "now";
final String outputMode = GetSplunk.ATOM_VALUE.getValue();
runner.setProperty(GetSplunk.QUERY, query);
runner.setProperty(GetSplunk.EARLIEST_TIME, providedEarliest);
runner.setProperty(GetSplunk.LATEST_TIME, providedLatest);
runner.setProperty(GetSplunk.OUTPUT_MODE, outputMode);
final JobExportArgs expectedArgs = new JobExportArgs();
expectedArgs.setSearchMode(JobExportArgs.SearchMode.NORMAL);
expectedArgs.setEarliestTime(providedEarliest);
expectedArgs.setLatestTime(providedLatest);
expectedArgs.setOutputMode(JobExportArgs.OutputMode.valueOf(outputMode));
final String resultContent = "fake results";
final ByteArrayInputStream input = new ByteArrayInputStream(resultContent.getBytes(StandardCharsets.UTF_8));
when(service.export(eq(query), argThat(new JobExportArgsMatcher(expectedArgs)))).thenReturn(input);
final int iterations = 3;
runner.run(iterations, false);
runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, iterations);
Assert.assertEquals(1, proc.count);
}
Use of com.splunk.JobExportArgs in the Apache NiFi project:
the TestGetSplunk class, testGetWithProvidedTime method.
@Test
public void testGetWithProvidedTime() {
final String query = "search tcp:7879";
final String providedEarliest = "-1h";
final String providedLatest = "now";
final String outputMode = GetSplunk.ATOM_VALUE.getValue();
runner.setProperty(GetSplunk.QUERY, query);
runner.setProperty(GetSplunk.EARLIEST_TIME, providedEarliest);
runner.setProperty(GetSplunk.LATEST_TIME, providedLatest);
runner.setProperty(GetSplunk.OUTPUT_MODE, outputMode);
final JobExportArgs expectedArgs = new JobExportArgs();
expectedArgs.setSearchMode(JobExportArgs.SearchMode.NORMAL);
expectedArgs.setEarliestTime(providedEarliest);
expectedArgs.setLatestTime(providedLatest);
expectedArgs.setOutputMode(JobExportArgs.OutputMode.valueOf(outputMode));
final String resultContent = "fake results";
final ByteArrayInputStream input = new ByteArrayInputStream(resultContent.getBytes(StandardCharsets.UTF_8));
when(service.export(eq(query), argThat(new JobExportArgsMatcher(expectedArgs)))).thenReturn(input);
runner.run();
runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, 1);
final List<MockFlowFile> mockFlowFiles = runner.getFlowFilesForRelationship(GetSplunk.REL_SUCCESS);
Assert.assertEquals(1, mockFlowFiles.size());
final MockFlowFile mockFlowFile = mockFlowFiles.get(0);
mockFlowFile.assertContentEquals(resultContent);
mockFlowFile.assertAttributeEquals(GetSplunk.QUERY_ATTR, query);
mockFlowFile.assertAttributeEquals(GetSplunk.EARLIEST_TIME_ATTR, providedEarliest);
mockFlowFile.assertAttributeEquals(GetSplunk.LATEST_TIME_ATTR, providedLatest);
Assert.assertEquals(1, proc.count);
final List<ProvenanceEventRecord> events = runner.getProvenanceEvents();
Assert.assertEquals(1, events.size());
Assert.assertEquals(ProvenanceEventType.RECEIVE, events.get(0).getEventType());
Assert.assertEquals("https://localhost:8089", events.get(0).getTransitUri());
}
Use of com.splunk.JobExportArgs in the Apache Apex Malhar project:
the AbstractSplunkInputOperator class, setup method.
@Override
public void setup(OperatorContext t1) {
super.setup(t1);
exportArgs = new JobExportArgs();
exportArgs.setEarliestTime(earliestTime);
exportArgs.setLatestTime(latestTime);
exportArgs.setSearchMode(JobExportArgs.SearchMode.NORMAL);
}
Use of com.splunk.JobExportArgs in the Apache NiFi project:
the TestGetSplunk class, testGetWithManagedFromCurrentUsingIndexTime method.
@Test
public void testGetWithManagedFromCurrentUsingIndexTime() throws IOException, ParseException {
final String query = "search tcp:7879";
final String outputMode = GetSplunk.ATOM_VALUE.getValue();
runner.setProperty(GetSplunk.QUERY, query);
runner.setProperty(GetSplunk.OUTPUT_MODE, outputMode);
runner.setProperty(GetSplunk.TIME_RANGE_STRATEGY, GetSplunk.MANAGED_CURRENT_VALUE.getValue());
runner.setProperty(GetSplunk.TIME_FIELD_STRATEGY, GetSplunk.INDEX_TIME_VALUE.getValue());
final String resultContent = "fake results";
final ByteArrayInputStream input = new ByteArrayInputStream(resultContent.getBytes(StandardCharsets.UTF_8));
when(service.export(eq(query), any(JobExportArgs.class))).thenReturn(input);
// run once and don't shut down, shouldn't produce any results first time
runner.run(1, false);
runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, 0);
// capture what the args were on last run
verify(service, times(0)).export(eq(query), any(JobExportArgs.class));
final StateMap state = runner.getStateManager().getState(Scope.CLUSTER);
Assert.assertNotNull(state);
Assert.assertTrue(state.getVersion() > 0);
// save the latest time from the first run which should be earliest time of next run
final String lastLatest = state.get(GetSplunk.LATEST_TIME_KEY);
final SimpleDateFormat format = new SimpleDateFormat(GetSplunk.DATE_TIME_FORMAT);
format.setTimeZone(TimeZone.getTimeZone("UTC"));
final Date lastLatestDate = format.parse(lastLatest);
final String expectedLatest = format.format(new Date(lastLatestDate.getTime() + 1));
// run again
runner.run(1, false);
runner.assertAllFlowFilesTransferred(GetSplunk.REL_SUCCESS, 1);
final ArgumentCaptor<JobExportArgs> capture = ArgumentCaptor.forClass(JobExportArgs.class);
verify(service, times(1)).export(eq(query), capture.capture());
// second execution the earliest time should be the previous latest_time
final JobExportArgs actualArgs = capture.getValue();
Assert.assertNotNull(actualArgs);
Assert.assertEquals(expectedLatest, actualArgs.get("index_earliest"));
Assert.assertNotNull(actualArgs.get("index_latest"));
}
Aggregations