Use of com.homeaway.datapullclient.exception.ProcessingException in project datapull by HomeAway — class DataPullRequestProcessor, method runDataPull:
/**
 * Runs a datapull for the given JSON request: optionally validates/enriches the JSON,
 * extracts the "cluster" and "migrations" definitions, writes the bootstrap script and
 * job config to the S3 history folder, and schedules the resulting {@link DataPullTask}
 * (cron-triggered when a cron expression is supplied, otherwise run once ~1s later).
 *
 * @param json         the input JSON; may point at an S3-hosted JSON via "jsoninputfile"
 * @param isStart      true when replaying previously saved jobs at application start-up
 * @param validateJson true to validate the JSON against the input schema before use
 * @throws ProcessingException if the JSON is invalid or the datapull cannot be started
 */
private void runDataPull(String json, boolean isStart, boolean validateJson) throws ProcessingException {
    String originalInputJson = json;
    json = extractUserJsonFromS3IfProvided(json, isStart);
    final EMRProperties emrProperties = this.config.getEmrProperties();
    if (log.isDebugEnabled())
        log.debug("runDataPull -> json = " + json + " isStart = " + isStart);
    try {
        if (validateJson) {
            json = validateAndEnrich(json);
        }
        log.info("Running datapull for json : " + json + " cron expression = " + isStart + "env =" + env);
        final ObjectNode node = new ObjectMapper().readValue(json, ObjectNode.class);
        List<Map.Entry<String, JsonNode>> result = new LinkedList<Map.Entry<String, JsonNode>>();
        Iterator<Map.Entry<String, JsonNode>> nodes = node.fields();
        while (nodes.hasNext()) {
            result.add(nodes.next());
        }
        // Use orElse(null) so the explicit null-checks below are reachable; the original
        // findAny().get() threw an uninformative NoSuchElementException when the key was
        // missing and made the "cannot be null" validation dead code.
        JsonNode clusterNode = result.stream().filter(y -> y.getKey().equalsIgnoreCase("cluster")).map(Map.Entry::getValue).findAny().orElse(null);
        JsonNode migrationsNode = result.stream().filter(y -> y.getKey().equalsIgnoreCase("migrations")).map(Map.Entry::getValue).findAny().orElse(null);
        if (clusterNode == null)
            throw new ProcessingException("Invalid Json!!! Cluster properties cannot be null");
        if (migrationsNode == null)
            throw new ProcessingException("Invalid Json!!! Migration properties cannot be null");
        String creator = node.has(CREATOR) ? node.findValue(CREATOR).asText() : "";
        ObjectMapper mapper = new ObjectMapper();
        ClusterProperties reader = mapper.treeToValue(clusterNode, ClusterProperties.class);
        Migration[] myObjects = mapper.treeToValue(migrationsNode, Migration[].class);
        String cronExp = Objects.toString(reader.getCronExpression(), "");
        if (!cronExp.isEmpty())
            cronExp = validateAndProcessCronExpression(cronExp);
        // Pipeline name falls back to a random UUID; env falls back to the service env.
        String pipeline = Objects.toString(reader.getPipelineName(), UUID.randomUUID().toString());
        String pipelineEnv = Objects.toString(reader.getAwsEnv(), env);
        DataPullProperties dataPullProperties = config.getDataPullProperties();
        String applicationHistoryFolder = dataPullProperties.getApplicationHistoryFolder();
        String s3RepositoryBucketName = dataPullProperties.getS3BucketName();
        String jobName = pipelineEnv + PIPELINE_NAME_DELIMITER + EMR + PIPELINE_NAME_DELIMITER + pipeline + PIPELINE_NAME_DELIMITER + PIPELINE_NAME_SUFFIX;
        String applicationHistoryFolderPath = applicationHistoryFolder == null || applicationHistoryFolder.isEmpty() ? s3RepositoryBucketName + "/" + DATAPULL_HISTORY_FOLDER : applicationHistoryFolder;
        String bootstrapFilePath = s3RepositoryBucketName + "/" + BOOTSTRAP_FOLDER;
        String filePath = applicationHistoryFolderPath + "/" + jobName;
        String bootstrapFile = jobName + ".sh";
        String jksFilePath = bootstrapFilePath + "/" + bootstrapFile;
        String bootstrapActionStringFromUser = Objects.toString(reader.getBootstrapactionstring(), "");
        String defaultBootstrapString = emrProperties.getDefaultBootstrapString();
        Boolean haveBootstrapAction = createBootstrapScript(myObjects, bootstrapFile, bootstrapFilePath, bootstrapActionStringFromUser, defaultBootstrapString);
        DataPullTask task = createDataPullTask(filePath, jksFilePath, reader, jobName, creator, node.path("sparkjarfile").asText(), haveBootstrapAction);
        if (!isStart) {
            // Persist the user's original JSON (not the S3-resolved/enriched copy).
            json = originalInputJson.equals(json) ? json : originalInputJson;
            saveConfig(applicationHistoryFolderPath, jobName + ".json", json);
        }
        if (!isStart && tasksMap.containsKey(jobName))
            cancelExistingTask(jobName);
        // On start-up, jobs with no cron expression are one-shot and are not re-run.
        if (!(isStart && cronExp.isEmpty())) {
            Future<?> future = !cronExp.isEmpty() ? scheduler.schedule(task, new CronTrigger(cronExp)) : scheduler.schedule(task, new Date(System.currentTimeMillis() + 1 * 1000));
            tasksMap.put(jobName, future);
        }
    } catch (IOException e) {
        // Preserve the cause so the original stack trace survives; the previous code
        // discarded it and kept only the localized message.
        throw new ProcessingException("exception while starting datapull " + e.getLocalizedMessage(), e);
    }
    if (log.isDebugEnabled())
        log.debug("runDataPull <- return");
}
Use of com.homeaway.datapullclient.exception.ProcessingException in project datapull by HomeAway — class DataPullRequestProcessor, method readFileFromS3:
/**
 * Reads the S3 object at {@code bucketName/path} and returns its content as one string.
 * Line separators are dropped (lines are concatenated), which is harmless for the JSON
 * payloads this method is used for.
 *
 * @param s3Client   the S3 client to read with
 * @param bucketName the bucket containing the object
 * @param path       the object key within the bucket
 * @return the object content with line separators removed
 * @throws ProcessingException if the object content cannot be read
 */
public String readFileFromS3(AmazonS3 s3Client, String bucketName, String path) throws ProcessingException {
    S3Object object = s3Client.getObject(new GetObjectRequest(bucketName, path));
    StringBuilder out = new StringBuilder();
    // Decode as UTF-8 explicitly; the original relied on the platform default charset,
    // which can corrupt non-ASCII JSON on misconfigured hosts.
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(object.getObjectContent(), java.nio.charset.StandardCharsets.UTF_8))) {
        String line;
        while ((line = reader.readLine()) != null) {
            out.append(line);
        }
    } catch (IOException exception) {
        // Keep the underlying cause for diagnosis instead of discarding it.
        throw new ProcessingException("Input json file invalid", exception);
    }
    return out.toString();
}
Use of com.homeaway.datapullclient.exception.ProcessingException in project datapull by HomeAway — class DataPullRequestProcessor, method runHistoricalTasksAndReadSchemaJson:
/**
 * Start-up hook: re-registers previously saved datapull jobs, then loads the JSON schema
 * (classpath:/input_json_schema.json) used to validate incoming requests.
 *
 * @throws ProcessingException if the schema resource cannot be read
 */
@PostConstruct
public void runHistoricalTasksAndReadSchemaJson() throws ProcessingException {
    readExistingDataPullInputs();
    try {
        ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        Resource[] resources = resolver.getResources("classpath*:/input_json_schema.json");
        // The original opened the resource twice — once via an unused, never-closed
        // BufferedReader. Open it exactly once and close it deterministically.
        try (java.io.InputStream schemaStream = resources[0].getInputStream()) {
            JSONObject jsonSchema = new JSONObject(new JSONTokener(schemaStream));
            inputJsonSchema = SchemaLoader.load(jsonSchema);
        }
    } catch (IOException e) {
        throw new ProcessingException("Unable to create JSON validator", e);
    }
}
Use of com.homeaway.datapullclient.exception.ProcessingException in project datapull by HomeAway — class DataPullRequestHandler, method startDataPull:
/**
 * REST entry point that registers a datapull request with the processor service.
 * Returns an ACCEPTED response on success; translates a ProcessingException into an
 * InputException for the caller.
 *
 * @param inputJson the HTTP entity whose body is the datapull request JSON
 * @return an ACCEPTED response entity describing the registered request
 */
@Override
public ResponseEntity startDataPull(HttpEntity<String> inputJson) {
    if (log.isDebugEnabled()) {
        log.debug("startDataPull -> inputJson=" + inputJson);
    }
    final ResponseEntity responseEntity;
    try {
        service.runDataPull(inputJson.getBody());
        responseEntity = new ResponseEntity(HttpStatus.ACCEPTED.value(), "Request Succesfully registered : " + inputJson);
    } catch (ProcessingException e) {
        throw new InputException("DataPull application failed for inputJson : " + inputJson + " \n " + e.getMessage());
    }
    if (log.isDebugEnabled()) {
        log.debug("startDataPull <- return");
    }
    return responseEntity;
}
Use of com.homeaway.datapullclient.exception.ProcessingException in project datapull by HomeAway — class DataPullRequestProcessor, method extractUserJsonFromS3IfProvided:
/**
 * Resolves the effective input JSON: while the supplied JSON contains a "jsoninputfile"
 * pointer, follows the chain of S3-hosted JSON documents until a concrete request is
 * reached. Detects circular references between pointed files.
 *
 * @param json    the raw input JSON, possibly containing a "jsoninputfile" S3 pointer
 * @param isStart true when called for jobs replayed at application start-up
 * @return the final, fully resolved input JSON
 * @throws ProcessingException if a pointed JSON is circular or unreadable
 */
public String extractUserJsonFromS3IfProvided(String json, boolean isStart) throws ProcessingException {
    List<String> jsonS3PathList = new ArrayList<>();
    try {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode jsonNode = mapper.readTree(json);
        // Keep following "jsoninputfile" pointers until we reach actual content.
        while (jsonNode.has("jsoninputfile")) {
            JsonNode jsonInputFileNode = jsonNode.path("jsoninputfile");
            JsonInputFile jsonInputFile = mapper.treeToValue(jsonInputFileNode, JsonInputFile.class);
            String s3path = jsonInputFile.getS3Path();
            // Cycle guard: seeing the same path twice means the pointed files loop.
            if (jsonS3PathList.contains(s3path)) {
                throw new ProcessingException("JSON is pointing to same JSON.");
            }
            jsonS3PathList.add(s3path);
            AmazonS3 s3Client = config.getS3Client();
            // s3path is expected to be "bucket/key..."; split on the first slash.
            String bucketName = s3path.substring(0, s3path.indexOf("/"));
            String path = s3path.substring(s3path.indexOf("/") + 1);
            json = readFileFromS3(s3Client, bucketName, path);
            jsonNode = mapper.readTree(json);
        }
    } catch (IOException e) {
        if (isStart) {
            // At start-up, report which saved/pointed JSON went bad.
            if (!jsonS3PathList.isEmpty()) {
                throw new InvalidPointedJsonException("Invalid input json at path - " + jsonS3PathList.get(jsonS3PathList.size() - 1));
            } else {
                throw new InvalidPointedJsonException("Invalid input json - " + json);
            }
        } else {
            // Preserve the IOException as the cause; the original discarded it.
            throw new ProcessingException(e.getMessage(), e);
        }
    }
    return json;
}
Aggregations