Usage of com.amazonaws.services.neptune.util.S3ObjectInfo in the awslabs/amazon-neptune-tools project.
Example: class ExportToS3NeptuneExportEventHandler, method uploadCompletionFileToS3.
/**
 * Writes a completion file (JSON summary of the export: output S3 path, export stats and
 * graph schema, plus any contributions from registered {@code CompletionFileWriter}s) to the
 * local output directory, then uploads it to the configured completion-file S3 location with
 * AES-256 server-side encryption.
 * <p>
 * No-op if no completion file S3 path is configured, or if the export directory is missing.
 *
 * @param transferManager    S3 transfer manager used to perform the upload
 * @param directory          local export directory (its name seeds the completion filename
 *                           when per-export subdirectories are enabled)
 * @param outputS3ObjectInfo S3 location of the exported data, recorded in the payload
 * @param stats              export statistics to embed in the completion payload
 * @param graphSchema        graph schema used by {@code stats.addTo}
 * @throws IOException if the completion file cannot be written locally
 */
private void uploadCompletionFileToS3(TransferManager transferManager, File directory, S3ObjectInfo outputS3ObjectInfo, ExportStats stats, GraphSchema graphSchema) throws IOException {
    if (StringUtils.isEmpty(completionFileS3Path)) {
        return;
    }
    if (directory == null || !directory.exists()) {
        logger.warn("Ignoring request to upload completion file to S3 because directory from which to upload files does not exist");
        return;
    }

    // When exports go into per-export subdirectories, reuse the subdirectory name so the
    // completion file correlates with the export; otherwise fall back to a timestamp.
    String completionFilename = s3UploadParams.createExportSubdirectory() ?
            directory.getName() :
            String.valueOf(System.currentTimeMillis());

    File completionFile = new File(localOutputPath, completionFilename + ".json");

    ObjectNode neptuneExportNode = JsonNodeFactory.instance.objectNode();
    completionFilePayload.set("neptuneExport", neptuneExportNode);
    neptuneExportNode.put("outputS3Path", outputS3ObjectInfo.toString());
    stats.addTo(neptuneExportNode, graphSchema);
    for (CompletionFileWriter completionFileWriter : completionFileWriters) {
        completionFileWriter.updateCompletionFile(completionFilePayload);
    }

    try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(completionFile), UTF_8))) {
        ObjectWriter objectWriter = new ObjectMapper().writer().withDefaultPrettyPrinter();
        writer.write(objectWriter.writeValueAsString(completionFilePayload));
    }

    // Substitute the placeholder token in the configured path with the completion file's base name.
    S3ObjectInfo completionFileS3ObjectInfo =
            new S3ObjectInfo(completionFileS3Path)
                    .replaceOrAppendKey("_COMPLETION_ID_", FilenameUtils.getBaseName(completionFile.getName()), completionFile.getName());

    logger.info("Uploading completion file to {}", completionFileS3ObjectInfo.key());

    try (InputStream inputStream = new FileInputStream(completionFile)) {

        ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setContentLength(completionFile.length());
        objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);

        PutObjectRequest putObjectRequest = new PutObjectRequest(
                completionFileS3ObjectInfo.bucket(),
                completionFileS3ObjectInfo.key(),
                inputStream,
                objectMetadata).withTagging(createObjectTags(profiles));

        Upload upload = transferManager.upload(putObjectRequest);
        upload.waitForUploadResult();

    } catch (InterruptedException e) {
        // Log the exception itself (not just its message, which may be null and loses the
        // stack trace), then restore the interrupt flag for callers.
        logger.warn("Upload of completion file to S3 was interrupted", e);
        Thread.currentThread().interrupt();
    }
}
Usage of com.amazonaws.services.neptune.util.S3ObjectInfo in the awslabs/amazon-neptune-tools project.
Example: class ExportToS3NeptuneExportEventHandler, method deleteS3Directories.
// Computes the S3 "directory" prefixes corresponding to each local export subdirectory,
// so that pre-existing objects under those prefixes can be removed before re-uploading
// when overwriteExisting is enabled.
// NOTE(review): as shown here, leafS3Directories is built but never consumed — the actual
// deletion logic appears to be missing (possibly truncated in this excerpt). Confirm against
// the full source before relying on this method to delete anything.
private void deleteS3Directories(Directories directories, S3ObjectInfo outputS3ObjectInfo) {
// Deletion is only wanted when the caller asked to overwrite an existing export.
if (!s3UploadParams.overwriteExisting()) {
return;
}
List<S3ObjectInfo> leafS3Directories = new ArrayList<>();
Path rootDirectory = directories.rootDirectory();
for (Path subdirectory : directories.subdirectories()) {
// Relativize against the export root so the S3 key mirrors the local layout.
String newKey = rootDirectory.relativize(subdirectory).toString();
leafS3Directories.add(outputS3ObjectInfo.withNewKeySuffix(newKey));
}
}
Usage of com.amazonaws.services.neptune.util.S3ObjectInfo in the awslabs/amazon-neptune-tools project.
Example: class ExportToS3NeptuneExportEventHandler, method onExportComplete.
/**
 * Called when the export has finished. Logs the total size of the exported files
 * (best-effort), then — if an output S3 path is configured — optionally deletes any
 * pre-existing S3 content, uploads the export files and the completion file, and
 * records the resulting S3 location in {@code result}.
 *
 * @param directories local export directory layout
 * @param stats       export statistics, forwarded to the completion file
 * @param cluster     cluster metadata (unused here)
 * @param graphSchema exported graph schema, forwarded to the completion file
 * @throws Exception if the upload fails
 */
@Override
public void onExportComplete(Directories directories, ExportStats stats, Cluster cluster, GraphSchema graphSchema) throws Exception {

    // Files.walk returns a Stream that holds open directory handles; close it with
    // try-with-resources to avoid leaking file descriptors on large exports.
    // (Fully qualified to avoid requiring a new import.)
    try (java.util.stream.Stream<Path> paths = Files.walk(directories.rootDirectory())) {
        long size = paths.mapToLong(p -> p.toFile().length()).sum();
        logger.info("Total size of exported files: {}", FileUtils.byteCountToDisplaySize(size));
    } catch (Exception e) {
        // Best-effort size reporting only — never let it fail the export.
    }

    if (StringUtils.isEmpty(outputS3Path)) {
        return;
    }

    logger.info("S3 upload params: {}", s3UploadParams);

    try (TransferManagerWrapper transferManager = new TransferManagerWrapper(s3Region)) {
        File outputDirectory = directories.rootDirectory().toFile();
        S3ObjectInfo outputS3ObjectInfo = calculateOutputS3Path(outputDirectory);
        Timer.timedActivity("uploading files to S3", (CheckedActivity.Runnable) () -> {
            deleteS3Directories(directories, outputS3ObjectInfo);
            uploadExportFilesToS3(transferManager.get(), outputDirectory, outputS3ObjectInfo);
            uploadCompletionFileToS3(transferManager.get(), outputDirectory, outputS3ObjectInfo, stats, graphSchema);
        });
        result.set(outputS3ObjectInfo);
    }
}
Usage of com.amazonaws.services.neptune.util.S3ObjectInfo in the awslabs/amazon-neptune-tools project.
Example: class NeptuneExportLambda, method handleRequest.
@Override
public void handleRequest(InputStream inputStream, OutputStream outputStream, Context context) throws IOException {
Logger logger = s -> context.getLogger().log(s);
ObjectMapper objectMapper = new ObjectMapper();
JsonNode json = objectMapper.readTree(IOUtils.toString(inputStream, UTF_8.name()));
String cmd = json.has("command") ? json.path("command").textValue() : EnvironmentVariableUtils.getOptionalEnv("COMMAND", "export-pg");
ObjectNode params = json.has("params") ? (ObjectNode) json.get("params") : objectMapper.readTree("{}").deepCopy();
String outputS3Path = json.has("outputS3Path") ? json.path("outputS3Path").textValue() : EnvironmentVariableUtils.getOptionalEnv("OUTPUT_S3_PATH", "");
boolean createExportSubdirectory = Boolean.parseBoolean(json.has("createExportSubdirectory") ? json.path("createExportSubdirectory").toString() : EnvironmentVariableUtils.getOptionalEnv("CREATE_EXPORT_SUBDIRECTORY", "true"));
boolean overwriteExisting = Boolean.parseBoolean(json.has("overwriteExisting") ? json.path("overwriteExisting").toString() : EnvironmentVariableUtils.getOptionalEnv("OVERWRITE_EXISTING", "false"));
boolean uploadToS3OnError = Boolean.parseBoolean(json.has("uploadToS3OnError") ? json.path("uploadToS3OnError").toString() : EnvironmentVariableUtils.getOptionalEnv("UPLOAD_TO_S3_ON_ERROR", "true"));
String configFileS3Path = json.has("configFileS3Path") ? json.path("configFileS3Path").textValue() : EnvironmentVariableUtils.getOptionalEnv("CONFIG_FILE_S3_PATH", "");
String queriesFileS3Path = json.has("queriesFileS3Path") ? json.path("queriesFileS3Path").textValue() : EnvironmentVariableUtils.getOptionalEnv("QUERIES_FILE_S3_PATH", "");
String completionFileS3Path = json.has("completionFileS3Path") ? json.path("completionFileS3Path").textValue() : EnvironmentVariableUtils.getOptionalEnv("COMPLETION_FILE_S3_PATH", "");
String s3Region = json.has("s3Region") ? json.path("s3Region").textValue() : EnvironmentVariableUtils.getOptionalEnv("S3_REGION", EnvironmentVariableUtils.getOptionalEnv("AWS_REGION", ""));
ObjectNode completionFilePayload = json.has("completionFilePayload") ? json.path("completionFilePayload").deepCopy() : objectMapper.readTree(EnvironmentVariableUtils.getOptionalEnv("COMPLETION_FILE_PAYLOAD", "{}")).deepCopy();
ObjectNode additionalParams = json.has("additionalParams") ? json.path("additionalParams").deepCopy() : objectMapper.readTree("{}").deepCopy();
int maxConcurrency = json.has("jobSize") ? JobSize.parse(json.path("jobSize").textValue()).maxConcurrency() : -1;
logger.log("cmd : " + cmd);
logger.log("params : " + params.toPrettyString());
logger.log("outputS3Path : " + outputS3Path);
logger.log("createExportSubdirectory : " + createExportSubdirectory);
logger.log("overwriteExisting : " + overwriteExisting);
logger.log("uploadToS3OnError : " + uploadToS3OnError);
logger.log("configFileS3Path : " + configFileS3Path);
logger.log("queriesFileS3Path : " + queriesFileS3Path);
logger.log("completionFileS3Path : " + completionFileS3Path);
logger.log("s3Region : " + s3Region);
logger.log("completionFilePayload : " + completionFilePayload.toPrettyString());
logger.log("additionalParams : " + additionalParams.toPrettyString());
if (!cmd.contains(" ") && !params.isEmpty()) {
cmd = ParamConverter.fromJson(cmd, params).toString();
}
logger.log("revised cmd : " + cmd);
NeptuneExportService neptuneExportService = new NeptuneExportService(cmd, localOutputPath, cleanOutputPath, outputS3Path, createExportSubdirectory, overwriteExisting, uploadToS3OnError, configFileS3Path, queriesFileS3Path, completionFileS3Path, completionFilePayload, additionalParams, maxConcurrency, s3Region, maxFileDescriptorCount);
S3ObjectInfo outputS3ObjectInfo = neptuneExportService.execute();
if (StringUtils.isEmpty(outputS3Path)) {
return;
}
if (outputS3ObjectInfo != null) {
try (Writer writer = new BufferedWriter(new OutputStreamWriter(outputStream, UTF_8))) {
writer.write(outputS3ObjectInfo.toString());
}
} else {
System.exit(-1);
}
}
Usage of com.amazonaws.services.neptune.util.S3ObjectInfo in the awslabs/amazon-neptune-tools project.
Example: class NeptuneMachineLearningExportEventHandlerV1, method uploadTrainingJobConfigurationFileToS3.
/**
 * Uploads a single training job configuration file to S3, placing it under the export
 * output location with the given filename and AES-256 server-side encryption.
 *
 * @param filename                     key suffix appended to the output S3 location
 * @param transferManager              S3 transfer manager performing the upload
 * @param trainingJobConfigurationFile local file to upload
 * @param outputS3ObjectInfo           base S3 location of the export output
 * @throws IOException if the local file cannot be opened
 */
private void uploadTrainingJobConfigurationFileToS3(String filename, TransferManager transferManager, File trainingJobConfigurationFile, S3ObjectInfo outputS3ObjectInfo) throws IOException {

    S3ObjectInfo target = outputS3ObjectInfo.withNewKeySuffix(filename);

    try (InputStream fileStream = new FileInputStream(trainingJobConfigurationFile)) {

        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(trainingJobConfigurationFile.length());
        metadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);

        PutObjectRequest request = new PutObjectRequest(target.bucket(), target.key(), fileStream, metadata)
                .withTagging(ExportToS3NeptuneExportEventHandler.createObjectTags(profiles));

        // Block until the transfer completes so failures surface here.
        transferManager.upload(request).waitForUploadResult();

    } catch (InterruptedException e) {
        logger.warn(e.getMessage());
        Thread.currentThread().interrupt();
    }
}
Aggregations