Use of org.jets3t.service.model.StorageObject in project hadoop by apache.
The class Jets3tNativeFileSystemStore, method retrieveMetadata.
@Override
public FileMetadata retrieveMetadata(String key) throws IOException {
  StorageObject object = null;
  try {
    LOG.debug("Getting metadata for key: {} from bucket: {}", key, bucket.getName());
    object = s3Service.getObjectDetails(bucket.getName(), key);
    return new FileMetadata(key, object.getContentLength(),
        object.getLastModifiedDate().getTime());
  } catch (ServiceException e) {
    try {
      // process
      handleException(e, key);
      return null;
    } catch (FileNotFoundException fnfe) {
      // and downgrade missing files
      return null;
    }
  } finally {
    if (object != null) {
      object.closeDataInputStream();
    }
  }
}
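The handleException helper is not shown here; it evidently rethrows "object missing" service errors as FileNotFoundException, which the inner catch then downgrades to a null return. A minimal standalone sketch of the same metadata lookup, assuming jets3t's ServiceException exposes the HTTP status via getResponseCode() (not the Hadoop code itself):

// Sketch: HEAD-style metadata lookup that maps a 404 to null, roughly what
// handleException() plus the FileNotFoundException catch above accomplish.
import org.jets3t.service.ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.StorageObject;

final class MetadataLookupSketch {
  static Long contentLengthOrNull(RestS3Service s3, String bucket, String key)
      throws ServiceException {
    try {
      // getObjectDetails issues a metadata-only request; no data stream is opened
      StorageObject details = s3.getObjectDetails(bucket, key);
      return details.getContentLength();
    } catch (ServiceException e) {
      if (e.getResponseCode() == 404) {
        return null; // missing key: treat as "no metadata"
      }
      throw e;
    }
  }
}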
Use of org.jets3t.service.model.StorageObject in project hadoop by apache.
The class Jets3tNativeFileSystemStore, method storeLargeFile.
public void storeLargeFile(String key, File file, byte[] md5Hash)
    throws IOException {
  S3Object object = new S3Object(key);
  object.setDataInputFile(file);
  object.setContentType("binary/octet-stream");
  object.setContentLength(file.length());
  object.setServerSideEncryptionAlgorithm(serverSideEncryptionAlgorithm);
  if (md5Hash != null) {
    object.setMd5Hash(md5Hash);
  }
  List<StorageObject> objectsToUploadAsMultipart = new ArrayList<StorageObject>();
  objectsToUploadAsMultipart.add(object);
  MultipartUtils mpUtils = new MultipartUtils(multipartBlockSize);
  try {
    mpUtils.uploadObjects(bucket.getName(), s3Service,
        objectsToUploadAsMultipart, null);
  } catch (Exception e) {
    handleException(e, key);
  }
}
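MultipartUtils splits the file into parts no larger than multipartBlockSize and drives the multipart upload; for objects small enough to send in one request, a single PUT is the simpler path. A sketch of that single-PUT variant, with hypothetical names (this is not Hadoop's companion storeFile method, just an illustration of the plain putObject call):

// Sketch: upload a small file with a single PUT instead of a multipart upload.
import java.io.File;
import org.jets3t.service.S3ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.S3Object;

final class SinglePutSketch {
  static void putFile(RestS3Service s3, String bucketName, String key, File file)
      throws S3ServiceException {
    S3Object object = new S3Object(key);
    object.setDataInputFile(file);            // stream the file rather than buffering it
    object.setContentType("binary/octet-stream");
    object.setContentLength(file.length());
    s3.putObject(bucketName, object);         // one request, no multipart bookkeeping
  }
}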
Use of org.jets3t.service.model.StorageObject in project druid by druid-io.
The class S3DataSegmentFinder, method findSegments.
@Override
public Set<DataSegment> findSegments(String workingDirPath, boolean updateDescriptor)
    throws SegmentLoadingException {
  final Set<DataSegment> segments = Sets.newHashSet();
  try {
    Iterator<StorageObject> objectsIterator = S3Utils.storageObjectsIterator(
        s3Client,
        config.getBucket(),
        workingDirPath.length() == 0 ? config.getBaseKey() : workingDirPath,
        config.getMaxListingLength()
    );
    while (objectsIterator.hasNext()) {
      StorageObject storageObject = objectsIterator.next();
      storageObject.closeDataInputStream();
      if (S3Utils.toFilename(storageObject.getKey()).equals("descriptor.json")) {
        final String descriptorJson = storageObject.getKey();
        String indexZip = S3Utils.indexZipForSegmentPath(descriptorJson);
        if (S3Utils.isObjectInBucket(s3Client, config.getBucket(), indexZip)) {
          S3Object indexObject = s3Client.getObject(config.getBucket(), descriptorJson);
          try (InputStream is = indexObject.getDataInputStream()) {
            final DataSegment dataSegment = jsonMapper.readValue(is, DataSegment.class);
            log.info("Found segment [%s] located at [%s]", dataSegment.getIdentifier(), indexZip);
            final Map<String, Object> loadSpec = dataSegment.getLoadSpec();
            if (!loadSpec.get("type").equals(S3StorageDruidModule.SCHEME)
                || !loadSpec.get("key").equals(indexZip)) {
              loadSpec.put("type", S3StorageDruidModule.SCHEME);
              loadSpec.put("key", indexZip);
              if (updateDescriptor) {
                log.info("Updating loadSpec in descriptor.json at [%s] with new path [%s]",
                    descriptorJson, indexObject);
                S3Object newDescJsonObject = new S3Object(descriptorJson,
                    jsonMapper.writeValueAsString(dataSegment));
                s3Client.putObject(config.getBucket(), newDescJsonObject);
              }
            }
            segments.add(dataSegment);
          }
        } else {
          throw new SegmentLoadingException(
              "index.zip didn't exist at [%s] while descriptor.json exists!?", indexZip);
        }
      }
    }
  } catch (ServiceException e) {
    throw new SegmentLoadingException(e, "Problem interacting with S3");
  } catch (IOException e) {
    throw new SegmentLoadingException(e, "IO exception");
  } catch (Exception e) {
    Throwables.propagateIfInstanceOf(e, SegmentLoadingException.class);
    Throwables.propagate(e);
  }
  return segments;
}
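S3Utils.storageObjectsIterator and S3Utils.indexZipForSegmentPath are Druid-internal helpers whose implementations are not shown here; the iterator presumably pages through the bucket listing under the given prefix. A minimal paging loop built directly on jets3t's chunked listing API might look like this (a sketch, not Druid's implementation):

// Sketch: walk every StorageObject under a prefix using listObjectsChunked.
import org.jets3t.service.ServiceException;
import org.jets3t.service.StorageObjectsChunk;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.StorageObject;

final class ListingSketch {
  static void forEachObject(RestS3Service s3, String bucket, String prefix, long maxKeys)
      throws ServiceException {
    String priorLastKey = null;
    do {
      StorageObjectsChunk chunk =
          s3.listObjectsChunked(bucket, prefix, null, maxKeys, priorLastKey);
      for (StorageObject obj : chunk.getObjects()) {
        System.out.println(obj.getKey());   // replace with real per-object work
      }
      priorLastKey = chunk.isListingComplete() ? null : chunk.getPriorLastKey();
    } while (priorLastKey != null);
  }
}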
Use of org.jets3t.service.model.StorageObject in project druid by druid-io.
The class S3DataSegmentPuller, method buildFileObject.
public static FileObject buildFileObject(final URI uri, final RestS3Service s3Client)
    throws ServiceException {
  final S3Coords coords = new S3Coords(checkURI(uri));
  final StorageObject s3Obj = s3Client.getObjectDetails(coords.bucket, coords.path);
  final String path = uri.getPath();

  return new FileObject() {
    final Object inputStreamOpener = new Object();
    volatile boolean streamAcquired = false;
    volatile StorageObject storageObject = s3Obj;

    @Override
    public URI toUri() {
      return uri;
    }

    @Override
    public String getName() {
      final String ext = Files.getFileExtension(path);
      return Files.getNameWithoutExtension(path)
          + (Strings.isNullOrEmpty(ext) ? "" : ("." + ext));
    }

    @Override
    public InputStream openInputStream() throws IOException {
      try {
        synchronized (inputStreamOpener) {
          if (streamAcquired) {
            return storageObject.getDataInputStream();
          }
          // lazily promote to full GET
          storageObject = s3Client.getObject(s3Obj.getBucketName(), s3Obj.getKey());
          final InputStream stream = storageObject.getDataInputStream();
          streamAcquired = true;
          return stream;
        }
      } catch (ServiceException e) {
        throw new IOException(StringUtils.safeFormat("Could not load S3 URI [%s]", uri), e);
      }
    }

    @Override
    public OutputStream openOutputStream() throws IOException {
      throw new UOE("Cannot stream S3 output");
    }

    @Override
    public Reader openReader(boolean ignoreEncodingErrors) throws IOException {
      throw new UOE("Cannot open reader");
    }

    @Override
    public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException {
      throw new UOE("Cannot open character sequence");
    }

    @Override
    public Writer openWriter() throws IOException {
      throw new UOE("Cannot open writer");
    }

    @Override
    public long getLastModified() {
      return s3Obj.getLastModifiedDate().getTime();
    }

    @Override
    public boolean delete() {
      throw new UOE("Cannot delete S3 items anonymously. jetS3t doesn't support authenticated deletes easily.");
    }
  };
}
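The initial getObjectDetails call fetches metadata only, so getName() and getLastModified() are cheap; the full GET is deferred until openInputStream() is first invoked. A hedged usage sketch (hypothetical caller, names PullSketch and pullToFile are illustrative only) that copies the object to a local file:

// Usage sketch: stream the S3 object to a local file via the FileObject above.
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import javax.tools.FileObject;
import org.jets3t.service.ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;

final class PullSketch {
  static void pullToFile(RestS3Service s3Client, URI s3Uri, File dest)
      throws ServiceException, IOException {
    FileObject remote = S3DataSegmentPuller.buildFileObject(s3Uri, s3Client);
    try (InputStream in = remote.openInputStream();      // triggers the lazy GET
         OutputStream out = new FileOutputStream(dest)) {
      byte[] buf = new byte[8192];
      for (int n; (n = in.read(buf)) != -1; ) {
        out.write(buf, 0, n);
      }
    }
  }
}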
Use of org.jets3t.service.model.StorageObject in project druid by druid-io.
The class S3TaskLogs, method pushTaskLog.
public void pushTaskLog(final String taskid, final File logFile) throws IOException {
  final String taskKey = getTaskLogKey(taskid);
  log.info("Pushing task log %s to: %s", logFile, taskKey);
  try {
    S3Utils.retryS3Operation(new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        final StorageObject object = new StorageObject(logFile);
        object.setKey(taskKey);
        service.putObject(config.getS3Bucket(), object);
        return null;
      }
    });
  } catch (Exception e) {
    Throwables.propagateIfInstanceOf(e, IOException.class);
    throw Throwables.propagate(e);
  }
}
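S3Utils.retryS3Operation wraps the Callable with Druid's retry-on-transient-failure policy; its implementation is not shown here. A deliberately simplified, hypothetical retry wrapper illustrating the pattern (not Druid's actual helper):

// Sketch: retry a callable a fixed number of times with linear backoff.
// Real code would retry only transient errors (e.g. throttling, 5xx).
import java.util.concurrent.Callable;

final class RetrySketch {
  static <T> T retry(Callable<T> op, int maxTries, long sleepMillis) throws Exception {
    Exception last = null;
    for (int attempt = 1; attempt <= maxTries; attempt++) {
      try {
        return op.call();
      } catch (Exception e) {
        last = e;
        Thread.sleep(sleepMillis * attempt);
      }
    }
    throw last;
  }
}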