Use of org.apache.druid.segment.loading.SegmentLoadingException in project druid by druid-io: the class OssDataSegmentKiller, method kill.
@Override
public void kill(DataSegment segment) throws SegmentLoadingException
{
  try {
    Map<String, Object> loadSpec = segment.getLoadSpec();
    String bucket = MapUtils.getString(loadSpec, "bucket");
    String path = MapUtils.getString(loadSpec, "key");
    final OSS client = this.clientSupplier.get();
    if (client.doesObjectExist(bucket, path)) {
      log.info("Removing index file[%s://%s/%s] from aliyun OSS!", OssStorageDruidModule.SCHEME, bucket, path);
      client.deleteObject(bucket, path);
    }
  }
  catch (OSSException e) {
    throw new SegmentLoadingException(e, "Couldn't kill segment[%s]: [%s]", segment.getId(), e);
  }
}
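The method reads the bucket and key out of the segment's load spec, checks that the object exists before deleting it, and rewraps the vendor's OSSException so callers only ever see SegmentLoadingException. A minimal self-contained sketch of that idiom, using hypothetical stand-ins (ObjectStore, StoreException, SegmentKillException) rather than the real OSS client:

import java.util.Map;

// Hypothetical stand-ins for the OSS client and its unchecked exception type.
interface ObjectStore {
  boolean doesObjectExist(String bucket, String key);
  void deleteObject(String bucket, String key);
}

class StoreException extends RuntimeException {
  StoreException(String message) { super(message); }
}

// Hypothetical checked exception playing the role of SegmentLoadingException.
class SegmentKillException extends Exception {
  SegmentKillException(String message, Throwable cause) { super(message, cause); }
}

class SegmentKillSketch {
  private final ObjectStore client;

  SegmentKillSketch(ObjectStore client) {
    this.client = client;
  }

  // Mirrors the kill() flow: read coordinates from the load spec, delete only
  // if the object is present, and rewrap the storage exception as a checked one.
  void kill(Map<String, Object> loadSpec) throws SegmentKillException {
    String bucket = (String) loadSpec.get("bucket");
    String key = (String) loadSpec.get("key");
    try {
      if (client.doesObjectExist(bucket, key)) {
        client.deleteObject(bucket, key);
      }
    }
    catch (StoreException e) {
      throw new SegmentKillException("Couldn't kill segment at [" + bucket + "/" + key + "]", e);
    }
  }
}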
Use of org.apache.druid.segment.loading.SegmentLoadingException in project druid by druid-io: the class CloudFilesDataSegmentPuller, method getSegmentFiles.
FileUtils.FileCopyResult getSegmentFiles(String region, String container, String path, File outDir) throws SegmentLoadingException
{
  CloudFilesObjectApiProxy objectApi = new CloudFilesObjectApiProxy(cloudFilesApi, region, container);
  final CloudFilesByteSource byteSource = new CloudFilesByteSource(objectApi, path);
  try {
    final FileUtils.FileCopyResult result = CompressionUtils.unzip(byteSource, outDir, CloudFilesUtils.CLOUDFILESRETRY, false);
    log.info("Loaded %d bytes from [%s] to [%s]", result.size(), path, outDir.getAbsolutePath());
    return result;
  }
  catch (Exception e) {
    // Remove the partially written output directory so a failed pull does not leave stale files behind.
    try {
      FileUtils.deleteDirectory(outDir);
    }
    catch (IOException ioe) {
      log.warn(ioe, "Failed to remove output directory [%s] for segment pulled from [%s]", outDir.getAbsolutePath(), path);
    }
    throw new SegmentLoadingException(e, e.getMessage());
  }
  finally {
    try {
      byteSource.closeStream();
    }
    catch (IOException ioe) {
      log.warn(ioe, "Failed to close payload for segment pulled from [%s]", path);
    }
  }
}
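The notable shape here is the layered error handling: cleanup of the partial output directory on failure, a log-and-continue fallback if that cleanup itself fails, and a finally block that always closes the payload stream. A hedged, self-contained sketch of the same idiom using only JDK types (the unzip step is replaced by a plain stream copy, and all names are hypothetical):

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Comparator;
import java.util.stream.Stream;

class PullSketch {
  // Copies a payload stream into outDir; on failure, removes the partial
  // output before rethrowing, and always closes the stream afterwards.
  static void pull(InputStream payload, Path outDir) throws IOException {
    try {
      Files.createDirectories(outDir);
      Files.copy(payload, outDir.resolve("segment.bin"));
    }
    catch (IOException e) {
      deleteRecursively(outDir); // best-effort cleanup of partial output
      throw e;
    }
    finally {
      payload.close();
    }
  }

  static void deleteRecursively(Path dir) {
    // Walk depth-first (children before parents) so directories are empty when deleted.
    try (Stream<Path> walk = Files.walk(dir)) {
      walk.sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
    }
    catch (IOException ignored) {
      // Mirrors the original's log-and-continue behavior for failed cleanup.
    }
  }
}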
Use of org.apache.druid.segment.loading.SegmentLoadingException in project druid by druid-io: the class AzureDataSegmentKiller, method kill.
@Override
public void kill(DataSegment segment) throws SegmentLoadingException
{
  log.info("Killing segment [%s]", segment);
  Map<String, Object> loadSpec = segment.getLoadSpec();
  final String containerName = MapUtils.getString(loadSpec, "containerName");
  final String blobPath = MapUtils.getString(loadSpec, "blobPath");
  // Delete the blob's whole parent directory, not just the index file itself.
  final String dirPath = Paths.get(blobPath).getParent().toString();
  try {
    azureStorage.emptyCloudBlobDirectory(containerName, dirPath);
  }
  catch (StorageException e) {
    Object extendedInfo = e.getExtendedErrorInformation() == null ? null : e.getExtendedErrorInformation().getErrorMessage();
    throw new SegmentLoadingException(e, "Couldn't kill segment[%s]: [%s]", segment.getId(), extendedInfo);
  }
  catch (URISyntaxException e) {
    throw new SegmentLoadingException(e, "Couldn't kill segment[%s]: [%s]", segment.getId(), e.getReason());
  }
}
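Unlike the OSS killer, the Azure killer removes the whole parent directory of the blob, derived via Paths.get(blobPath).getParent(). A quick JDK illustration of that derivation (the path value is hypothetical; note that getParent() returns null for a bare file name, so real callers need a non-trivial blob path):

import java.nio.file.Paths;

class ParentPathDemo {
  public static void main(String[] args) {
    // For a blob path like ".../0/index.zip", the parent is the segment directory.
    String blobPath = "foo/bar/2021-01-01/0/index.zip";
    System.out.println(Paths.get(blobPath).getParent()); // prints foo/bar/2021-01-01/0 on Unix
  }
}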
Use of org.apache.druid.segment.loading.SegmentLoadingException in project druid by druid-io: the class AzureDataSegmentPullerTest, method test_getSegmentFiles_nonRecoverableErrorRaisedWhenPullingSegmentFiles_doNotDeleteOutputDirectory.
@Test(expected = RuntimeException.class)
public void test_getSegmentFiles_nonRecoverableErrorRaisedWhenPullingSegmentFiles_doNotDeleteOutputDirectory() throws IOException, URISyntaxException, StorageException, SegmentLoadingException
{
  final File outDir = FileUtils.createTempDir();
  try {
    EasyMock.expect(byteSourceFactory.create(CONTAINER_NAME, BLOB_PATH)).andReturn(new AzureByteSource(azureStorage, CONTAINER_NAME, BLOB_PATH));
    // A URISyntaxException is non-recoverable, unlike an IOException during the copy.
    EasyMock.expect(azureStorage.getBlobInputStream(0L, CONTAINER_NAME, BLOB_PATH)).andThrow(new URISyntaxException("error", "error", 404));
    replayAll();
    AzureDataSegmentPuller puller = new AzureDataSegmentPuller(byteSourceFactory);
    puller.getSegmentFiles(CONTAINER_NAME, BLOB_PATH, outDir);
  }
  catch (Exception e) {
    // The output directory must survive a non-recoverable error.
    Assert.assertTrue(outDir.exists());
    verifyAll();
    throw e;
  }
  finally {
    FileUtils.deleteDirectory(outDir);
  }
}
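The test uses a catch-assert-rethrow shape: assertions about side effects run inside the catch block, and the exception is rethrown so the @Test(expected = ...) check still passes. A minimal self-contained JUnit 4 sketch of the same shape, with a hypothetical failingPull() standing in for the mocked puller:

import java.io.File;
import java.nio.file.Files;
import org.junit.Assert;
import org.junit.Test;

public class CatchAssertRethrowSketch {
  // Hypothetical operation that fails after the output directory already exists.
  private void failingPull(File outDir) {
    throw new RuntimeException("non-recoverable");
  }

  @Test(expected = RuntimeException.class)
  public void outputDirectorySurvivesFailure() throws Exception {
    final File outDir = Files.createTempDirectory("pull-test").toFile();
    try {
      failingPull(outDir);
    }
    catch (Exception e) {
      // Assert side effects here, then rethrow so the expected-exception check still fires.
      Assert.assertTrue(outDir.exists());
      throw e;
    }
    finally {
      outDir.delete();
    }
  }
}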
Use of org.apache.druid.segment.loading.SegmentLoadingException in project druid by druid-io: the class AzureDataSegmentPuller, method getSegmentFiles.
FileUtils.FileCopyResult getSegmentFiles(final String containerName, final String blobPath, final File outDir) throws SegmentLoadingException
{
  try {
    FileUtils.mkdirp(outDir);
    log.info("Loading container: [%s], with blobPath: [%s] and outDir: [%s]", containerName, blobPath, outDir);
    final String actualBlobPath = AzureUtils.maybeRemoveAzurePathPrefix(blobPath);
    final ByteSource byteSource = byteSourceFactory.create(containerName, actualBlobPath);
    final FileUtils.FileCopyResult result = CompressionUtils.unzip(byteSource, outDir, AzureUtils.AZURE_RETRY, false);
    log.info("Loaded %d bytes from [%s] to [%s]", result.size(), actualBlobPath, outDir.getAbsolutePath());
    return result;
  }
  catch (IOException e) {
    // Only IOException triggers cleanup; other failures leave outDir in place (see the test above).
    try {
      FileUtils.deleteDirectory(outDir);
    }
    catch (IOException ioe) {
      log.warn(ioe, "Failed to remove output directory [%s] for segment pulled from [%s]", outDir.getAbsolutePath(), blobPath);
    }
    throw new SegmentLoadingException(e, e.getMessage());
  }
}
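From the caller's side, both IO failures (after the cleanup above) and unzip errors surface as a single SegmentLoadingException. A hedged usage fragment, not from the Druid source: the puller argument, container name, and blob path below are assumptions for illustration:

// Hedged usage sketch: assumes a constructed AzureDataSegmentPuller and
// hypothetical container/blob values; not taken from the Druid codebase.
public File pullToTempDir(AzureDataSegmentPuller puller) throws SegmentLoadingException {
  final File outDir = new File(System.getProperty("java.io.tmpdir"), "druid-pull-example");
  final FileUtils.FileCopyResult result = puller.getSegmentFiles("druid-container", "segments/example/index.zip", outDir);
  // On success outDir holds the unzipped segment files; on IOException the
  // puller has already deleted outDir before throwing SegmentLoadingException.
  return outDir;
}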