Search in sources :

Example 16 with StorageException

use of com.microsoft.azure.storage.StorageException in project hadoop by apache.

The class TestBlobDataValidation, method testStoreBlobMd5.

/**
 * Writes a small test file, verifies whether the blob's Content-MD5 property
 * was stored as configured, then corrupts the blob's content out-of-band and
 * checks that reading it back fails (or succeeds) accordingly.
 *
 * @param expectMd5Stored whether the store is configured to keep the MD5; if
 *     so, the corrupted read-back must surface an INVALID_MD5 storage error
 * @throws Exception on any unexpected test failure
 */
private void testStoreBlobMd5(boolean expectMd5Stored) throws Exception {
    assumeNotNull(testAccount);
    // Write a test file.
    String testFileKey = "testFile";
    Path testFilePath = new Path("/" + testFileKey);
    OutputStream outStream = testAccount.getFileSystem().create(testFilePath);
    outStream.write(new byte[] { 5, 15 });
    outStream.close();
    // Check that we stored/didn't store the MD5 field as configured.
    CloudBlockBlob blob = testAccount.getBlobReference(testFileKey);
    blob.downloadAttributes();
    String obtainedMd5 = blob.getProperties().getContentMD5();
    if (expectMd5Stored) {
        assertNotNull(obtainedMd5);
    } else {
        assertNull("Expected no MD5, found: " + obtainedMd5, obtainedMd5);
    }
    // Mess with the content so it doesn't match the MD5.
    String newBlockId = Base64.encode(new byte[] { 55, 44, 33, 22 });
    blob.uploadBlock(newBlockId, new ByteArrayInputStream(new byte[] { 6, 45 }), 2);
    blob.commitBlockList(Arrays.asList(new BlockEntry[] { new BlockEntry(newBlockId, BlockSearchMode.UNCOMMITTED) }));
    // Now read back the content. If we stored the MD5 for the blob content
    // we should get a data corruption error.
    InputStream inStream = testAccount.getFileSystem().open(testFilePath);
    try {
        byte[] inBuf = new byte[100];
        // Drain the stream fully so any MD5 validation runs to completion.
        while (inStream.read(inBuf) > 0) {
        // nothing
        }
        if (expectMd5Stored) {
            fail("Should've thrown because of data corruption.");
        }
    } catch (IOException ex) {
        // If no MD5 was stored the read must succeed; an IOException here is
        // a genuine failure, so rethrow it.
        if (!expectMd5Stored) {
            throw ex;
        }
        StorageException cause = (StorageException) ex.getCause();
        assertNotNull(cause);
        // Constant-first equals avoids an NPE if getErrorCode() is null.
        assertTrue("Unexpected cause: " + cause, StorageErrorCodeStrings.INVALID_MD5.equals(cause.getErrorCode()));
    } finally {
        // Fix: the original only closed the stream on the success path and
        // leaked it when the expected corruption IOException was thrown.
        inStream.close();
    }
}
Also used : Path(org.apache.hadoop.fs.Path) ByteArrayInputStream(java.io.ByteArrayInputStream) BlockEntry(com.microsoft.azure.storage.blob.BlockEntry) ByteArrayInputStream(java.io.ByteArrayInputStream) InputStream(java.io.InputStream) OutputStream(java.io.OutputStream) IOException(java.io.IOException) CloudBlockBlob(com.microsoft.azure.storage.blob.CloudBlockBlob) StorageException(com.microsoft.azure.storage.StorageException)

Example 17 with StorageException

use of com.microsoft.azure.storage.StorageException in project hadoop by apache.

The class TestNativeAzureFileSystemLive, method testDeleteThrowsExceptionWithLeaseExistsErrorMessage.

/**
   * Tests fs.delete() function to delete a blob when another blob is holding a
   * lease on it. Delete if called without a lease should fail if another process
   * is holding a lease and throw appropriate exception
   * This is a scenario that would happen in HMaster startup when it tries to
   * clean up the temp dirs while the HMaster process which was killed earlier
   * held lease on the blob when doing some DDL operation
   */
@Test
public void testDeleteThrowsExceptionWithLeaseExistsErrorMessage() throws Exception {
    LOG.info("Starting test");
    final String FILE_KEY = "fileWithLease";
    // Create the file
    // NOTE(review): the OutputStream returned by fs.create() is never closed;
    // presumably exists() succeeds on the zero-length blob anyway — confirm.
    Path path = new Path(FILE_KEY);
    fs.create(path);
    assertTrue(fs.exists(path));
    NativeAzureFileSystem nfs = (NativeAzureFileSystem) fs;
    // Resolve the store-level key for the created path so the lease and the
    // delete both target the same underlying blob.
    final String fullKey = nfs.pathToKey(nfs.makeAbsolute(path));
    final AzureNativeFileSystemStore store = nfs.getStore();
    // Acquire the lease on the file in a background thread
    // leaseAttemptComplete: background -> main, "lease attempt finished".
    // beginningDeleteAttempt: main -> background, "delete is about to start".
    final CountDownLatch leaseAttemptComplete = new CountDownLatch(1);
    final CountDownLatch beginningDeleteAttempt = new CountDownLatch(1);
    Thread t = new Thread() {

        @Override
        public void run() {
            // Acquire the lease and then signal the main test thread.
            SelfRenewingLease lease = null;
            try {
                lease = store.acquireLease(fullKey);
                LOG.info("Lease acquired: " + lease.getLeaseID());
            } catch (AzureException e) {
                LOG.warn("Lease acqusition thread unable to acquire lease", e);
            } finally {
                // Signal even on failure so the main thread never blocks forever.
                leaseAttemptComplete.countDown();
            }
            // Wait for the main test thread to signal it will attempt the delete.
            try {
                beginningDeleteAttempt.await();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            // the test covers the case of delete retrying to acquire the lease.
            // Hold the lease across several retry intervals before freeing it,
            // so the concurrent delete is forced to retry at least once.
            try {
                Thread.sleep(SelfRenewingLease.LEASE_ACQUIRE_RETRY_INTERVAL * 3);
            } catch (InterruptedException ex) {
                Thread.currentThread().interrupt();
            }
            try {
                if (lease != null) {
                    LOG.info("Freeing lease");
                    lease.free();
                }
            } catch (StorageException se) {
                LOG.warn("Unable to free lease.", se);
            }
        }
    };
    // Start the background thread and wait for it to signal the lease is held.
    // NOTE(review): the thread is never join()ed; the test relies on delete()
    // blocking until the lease is released — confirm that is intended.
    t.start();
    try {
        leaseAttemptComplete.await();
    } catch (InterruptedException ex) {
        Thread.currentThread().interrupt();
    }
    // Try to delete the same file
    // delete() is expected to retry internally until the background thread
    // frees the lease, then succeed.
    beginningDeleteAttempt.countDown();
    store.delete(fullKey);
    // At this point file SHOULD BE DELETED
    assertFalse(fs.exists(path));
}
Also used : Path(org.apache.hadoop.fs.Path) CountDownLatch(java.util.concurrent.CountDownLatch) StorageException(com.microsoft.azure.storage.StorageException) Test(org.junit.Test)

Example 18 with StorageException

use of com.microsoft.azure.storage.StorageException in project druid by druid-io.

The class AzureDataSegmentKiller, method kill.

/**
 * Deletes the segment's data by emptying its parent blob directory in Azure
 * storage.
 *
 * @param segment the segment to kill; its load spec must provide
 *     "containerName" and "blobPath" entries
 * @throws SegmentLoadingException if the blob path has no parent directory or
 *     the storage/URI operation fails
 */
@Override
public void kill(DataSegment segment) throws SegmentLoadingException {
    log.info("Killing segment [%s]", segment);
    Map<String, Object> loadSpec = segment.getLoadSpec();
    final String containerName = MapUtils.getString(loadSpec, "containerName");
    final String blobPath = MapUtils.getString(loadSpec, "blobPath");
    // Fix: Paths.get(blobPath).getParent() returns null for a single-element
    // path, and the original then NPE'd on toString(). Fail with a
    // descriptive SegmentLoadingException instead.
    final java.nio.file.Path parent = Paths.get(blobPath).getParent();
    if (parent == null) {
        throw new SegmentLoadingException("Couldn't kill segment[%s]: blobPath[%s] has no parent directory", segment.getIdentifier(), blobPath);
    }
    final String dirPath = parent.toString();
    try {
        azureStorage.emptyCloudBlobDirectory(containerName, dirPath);
    } catch (StorageException e) {
        // Extended error info may be absent; guard before dereferencing.
        throw new SegmentLoadingException(e, "Couldn't kill segment[%s]: [%s]", segment.getIdentifier(), e.getExtendedErrorInformation() == null ? null : e.getExtendedErrorInformation().getErrorMessage());
    } catch (URISyntaxException e) {
        throw new SegmentLoadingException(e, "Couldn't kill segment[%s]: [%s]", segment.getIdentifier(), e.getReason());
    }
}
Also used : SegmentLoadingException(io.druid.segment.loading.SegmentLoadingException) URISyntaxException(java.net.URISyntaxException) StorageException(com.microsoft.azure.storage.StorageException)

Example 19 with StorageException

use of com.microsoft.azure.storage.StorageException in project druid by druid-io.

The class AzureTaskLogs, method streamTaskLog.

/**
 * Streams the log of the given task from Azure blob storage.
 *
 * @param taskid id of the task whose log blob is fetched
 * @param offset byte offset to start from; a negative value means "that many
 *     bytes from the end"; out-of-range offsets fall back to the start
 * @return a lazily-opened ByteSource over the log, or absent if no log blob
 *     exists for the task
 * @throws IOException if the blob's existence cannot be determined or the
 *     stream cannot be opened
 */
@Override
public Optional<ByteSource> streamTaskLog(final String taskid, final long offset) throws IOException {
    final String container = config.getContainer();
    final String taskKey = getTaskLogKey(taskid);
    try {
        if (!azureStorage.getBlobExists(container, taskKey)) {
            return Optional.absent();
        }
        return Optional.<ByteSource>of(new ByteSource() {

            @Override
            public InputStream openStream() throws IOException {
                try {
                    final long start;
                    final long length = azureStorage.getBlobLength(container, taskKey);
                    if (offset > 0 && offset < length) {
                        start = offset;
                    } else if (offset < 0 && (-1 * offset) < length) {
                        // Negative offset: start that many bytes before the end.
                        start = length + offset;
                    } else {
                        start = 0;
                    }
                    InputStream stream = azureStorage.getBlobInputStream(container, taskKey);
                    // Fix: the original ignored skip()'s return value, but
                    // InputStream.skip() may skip fewer bytes than requested.
                    // Loop until the whole offset is consumed.
                    long remaining = start;
                    while (remaining > 0) {
                        long skipped = stream.skip(remaining);
                        if (skipped <= 0) {
                            stream.close();
                            throw new IOException("Unexpected end of stream while skipping to offset " + start + " for task: " + taskid);
                        }
                        remaining -= skipped;
                    }
                    return stream;
                } catch (Exception e) {
                    throw new IOException(e);
                }
            }
        });
    } catch (StorageException | URISyntaxException e) {
        throw new IOException(String.format("Failed to stream logs from: %s", taskKey), e);
    }
}
Also used : InputStream(java.io.InputStream) ByteSource(com.google.common.io.ByteSource) IOException(java.io.IOException) URISyntaxException(java.net.URISyntaxException) StorageException(com.microsoft.azure.storage.StorageException) StorageException(com.microsoft.azure.storage.StorageException) URISyntaxException(java.net.URISyntaxException) IOException(java.io.IOException)

Example 20 with StorageException

use of com.microsoft.azure.storage.StorageException in project druid by druid-io.

The class AzureByteSourceTest, method openStreamWithRecoverableErrorTest.

// Verifies that a recoverable (HTTP 500) StorageException thrown by the
// underlying storage client surfaces from openStream() as an IOException.
@Test(expected = IOException.class)
public void openStreamWithRecoverableErrorTest() throws URISyntaxException, StorageException, IOException {
    final String container = "container";
    final String path = "/path/to/file";
    final AzureStorage storage = createMock(AzureStorage.class);
    // Simulate a transient server-side failure when the blob stream is requested.
    expect(storage.getBlobInputStream(container, path)).andThrow(new StorageException("", "", 500, null, null));
    replayAll();
    new AzureByteSource(storage, container, path).openStream();
    verifyAll();
}
Also used : StorageException(com.microsoft.azure.storage.StorageException) Test(org.junit.Test)

Aggregations

StorageException (com.microsoft.azure.storage.StorageException)55 URISyntaxException (java.net.URISyntaxException)34 IOException (java.io.IOException)31 Path (org.apache.hadoop.fs.Path)13 FileNotFoundException (java.io.FileNotFoundException)10 DataStoreException (org.apache.jackrabbit.core.data.DataStoreException)10 CloudBlockBlob (com.microsoft.azure.storage.blob.CloudBlockBlob)8 InputStream (java.io.InputStream)7 JsonParseException (com.fasterxml.jackson.core.JsonParseException)5 JsonMappingException (com.fasterxml.jackson.databind.JsonMappingException)5 CloudStorageAccount (com.microsoft.azure.storage.CloudStorageAccount)5 CloudBlob (com.microsoft.azure.storage.blob.CloudBlob)5 EOFException (java.io.EOFException)5 InvalidKeyException (java.security.InvalidKeyException)5 FileAlreadyExistsException (org.apache.hadoop.fs.FileAlreadyExistsException)5 AccessCondition (com.microsoft.azure.storage.AccessCondition)4 BlobRequestOptions (com.microsoft.azure.storage.blob.BlobRequestOptions)4 CloudBlobClient (com.microsoft.azure.storage.blob.CloudBlobClient)4 CloudBlobContainer (com.microsoft.azure.storage.blob.CloudBlobContainer)4 CloudBlobDirectory (com.microsoft.azure.storage.blob.CloudBlobDirectory)4