Search in sources :

Example 96 with Bucket

use of software.amazon.awssdk.services.s3.model.Bucket in project pravega by pravega.

the class S3ChunkStorage method doDelete.

@Override
protected void doDelete(ChunkHandle handle) throws ChunkStorageException {
    try {
        final String chunkName = handle.getChunkName();
        // Deleting a missing chunk is an error: surface it as ChunkNotFoundException.
        // The thrown exception is intentionally routed through the catch below so that
        // convertException applies the same mapping as for any other failure.
        // NOTE(review): there is an inherent check-then-delete race with concurrent writers.
        if (!checkExists(chunkName)) {
            throw new ChunkNotFoundException(chunkName, "doDelete");
        }
        // Remove the object from the configured bucket under its mapped key.
        client.deleteObject(DeleteObjectRequest.builder()
                .bucket(this.config.getBucket())
                .key(getObjectPath(chunkName))
                .build());
    } catch (Exception e) {
        // Translate SDK/runtime failures into the chunk-layer exception hierarchy.
        throw convertException(handle.getChunkName(), "doDelete", e);
    }
}
Also used : ChunkNotFoundException(io.pravega.segmentstore.storage.chunklayer.ChunkNotFoundException) DeleteObjectRequest(software.amazon.awssdk.services.s3.model.DeleteObjectRequest) S3Exception(software.amazon.awssdk.services.s3.model.S3Exception) ChunkStorageException(io.pravega.segmentstore.storage.chunklayer.ChunkStorageException) ChunkNotFoundException(io.pravega.segmentstore.storage.chunklayer.ChunkNotFoundException) ChunkAlreadyExistsException(io.pravega.segmentstore.storage.chunklayer.ChunkAlreadyExistsException)

Example 97 with Bucket

use of com.amazonaws.services.s3.model.Bucket (AWS SDK for Java v1) in project pentaho-kettle by pentaho.

the class S3CsvInputDialog method getCSV.

// Get the data layout
/**
 * Samples the configured S3 object to discover the CSV field layout and fills the
 * fields table of the dialog. Prompts the user for how many lines to sample, seeds
 * the grid with string-typed columns, then runs the standard CSV import progress
 * dialog to refine the guessed types. Errors are reported via error dialogs; this
 * method never throws.
 */
private void getCSV() {
    InputStream inputStream = null;
    try {
        // Snapshot the current dialog state into a fresh meta object.
        S3CsvInputMeta meta = new S3CsvInputMeta();
        getInfo(meta);
        // Resolve environment variables in the filename/bucket/max-line-size settings.
        String filename = transMeta.environmentSubstitute(meta.getFilename());
        String bucketname = transMeta.environmentSubstitute(meta.getBucket());
        int maxLineSize = Const.toInt(transMeta.environmentSubstitute(meta.getMaxLineSize()), 2000);
        wFields.table.removeAll();
        S3ObjectsProvider s3ObjProvider = new S3ObjectsProvider(meta.getS3Client(transMeta));
        Bucket s3bucket = s3ObjProvider.getBucket(bucketname);
        if (s3bucket == null) {
            throw new Exception(Messages.getString("S3DefaultService.Exception.UnableToFindBucket.Message", bucketname));
        }
        // Now we can continue reading the rows of data and we can guess the
        // Sample a few lines to determine the correct type of the fields...
        // 
        String shellText = Messages.getString("S3CsvInputDialog.LinesToSample.DialogTitle");
        String lineText = Messages.getString("S3CsvInputDialog.LinesToSample.DialogMessage");
        EnterNumberDialog end = new EnterNumberDialog(shell, 100, shellText, lineText);
        int samples = end.open();
        if (samples < 0) {
            // User cancelled the sample-count dialog; nothing to do.
            return;
        }
        // Only get the first lines, not the complete file
        // And grab an input stream to the data...
        // Range request: samples * maxLineSize bytes is an upper bound on the data needed.
        inputStream = s3ObjProvider.getS3Object(s3bucket, filename, 0L, (long) samples * (long) maxLineSize).getObjectContent();
        InputStreamReader reader = new InputStreamReader(inputStream);
        // NOTE(review): no explicit charset — uses the platform default; confirm this matches
        // how the step reads the file at runtime.
        // Read a line of data to determine the number of rows...
        // 
        String line = TextFileInput.getLine(log, reader, TextFileInputMeta.FILE_FORMAT_MIXED, new StringBuilder(1000));
        // Split the string, header or data into parts...
        // 
        String[] fieldNames = Const.splitString(line, meta.getDelimiter());
        if (!meta.isHeaderPresent()) {
            // Don't use field names from the header...
            // Generate field names F1 ... F10
            // 
            // $NON-NLS-1$
            DecimalFormat df = new DecimalFormat("000");
            for (int i = 0; i < fieldNames.length; i++) {
                // $NON-NLS-1$
                fieldNames[i] = "Field_" + df.format(i);
            }
        } else {
            // Header present: strip the configured enclosure character from both ends
            // of each header name (only when it encloses the whole token).
            if (!Utils.isEmpty(meta.getEnclosure())) {
                for (int i = 0; i < fieldNames.length; i++) {
                    if (fieldNames[i].startsWith(meta.getEnclosure()) && fieldNames[i].endsWith(meta.getEnclosure()) && fieldNames[i].length() > 1) {
                        fieldNames[i] = fieldNames[i].substring(1, fieldNames[i].length() - 1);
                    }
                }
            }
        }
        // Trim whitespace from every field name.
        for (int i = 0; i < fieldNames.length; i++) {
            fieldNames[i] = Const.trim(fieldNames[i]);
        }
        // Seed the fields grid: every column starts as a String; types are refined below.
        for (int i = 0; i < fieldNames.length; i++) {
            TableItem item = new TableItem(wFields.table, SWT.NONE);
            item.setText(1, fieldNames[i]);
            item.setText(2, ValueMeta.getTypeDesc(ValueMetaInterface.TYPE_STRING));
        }
        wFields.removeEmptyRows();
        wFields.setRowNums();
        wFields.optWidth(true);
        // Re-read the dialog state now that the grid has been populated.
        getInfo(meta);
        // Scan the sampled lines (continuing from the already-consumed header line)
        // to guess each column's type and format.
        TextFileCSVImportProgressDialog pd = new TextFileCSVImportProgressDialog(shell, meta, transMeta, reader, samples, true);
        String message = pd.open();
        if (message != null) {
            wFields.removeAll();
            // OK, what's the result of our search?
            getData(meta);
            wFields.removeEmptyRows();
            wFields.setRowNums();
            wFields.optWidth(true);
            // Show the scan results in a read-only text dialog.
            EnterTextDialog etd = new EnterTextDialog(shell, Messages.getString("S3CsvInputDialog.ScanResults.DialogTitle"), Messages.getString("S3CsvInputDialog.ScanResults.DialogMessage"), message, true);
            etd.setReadOnly();
            etd.open();
        }
    } catch (IOException e) {
        new ErrorDialog(shell, Messages.getString("S3CsvInputDialog.IOError.DialogTitle"), Messages.getString("S3CsvInputDialog.IOError.DialogMessage"), e);
    } catch (Exception e) {
        new ErrorDialog(shell, Messages.getString("System.Dialog.Error.Title"), Messages.getString("S3CsvInputDialog.ErrorGettingFileDesc.DialogMessage"), e);
    } finally {
        // Always release the S3 stream; closing it also releases the reader's source.
        try {
            if (inputStream != null) {
                inputStream.close();
            }
        } catch (Exception e) {
            log.logError(stepname, "Error closing s3 data input stream", e);
        }
    }
}
Also used : TextFileCSVImportProgressDialog(org.pentaho.di.ui.trans.steps.textfileinput.TextFileCSVImportProgressDialog) InputStreamReader(java.io.InputStreamReader) InputStream(java.io.InputStream) DecimalFormat(java.text.DecimalFormat) TableItem(org.eclipse.swt.widgets.TableItem) ErrorDialog(org.pentaho.di.ui.core.dialog.ErrorDialog) IOException(java.io.IOException) KettleStepException(org.pentaho.di.core.exception.KettleStepException) IOException(java.io.IOException) Bucket(com.amazonaws.services.s3.model.Bucket) EnterTextDialog(org.pentaho.di.ui.core.dialog.EnterTextDialog) EnterNumberDialog(org.pentaho.di.ui.core.dialog.EnterNumberDialog)

Example 98 with Bucket

use of com.amazonaws.services.s3.model.Bucket (AWS SDK for Java v1) in project kylo by Teradata.

the class S3FileSystemProviderTest method listFilesS3n.

/**
 * Verify listing buckets using the s3n scheme.
 */
@Test
@SuppressWarnings("unchecked")
public void listFilesS3n() {
    // Stub an S3 client that reports exactly two buckets.
    final AmazonS3 s3 = Mockito.mock(AmazonS3.class);
    final Bucket first = AmazonS3Util.createBucket("bucket1");
    final Bucket second = AmazonS3Util.createBucket("bucket2");
    Mockito.when(s3.listBuckets()).thenReturn(Arrays.asList(first, second));
    // Provider under test: inject the stubbed client instead of creating a real one.
    final S3FileSystemProvider provider = new S3FileSystemProvider() {

        @Override
        protected AmazonS3 createS3Client(@Nonnull final URI uri, @Nonnull final Configuration conf) {
            return s3;
        }
    };
    // Listing an s3n root should surface one DataSetFile per bucket, and nothing else.
    final List<DataSetFile> listing = provider.listFiles(new Path(S3N), new Configuration(false));
    Assert.assertThat(listing, CoreMatchers.hasItems(isDataSetFile(first), isDataSetFile(second)));
    Assert.assertEquals(2, listing.size());
}
Also used : Path(org.apache.hadoop.fs.Path) AmazonS3(com.amazonaws.services.s3.AmazonS3) Configuration(org.apache.hadoop.conf.Configuration) Bucket(com.amazonaws.services.s3.model.Bucket) Nonnull(javax.annotation.Nonnull) URI(java.net.URI) DataSetFile(com.thinkbiganalytics.kylo.catalog.rest.model.DataSetFile) Test(org.junit.Test)

Example 99 with Bucket

use of com.amazonaws.services.s3.model.Bucket (AWS SDK for Java v1) in project kylo by Teradata.

the class S3FileSystemProviderTest method createBucket.

/**
 * Builds an S3 {@link Bucket} with the given name, stamped with the current time
 * as its creation date.
 */
@Nonnull
private Bucket createBucket(@Nonnull final String name) {
    final Bucket result = new Bucket(name);
    result.setCreationDate(new Date());
    return result;
}
Also used : Bucket(com.amazonaws.services.s3.model.Bucket) Date(java.util.Date) Nonnull(javax.annotation.Nonnull)

Example 100 with Bucket

use of com.amazonaws.services.s3.model.Bucket (AWS SDK for Java v1) in project kylo by Teradata.

the class AmazonS3Util method createBucket.

/**
 * Builds an S3 {@link Bucket} with the given name, stamped with the current time
 * as its creation date.
 */
@Nonnull
@SuppressWarnings("WeakerAccess")
public static Bucket createBucket(@Nonnull final String name) {
    final Bucket result = new Bucket(name);
    result.setCreationDate(new Date());
    return result;
}
Also used : Bucket(com.amazonaws.services.s3.model.Bucket) Date(java.util.Date) Nonnull(javax.annotation.Nonnull)

Aggregations

S3Exception (software.amazon.awssdk.services.s3.model.S3Exception)60 S3Client (software.amazon.awssdk.services.s3.S3Client)53 Region (software.amazon.awssdk.regions.Region)47 Bucket (com.amazonaws.services.s3.model.Bucket)32 ArrayList (java.util.ArrayList)24 Test (org.junit.Test)22 IOException (java.io.IOException)18 GetObjectRequest (software.amazon.awssdk.services.s3.model.GetObjectRequest)18 S3Object (software.amazon.awssdk.services.s3.model.S3Object)17 GetObjectResponse (software.amazon.awssdk.services.s3.model.GetObjectResponse)14 HeadObjectResponse (software.amazon.awssdk.services.s3.model.HeadObjectResponse)14 PutObjectRequest (software.amazon.awssdk.services.s3.model.PutObjectRequest)14 ListObjectsV2Response (software.amazon.awssdk.services.s3.model.ListObjectsV2Response)11 AmazonS3 (com.amazonaws.services.s3.AmazonS3)10 ListObjectsV2Request (software.amazon.awssdk.services.s3.model.ListObjectsV2Request)10 CompleteMultipartUploadRequest (software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest)9 S3TestUtils.buildMockedS3FileSystem (org.apache.beam.sdk.io.aws2.s3.S3TestUtils.buildMockedS3FileSystem)8 MatchResult (org.apache.beam.sdk.io.fs.MatchResult)8 Date (java.util.Date)7 CompletedPart (software.amazon.awssdk.services.s3.model.CompletedPart)7