
Example 91 with FileInputStream

Use of java.io.FileInputStream in project hadoop by apache.

From the class AliyunOSSFileSystemStore, method multipartUploadObject.

/**
   * Upload a file as an OSS object, using multipart upload.
   *
   * @param key object key.
   * @param file local file to upload.
   * @throws IOException if failed to upload object.
   */
public void multipartUploadObject(String key, File file) throws IOException {
    File object = file.getAbsoluteFile();
    long dataLen = object.length();
    long realPartSize = AliyunOSSUtils.calculatePartSize(dataLen, partSize);
    int partNum = (int) (dataLen / realPartSize);
    if (dataLen % realPartSize != 0) {
        partNum += 1;
    }
    InitiateMultipartUploadRequest initiateMultipartUploadRequest = new InitiateMultipartUploadRequest(bucketName, key);
    ObjectMetadata meta = new ObjectMetadata();
    if (StringUtils.isNotEmpty(serverSideEncryptionAlgorithm)) {
        meta.setServerSideEncryption(serverSideEncryptionAlgorithm);
    }
    initiateMultipartUploadRequest.setObjectMetadata(meta);
    InitiateMultipartUploadResult initiateMultipartUploadResult = ossClient.initiateMultipartUpload(initiateMultipartUploadRequest);
    List<PartETag> partETags = new ArrayList<PartETag>();
    String uploadId = initiateMultipartUploadResult.getUploadId();
    try {
        for (int i = 0; i < partNum; i++) {
            // TODO: Optimize this, avoid opening the object multiple times
            FileInputStream fis = new FileInputStream(object);
            try {
                long skipBytes = realPartSize * i;
                AliyunOSSUtils.skipFully(fis, skipBytes);
                long size = (realPartSize < dataLen - skipBytes) ? realPartSize : dataLen - skipBytes;
                UploadPartRequest uploadPartRequest = new UploadPartRequest();
                uploadPartRequest.setBucketName(bucketName);
                uploadPartRequest.setKey(key);
                uploadPartRequest.setUploadId(uploadId);
                uploadPartRequest.setInputStream(fis);
                uploadPartRequest.setPartSize(size);
                uploadPartRequest.setPartNumber(i + 1);
                UploadPartResult uploadPartResult = ossClient.uploadPart(uploadPartRequest);
                statistics.incrementWriteOps(1);
                partETags.add(uploadPartResult.getPartETag());
            } finally {
                fis.close();
            }
        }
        CompleteMultipartUploadRequest completeMultipartUploadRequest = new CompleteMultipartUploadRequest(bucketName, key, uploadId, partETags);
        CompleteMultipartUploadResult completeMultipartUploadResult = ossClient.completeMultipartUpload(completeMultipartUploadRequest);
        LOG.debug(completeMultipartUploadResult.getETag());
    } catch (OSSException | ClientException e) {
        AbortMultipartUploadRequest abortMultipartUploadRequest = new AbortMultipartUploadRequest(bucketName, key, uploadId);
        ossClient.abortMultipartUpload(abortMultipartUploadRequest);
        // Rethrow so the failure reaches the caller, as the javadoc's
        // "@throws IOException if failed to upload object" promises;
        // otherwise a failed upload would be silently dropped.
        throw new IOException("Multipart upload of " + key + " failed", e);
    }
}
Also used : InitiateMultipartUploadResult(com.aliyun.oss.model.InitiateMultipartUploadResult) InitiateMultipartUploadRequest(com.aliyun.oss.model.InitiateMultipartUploadRequest) ArrayList(java.util.ArrayList) UploadPartRequest(com.aliyun.oss.model.UploadPartRequest) OSSException(com.aliyun.oss.OSSException) AbortMultipartUploadRequest(com.aliyun.oss.model.AbortMultipartUploadRequest) CompleteMultipartUploadResult(com.aliyun.oss.model.CompleteMultipartUploadResult) PartETag(com.aliyun.oss.model.PartETag) FileInputStream(java.io.FileInputStream) UploadPartResult(com.aliyun.oss.model.UploadPartResult) ClientException(com.aliyun.oss.ClientException) File(java.io.File) ObjectMetadata(com.aliyun.oss.model.ObjectMetadata) CompleteMultipartUploadRequest(com.aliyun.oss.model.CompleteMultipartUploadRequest)
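
The TODO above marks the main inefficiency: the file is reopened and skipped for every part. Since parts are consumed in order, one way around it is to open the file once and let each part read the next stretch of bytes from the same stream. The following is a hypothetical sketch of that loop, not the project's actual fix; it assumes (as the skip arithmetic above already does) that uploadPart consumes exactly getPartSize() bytes per request:

// Hypothetical replacement for the per-part loop: one stream, read sequentially.
try (FileInputStream fis = new FileInputStream(object)) {
    for (int i = 0; i < partNum; i++) {
        long offset = realPartSize * i;
        long size = Math.min(realPartSize, dataLen - offset);
        UploadPartRequest uploadPartRequest = new UploadPartRequest();
        uploadPartRequest.setBucketName(bucketName);
        uploadPartRequest.setKey(key);
        uploadPartRequest.setUploadId(uploadId);
        // No skipFully needed: the stream is already positioned at this part.
        uploadPartRequest.setInputStream(fis);
        uploadPartRequest.setPartSize(size);
        uploadPartRequest.setPartNumber(i + 1);
        partETags.add(ossClient.uploadPart(uploadPartRequest).getPartETag());
        statistics.incrementWriteOps(1);
    }
}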

Example 92 with FileInputStream

Use of java.io.FileInputStream in project hadoop by apache.

From the class RegexCopyFilter, method initialize.

/**
   * Loads a list of filter patterns for use in shouldCopy.
   */
@Override
public void initialize() {
    BufferedReader reader = null;
    try {
        InputStream is = new FileInputStream(filtersFile);
        reader = new BufferedReader(new InputStreamReader(is, Charset.forName("UTF-8")));
        String line;
        while ((line = reader.readLine()) != null) {
            Pattern pattern = Pattern.compile(line);
            filters.add(pattern);
        }
    } catch (FileNotFoundException notFound) {
        LOG.error("Can't find filters file " + filtersFile);
    } catch (IOException cantRead) {
        LOG.error("An error occurred while attempting to read from " + filtersFile);
    } finally {
        IOUtils.cleanup(LOG, reader);
    }
}
Also used : Pattern(java.util.regex.Pattern) InputStreamReader(java.io.InputStreamReader) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) BufferedReader(java.io.BufferedReader) FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException)
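
The reader chain is closed in the finally block via IOUtils.cleanup, so neither stream leaks even if a pattern fails to compile. On Java 7+ the same method reads more simply with try-with-resources; a minimal equivalent sketch, reusing the filters, filtersFile, and LOG fields from the example above and adding java.nio.charset.StandardCharsets:

@Override
public void initialize() {
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
            new FileInputStream(filtersFile), StandardCharsets.UTF_8))) {
        String line;
        while ((line = reader.readLine()) != null) {
            // Each line of the filters file is one exclusion regex.
            filters.add(Pattern.compile(line));
        }
    } catch (FileNotFoundException notFound) {
        LOG.error("Can't find filters file " + filtersFile);
    } catch (IOException cantRead) {
        LOG.error("An error occurred while attempting to read from " + filtersFile);
    }
}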

Example 93 with FileInputStream

Use of java.io.FileInputStream in project hadoop by apache.

From the class RumenToSLSConverter, method generateSLSLoadFile.

private static void generateSLSLoadFile(String inputFile, String outputFile) throws IOException {
    try (Reader input = new InputStreamReader(new FileInputStream(inputFile), "UTF-8")) {
        try (Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
            ObjectMapper mapper = new ObjectMapper();
            ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
            Iterator<Map> i = mapper.readValues(new JsonFactory().createParser(input), Map.class);
            while (i.hasNext()) {
                Map m = i.next();
                output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
            }
        }
    }
}
Also used : InputStreamReader(java.io.InputStreamReader) FileOutputStream(java.io.FileOutputStream) JsonFactory(com.fasterxml.jackson.core.JsonFactory) Reader(java.io.Reader) ObjectWriter(com.fasterxml.jackson.databind.ObjectWriter) OutputStreamWriter(java.io.OutputStreamWriter) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) TreeMap(java.util.TreeMap) FileInputStream(java.io.FileInputStream) Writer(java.io.Writer) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)
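
mapper.readValues returns a Jackson MappingIterator that deserializes one top-level JSON value at a time, so the converter streams the Rumen trace rather than loading it whole. A standalone sketch of just that streaming read (the method name and the printing are hypothetical; EOL and createSLSJob belong to the converter above):

// Stream top-level JSON objects from a file, one Map per iteration.
static void dumpTrace(String inputFile) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    try (Reader input = new InputStreamReader(
            new FileInputStream(inputFile), "UTF-8")) {
        Iterator<Map> jobs = mapper.readValues(
                new JsonFactory().createParser(input), Map.class);
        while (jobs.hasNext()) {
            // The full trace is never buffered; each next() parses one object.
            System.out.println(jobs.next().keySet());
        }
    }
}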

Example 94 with FileInputStream

Use of java.io.FileInputStream in project hadoop by apache.

From the class TestFileOutputCommitter, method slurp.

public static String slurp(File f) throws IOException {
    int len = (int) f.length();
    byte[] buf = new byte[len];
    FileInputStream in = new FileInputStream(f);
    String contents = null;
    try {
        // A single read() may return fewer bytes than requested, so loop
        // until the whole file is in the buffer.
        int off = 0;
        while (off < len) {
            int n = in.read(buf, off, len - off);
            if (n < 0) {
                break;
            }
            off += n;
        }
        contents = new String(buf, "UTF-8");
    } finally {
        in.close();
    }
    return contents;
}
Also used : FileInputStream(java.io.FileInputStream)
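
A single InputStream.read call is allowed to return fewer bytes than requested, which is why the helper needs the read loop. On Java 7+ the whole method collapses to the NIO equivalent, which handles short reads internally; a minimal sketch:

public static String slurp(File f) throws IOException {
    // Files.readAllBytes loops internally until the whole file is read.
    return new String(java.nio.file.Files.readAllBytes(f.toPath()),
            java.nio.charset.StandardCharsets.UTF_8);
}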

Example 95 with FileInputStream

Use of java.io.FileInputStream in project hadoop by apache.

From the class TestLineRecordReader, method readRecordsDirectly.

// Gather the records by just splitting on new lines
public String[] readRecordsDirectly(URL testFileUrl, boolean bzip) throws IOException {
    int MAX_DATA_SIZE = 1024 * 1024;
    byte[] data = new byte[MAX_DATA_SIZE];
    FileInputStream fis = new FileInputStream(testFileUrl.getFile());
    int count;
    if (bzip) {
        BZip2CompressorInputStream bzIn = new BZip2CompressorInputStream(fis);
        count = bzIn.read(data);
        bzIn.close();
    } else {
        count = fis.read(data);
    }
    fis.close();
    assertTrue("Test file data too big for buffer", count < data.length);
    return new String(data, 0, count, "UTF-8").split("\n");
}
Also used : BZip2CompressorInputStream(org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream) FileInputStream(java.io.FileInputStream)
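
In the example above, fis leaks if a read throws, and on the bzip path the stream is closed twice (harmless, since close is idempotent on an already-closed stream). A try-with-resources sketch of the same logic that closes each stream exactly once:

public String[] readRecordsDirectly(URL testFileUrl, boolean bzip) throws IOException {
    final int MAX_DATA_SIZE = 1024 * 1024;
    byte[] data = new byte[MAX_DATA_SIZE];
    int count;
    try (FileInputStream fis = new FileInputStream(testFileUrl.getFile())) {
        if (bzip) {
            try (BZip2CompressorInputStream bzIn = new BZip2CompressorInputStream(fis)) {
                // Like the original, this assumes one read() returns the
                // whole (small) test file.
                count = bzIn.read(data);
            }
        } else {
            count = fis.read(data);
        }
    }
    assertTrue("Test file data too big for buffer", count < data.length);
    return new String(data, 0, count, "UTF-8").split("\n");
}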

Aggregations

FileInputStream (java.io.FileInputStream) 5931
File (java.io.File) 2701
IOException (java.io.IOException) 2699
InputStream (java.io.InputStream) 1514
FileOutputStream (java.io.FileOutputStream) 922
FileNotFoundException (java.io.FileNotFoundException) 876
BufferedInputStream (java.io.BufferedInputStream) 744
InputStreamReader (java.io.InputStreamReader) 680
BufferedReader (java.io.BufferedReader) 558
Properties (java.util.Properties) 552
Test (org.junit.Test) 543
ArrayList (java.util.ArrayList) 320
DataInputStream (java.io.DataInputStream) 288
OutputStream (java.io.OutputStream) 273
ByteArrayInputStream (java.io.ByteArrayInputStream) 238
ZipEntry (java.util.zip.ZipEntry) 221
XmlPullParserException (org.xmlpull.v1.XmlPullParserException) 200
HashMap (java.util.HashMap) 195
XmlPullParser (org.xmlpull.v1.XmlPullParser) 184
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 177