Search in sources :

Example 1 with LimitInputStream

Use of com.google.common.io.LimitInputStream in the project apex-malhar by Apache.

The class AbstractFileOutputOperatorTest defines the method checkSnappyFile.

/**
 * Verifies a Snappy-compressed output file that was written in windowed segments.
 * <p>
 * Each entry in {@code offsets} is the cumulative byte offset of the end of a rolled
 * segment; the bytes between consecutive offsets form one independently-compressed
 * Snappy stream. Every decompressed line in window {@code w} is expected to equal
 * {@code startVal + w * 2}, with {@code totalRecords} lines per window.
 *
 * @param file         the compressed file to verify
 * @param offsets      cumulative end offsets of each compressed segment (0 entries are skipped)
 * @param startVal     value expected on the first window's lines
 * @param totalWindows expected number of completed windows in the file
 * @param totalRecords number of records written per window
 * @throws IOException if reading or decompressing the file fails
 */
private void checkSnappyFile(File file, List<Long> offsets, int startVal, int totalWindows, int totalRecords) throws IOException {
    Configuration conf = new Configuration();
    CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(SnappyCodec.class, conf);
    InputStream gss = null;
    CompressionInputStream snappyIs = null;
    BufferedReader br = null;
    int numWindows = 0;
    try {
        gss = new FileInputStream(file);
        long startOffset = 0;
        for (long offset : offsets) {
            // Skip initial case in case file is not yet created
            if (offset == 0) {
                continue;
            }
            long limit = offset - startOffset;
            // Bound the codec stream to the bytes of this single rolled segment so the
            // decompressor does not read into the next segment's header.
            LimitInputStream lis = new LimitInputStream(gss, limit);
            snappyIs = codec.createInputStream(lis);
            // NOTE(review): platform-default charset kept to match the writer side — confirm.
            br = new BufferedReader(new InputStreamReader(snappyIs));
            String eline = "" + (startVal + numWindows * 2);
            int count = 0;
            String line;
            while ((line = br.readLine()) != null) {
                Assert.assertEquals("File line", eline, line);
                ++count;
                if ((count % totalRecords) == 0) {
                    ++numWindows;
                    eline = "" + (startVal + numWindows * 2);
                }
            }
            startOffset = offset;
        }
    } finally {
        // Closing the outermost wrapper closes the whole chain down to the file stream;
        // fall back to inner streams if construction stopped part-way.
        if (br != null) {
            br.close();
        } else if (snappyIs != null) {
            snappyIs.close();
        } else if (gss != null) {
            gss.close();
        }
    }
    Assert.assertEquals("Total", totalWindows, numWindows);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) InputStreamReader(java.io.InputStreamReader) CompressionInputStream(org.apache.hadoop.io.compress.CompressionInputStream) GZIPInputStream(java.util.zip.GZIPInputStream) LimitInputStream(com.google.common.io.LimitInputStream) CipherInputStream(javax.crypto.CipherInputStream) CompressionInputStream(org.apache.hadoop.io.compress.CompressionInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) LimitInputStream(com.google.common.io.LimitInputStream) FileInputStream(java.io.FileInputStream) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException) IOException(java.io.IOException) ConstraintViolationException(javax.validation.ConstraintViolationException) BufferedReader(java.io.BufferedReader) CompressionCodec(org.apache.hadoop.io.compress.CompressionCodec) SnappyCodec(org.apache.hadoop.io.compress.SnappyCodec)

Example 2 with LimitInputStream

Use of com.google.common.io.LimitInputStream in the project apex-malhar by Apache.

The class AbstractFileOutputOperatorTest defines the method checkCompressedFile.

/**
 * Verifies a GZIP-compressed (and optionally AES-encrypted) output file written in
 * windowed segments.
 * <p>
 * Each entry in {@code offsets} is the cumulative byte offset of the end of a rolled
 * segment; the bytes between consecutive offsets form one independent GZIP stream.
 * When {@code secretKey} is non-null the file is first decrypted with
 * AES/CBC/PKCS5Padding using {@code iv}. Every decompressed line in window {@code w}
 * is expected to equal {@code startVal + w * 2}, with {@code totalRecords} lines per
 * window.
 *
 * @param file         the compressed file to verify
 * @param offsets      cumulative end offsets of each compressed segment (0 entries are skipped)
 * @param startVal     value expected on the first window's lines
 * @param totalWindows expected number of completed windows in the file
 * @param totalRecords number of records written per window
 * @param secretKey    AES key the file was encrypted with, or null if unencrypted
 * @param iv           CBC initialization vector; only read when {@code secretKey} is non-null
 * @throws IOException if reading or decompressing the file fails
 */
private void checkCompressedFile(File file, List<Long> offsets, int startVal, int totalWindows, int totalRecords, SecretKey secretKey, byte[] iv) throws IOException {
    Cipher cipher = null;
    if (secretKey != null) {
        try {
            cipher = Cipher.getInstance("AES/CBC/PKCS5Padding");
            cipher.init(Cipher.DECRYPT_MODE, secretKey, new IvParameterSpec(iv));
        } catch (Exception e) {
            // Cipher setup failure is an environment/configuration problem, not I/O.
            throw new RuntimeException(e);
        }
    }
    InputStream gss = null;
    GZIPInputStream gis = null;
    BufferedReader br = null;
    int numWindows = 0;
    try {
        FileInputStream fis = new FileInputStream(file);
        // Layer decryption under decompression when the file was written encrypted.
        gss = (cipher != null) ? new CipherInputStream(fis, cipher) : fis;
        long startOffset = 0;
        for (long offset : offsets) {
            // Skip initial case in case file is not yet created
            if (offset == 0) {
                continue;
            }
            long limit = offset - startOffset;
            // Bound the GZIP stream to the bytes of this single rolled segment so the
            // decompressor does not read into the next segment's header.
            LimitInputStream lis = new LimitInputStream(gss, limit);
            gis = new GZIPInputStream(lis);
            // NOTE(review): platform-default charset kept to match the writer side — confirm.
            br = new BufferedReader(new InputStreamReader(gis));
            String eline = "" + (startVal + numWindows * 2);
            int count = 0;
            String line;
            while ((line = br.readLine()) != null) {
                Assert.assertEquals("File line", eline, line);
                ++count;
                if ((count % totalRecords) == 0) {
                    ++numWindows;
                    eline = "" + (startVal + numWindows * 2);
                }
            }
            startOffset = offset;
        }
    } finally {
        // Closing the outermost wrapper closes the whole chain down to the file stream;
        // fall back to inner streams if construction stopped part-way.
        if (br != null) {
            br.close();
        } else if (gis != null) {
            gis.close();
        } else if (gss != null) {
            gss.close();
        }
    }
    Assert.assertEquals("Total", totalWindows, numWindows);
}
Also used : CipherInputStream(javax.crypto.CipherInputStream) InputStreamReader(java.io.InputStreamReader) GZIPInputStream(java.util.zip.GZIPInputStream) LimitInputStream(com.google.common.io.LimitInputStream) CipherInputStream(javax.crypto.CipherInputStream) CompressionInputStream(org.apache.hadoop.io.compress.CompressionInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) LimitInputStream(com.google.common.io.LimitInputStream) FileInputStream(java.io.FileInputStream) NoSuchAlgorithmException(java.security.NoSuchAlgorithmException) IOException(java.io.IOException) ConstraintViolationException(javax.validation.ConstraintViolationException) GZIPInputStream(java.util.zip.GZIPInputStream) BufferedReader(java.io.BufferedReader) IvParameterSpec(javax.crypto.spec.IvParameterSpec) Cipher(javax.crypto.Cipher)

Aggregations

LimitInputStream (com.google.common.io.LimitInputStream)2 BufferedReader (java.io.BufferedReader)2 FileInputStream (java.io.FileInputStream)2 IOException (java.io.IOException)2 InputStream (java.io.InputStream)2 InputStreamReader (java.io.InputStreamReader)2 NoSuchAlgorithmException (java.security.NoSuchAlgorithmException)2 GZIPInputStream (java.util.zip.GZIPInputStream)2 CipherInputStream (javax.crypto.CipherInputStream)2 ConstraintViolationException (javax.validation.ConstraintViolationException)2 CompressionInputStream (org.apache.hadoop.io.compress.CompressionInputStream)2 Cipher (javax.crypto.Cipher)1 IvParameterSpec (javax.crypto.spec.IvParameterSpec)1 Configuration (org.apache.hadoop.conf.Configuration)1 CompressionCodec (org.apache.hadoop.io.compress.CompressionCodec)1 SnappyCodec (org.apache.hadoop.io.compress.SnappyCodec)1