Search in sources :

Example 86 with FileInputStream

use of java.io.FileInputStream in project hadoop by apache.

In class MD5FileUtils, method readStoredMd5.

/**
   * Read the md5 file stored alongside the given data file
   * and match the md5 file content against the expected line pattern.
   *
   * @param md5File the md5 file to read (stored alongside the data file)
   * @return a matcher with two matched groups
   *   where group(1) is the md5 string and group(2) is the data file path.
   * @throws IOException if the file cannot be read, or if its content does
   *   not match the expected "md5 *path" pattern
   */
private static Matcher readStoredMd5(File md5File) throws IOException {
    String md5Line;
    // try-with-resources guarantees the reader is closed on every exit path,
    // replacing the manual finally/IOUtils.cleanup of the original.
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
            new FileInputStream(md5File), java.nio.charset.StandardCharsets.UTF_8))) {
        String line = reader.readLine();
        // An empty file yields null from readLine(); normalize to "" so the
        // regex check below reports "Invalid MD5 file" instead of an NPE.
        md5Line = (line == null) ? "" : line.trim();
    } catch (IOException ioe) {
        // Add the file path for diagnosis; keep the original exception as cause.
        throw new IOException("Error reading md5 file at " + md5File, ioe);
    }
    Matcher matcher = LINE_REGEX.matcher(md5Line);
    if (!matcher.matches()) {
        throw new IOException("Invalid MD5 file " + md5File + ": the content \"" + md5Line + "\" does not match the expected pattern.");
    }
    return matcher;
}
Also used : InputStreamReader(java.io.InputStreamReader) Matcher(java.util.regex.Matcher) BufferedReader(java.io.BufferedReader) IOException(java.io.IOException) FileInputStream(java.io.FileInputStream)

Example 87 with FileInputStream

use of java.io.FileInputStream in project hadoop by apache.

In class MD5FileUtils, method computeMd5ForFile.

/**
   * Read {@code dataFile} and compute its MD5 checksum.
   *
   * @param dataFile the file whose contents are digested
   * @return the MD5 hash of the file's full contents
   * @throws IOException if the file cannot be opened or read
   */
public static MD5Hash computeMd5ForFile(File dataFile) throws IOException {
    // try-with-resources closes the stream on both success and failure;
    // the original's IOUtils.closeStream silently swallowed close errors.
    try (InputStream in = new FileInputStream(dataFile)) {
        MessageDigest digester = MD5Hash.getDigester();
        DigestInputStream dis = new DigestInputStream(in, digester);
        // Drain the stream through the digest; data itself is discarded.
        IOUtils.copyBytes(dis, new IOUtils.NullOutputStream(), 128 * 1024);
        return new MD5Hash(digester.digest());
    }
}
Also used : IOUtils(org.apache.hadoop.io.IOUtils) DigestInputStream(java.security.DigestInputStream) FileInputStream(java.io.FileInputStream) DigestInputStream(java.security.DigestInputStream) InputStream(java.io.InputStream) MD5Hash(org.apache.hadoop.io.MD5Hash) MessageDigest(java.security.MessageDigest) FileInputStream(java.io.FileInputStream)

Example 88 with FileInputStream

use of java.io.FileInputStream in project hadoop by apache.

In class PersistentLongFile, method readFile.

/**
   * Read a long value from the first line of the given file.
   *
   * @param file the file to read; may legitimately not exist yet
   * @param defaultVal the value to return when the file does not exist
   * @return the parsed long from the file's first line, or
   *   {@code defaultVal} if the file is missing
   * @throws IOException if the file exists but cannot be read
   * @throws NumberFormatException if the first line is not a valid long
   *   (including an empty file, whose first line reads as {@code null})
   */
public static long readFile(File file, long defaultVal) throws IOException {
    if (!file.exists()) {
        return defaultVal;
    }
    // try-with-resources closes the reader on every path, replacing the
    // original's close()/null/IOUtils.cleanup dance in a finally block.
    try (BufferedReader br = new BufferedReader(new InputStreamReader(
            new FileInputStream(file), java.nio.charset.StandardCharsets.UTF_8))) {
        return Long.parseLong(br.readLine());
    }
}
Also used : InputStreamReader(java.io.InputStreamReader) BufferedReader(java.io.BufferedReader) FileInputStream(java.io.FileInputStream)

Example 89 with FileInputStream

use of java.io.FileInputStream in project hadoop by apache.

In class FSImageTestUtil, method assertPropertiesFilesSame.

/**
   * Assert that a set of properties files all contain the same data.
   *
   * <p>Each file is compared (as a set of key/value entries) against the
   * first file in the array; any entry present in one but not the other —
   * unless its key is in {@code ignoredProperties} — triggers a failure.
   *
   * @param propFiles the files to compare.
   * @param ignoredProperties the property names to be ignored during
   *                          comparison; may be null for "ignore nothing".
   * @throws IOException if the files cannot be opened or read
   * @throws AssertionError if the files differ
   */
public static void assertPropertiesFilesSame(File[] propFiles, Set<String> ignoredProperties) throws IOException {
    Set<Map.Entry<Object, Object>> prevProps = null;
    for (File f : propFiles) {
        Properties props = new Properties();
        // try-with-resources replaces the manual IOUtils.closeStream finally.
        try (FileInputStream is = new FileInputStream(f)) {
            props.load(is);
        }
        if (prevProps == null) {
            // First file: its entries become the baseline for all others.
            prevProps = props.entrySet();
        } else {
            // Symmetric difference: entries present in exactly one of the two
            // sets, i.e. every key/value pair on which the files disagree.
            Set<Entry<Object, Object>> diff = Sets.symmetricDifference(prevProps, props.entrySet());
            for (Entry<Object, Object> entry : diff) {
                if (ignoredProperties != null && ignoredProperties.contains(entry.getKey())) {
                    continue;
                }
                fail("Properties file " + f + " differs from " + propFiles[0]);
            }
        }
    }
}
Also used : Entry(java.util.Map.Entry) Properties(java.util.Properties) RandomAccessFile(java.io.RandomAccessFile) EditLogFile(org.apache.hadoop.hdfs.server.namenode.FileJournalManager.EditLogFile) FSImageFile(org.apache.hadoop.hdfs.server.namenode.FSImageStorageInspector.FSImageFile) File(java.io.File) FileInputStream(java.io.FileInputStream)

Example 90 with FileInputStream

use of java.io.FileInputStream in project hadoop by apache.

In class AliyunOSSFileSystemStore, method uploadObject.

/**
   * Upload a file as an OSS object, using single upload.
   *
   * @param key object key.
   * @param file local file to upload.
   * @throws IOException if failed to upload object.
   */
public void uploadObject(String key, File file) throws IOException {
    File object = file.getAbsoluteFile();
    // Open the stream inside try-with-resources: in the original, an
    // exception thrown while building the metadata (between open and the
    // try block) leaked the FileInputStream.
    try (FileInputStream fis = new FileInputStream(object)) {
        ObjectMetadata meta = new ObjectMetadata();
        meta.setContentLength(object.length());
        if (StringUtils.isNotEmpty(serverSideEncryptionAlgorithm)) {
            meta.setServerSideEncryption(serverSideEncryptionAlgorithm);
        }
        PutObjectResult result = ossClient.putObject(bucketName, key, fis, meta);
        LOG.debug(result.getETag());
        statistics.incrementWriteOps(1);
    }
}
Also used : PutObjectResult(com.aliyun.oss.model.PutObjectResult) File(java.io.File) ObjectMetadata(com.aliyun.oss.model.ObjectMetadata) FileInputStream(java.io.FileInputStream)

Aggregations

FileInputStream (java.io.FileInputStream)5931 File (java.io.File)2701 IOException (java.io.IOException)2699 InputStream (java.io.InputStream)1514 FileOutputStream (java.io.FileOutputStream)922 FileNotFoundException (java.io.FileNotFoundException)876 BufferedInputStream (java.io.BufferedInputStream)744 InputStreamReader (java.io.InputStreamReader)680 BufferedReader (java.io.BufferedReader)558 Properties (java.util.Properties)552 Test (org.junit.Test)543 ArrayList (java.util.ArrayList)320 DataInputStream (java.io.DataInputStream)288 OutputStream (java.io.OutputStream)273 ByteArrayInputStream (java.io.ByteArrayInputStream)238 ZipEntry (java.util.zip.ZipEntry)221 XmlPullParserException (org.xmlpull.v1.XmlPullParserException)200 HashMap (java.util.HashMap)195 XmlPullParser (org.xmlpull.v1.XmlPullParser)184 ByteArrayOutputStream (java.io.ByteArrayOutputStream)177