
Example 11 with ReadOnlyStorageMetadata

use of voldemort.store.readonly.ReadOnlyStorageMetadata in project voldemort by voldemort.

In the class HadoopStoreBuilderUtilsTest, the method testReadFileContents:

@Test
public void testReadFileContents() throws Exception {
    Path testPath = new Path(TestUtils.createTempDir().getAbsolutePath(), "tempFile");
    FileSystem fs = testPath.getFileSystem(new Configuration());
    // Create an empty file, closing the stream fs.create() hands back
    fs.create(testPath).close();
    // 1) Read back empty file
    String emptyString = HadoopStoreBuilderUtils.readFileContents(fs, testPath, 1024);
    Assert.assertEquals(0, emptyString.length());
    // 2) Write random bytes, then read them back
    byte[] randomBytes = writeRandomData(testPath, 10);
    Assert.assertEquals(HadoopStoreBuilderUtils.readFileContents(fs, testPath, 1024), new String(randomBytes));
    // 3) Write a json string
    fs.delete(testPath, true);
    fs.create(testPath).close();
    ReadOnlyStorageMetadata metadata = new ReadOnlyStorageMetadata();
    metadata.add(ReadOnlyStorageMetadata.FORMAT, ReadOnlyStorageFormat.READONLY_V2.getCode());
    // Write the JSON metadata, closing the stream so the bytes are flushed
    try (FileOutputStream out = new FileOutputStream(testPath.toString())) {
        out.write(metadata.toJsonString().getBytes());
    }
    ReadOnlyStorageMetadata readMetadata = new ReadOnlyStorageMetadata(HadoopStoreBuilderUtils.readFileContents(fs, testPath, 1024));
    Assert.assertEquals(readMetadata.get(ReadOnlyStorageMetadata.FORMAT), ReadOnlyStorageFormat.READONLY_V2.getCode());
}
Also used : Path(org.apache.hadoop.fs.Path) ReadOnlyStorageMetadata(voldemort.store.readonly.ReadOnlyStorageMetadata) Configuration(org.apache.hadoop.conf.Configuration) FileSystem(org.apache.hadoop.fs.FileSystem) FileOutputStream(java.io.FileOutputStream) Test(org.junit.Test)
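
The heart of the test above is a round-trip that is independent of HDFS: ReadOnlyStorageMetadata serializes to a JSON string and can be reconstructed from one. Below is a minimal sketch using only the constructors and accessors the test exercises (add, toJsonString, the String-accepting constructor, and get); the class name is ours, and we assume ReadOnlyStorageFormat lives in the same voldemort.store.readonly package.

import voldemort.store.readonly.ReadOnlyStorageFormat;
import voldemort.store.readonly.ReadOnlyStorageMetadata;

public class MetadataRoundTripSketch {

    public static void main(String[] args) throws Exception {
        // Build metadata in memory and serialize it to JSON
        ReadOnlyStorageMetadata metadata = new ReadOnlyStorageMetadata();
        metadata.add(ReadOnlyStorageMetadata.FORMAT, ReadOnlyStorageFormat.READONLY_V2.getCode());
        String json = metadata.toJsonString();

        // Reconstruct from the JSON string and verify the format code survived
        ReadOnlyStorageMetadata parsed = new ReadOnlyStorageMetadata(json);
        if (!ReadOnlyStorageFormat.READONLY_V2.getCode().equals(parsed.get(ReadOnlyStorageMetadata.FORMAT))) {
            throw new AssertionError("format code did not survive the JSON round-trip");
        }
    }
}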

Example 12 with ReadOnlyStorageMetadata

use of voldemort.store.readonly.ReadOnlyStorageMetadata in project voldemort by voldemort.

In the class HdfsFetcherAdvancedTest, the method testCorruptedCompressedFile:

/*
     * Tests that a corrupted compressed stream triggers an exception when the
     * server starts to decompress it.
     *
     * 1. We write random bytes into the index and data files.
     *
     * 2. We rename them to end with ".gz" to simulate corrupted compressed
     * streams.
     *
     * 3. We run the fetcher. The fetcher sees the ".gz" extension, starts
     * decompressing, and fails to find a valid GZIP header, so it throws an
     * exception (see the standalone GZIP sketch after this example).
     */
@Test
public void testCorruptedCompressedFile() throws Exception {
    if (!isCompressed) {
        return;
    }
    testSourceDir = createTempDir();
    // generate index, data, and metadata files
    File indexFile = new File(testSourceDir, "0_0.index");
    FileUtils.writeByteArrayToFile(indexFile, TestUtils.randomBytes(100));
    File dataFile = new File(testSourceDir, "0_0.data");
    FileUtils.writeByteArrayToFile(dataFile, TestUtils.randomBytes(400));
    HdfsFetcher fetcher = new HdfsFetcher();
    File metadataFile = new File(testSourceDir, ".metadata");
    ReadOnlyStorageMetadata metadata = new ReadOnlyStorageMetadata();
    metadata.add(ReadOnlyStorageMetadata.FORMAT, ReadOnlyStorageFormat.READONLY_V2.getCode());
    metadata.add(ReadOnlyStorageMetadata.CHECKSUM_TYPE, CheckSum.toString(CheckSumType.MD5));
    // Correct metadata checksum - MD5
    metadata.add(ReadOnlyStorageMetadata.CHECKSUM, new String(Hex.encodeHex(CheckSumTests.calculateCheckSum(testSourceDir.listFiles(), CheckSumType.MD5))));
    FileUtils.writeStringToFile(metadataFile, metadata.toJsonString());
    if (!indexFile.renameTo(new File(testSourceDir, indexFile.getName() + ".gz")) || !dataFile.renameTo(new File(testSourceDir, dataFile.getName() + ".gz"))) {
        Assert.fail("cannot rename files as desired");
    }
    testDestDir = new File(testSourceDir.getAbsolutePath() + "1");
    if (testDestDir.exists()) {
        deleteDir(testDestDir);
    }
    File fetchedFile;
    try {
        fetchedFile = fetcher.fetch(testSourceDir.getAbsolutePath(), testDestDir.getAbsolutePath());
        Assert.fail("Unexpected! Fetch should have failed, but instead, successfully got: " + fetchedFile);
    } catch (VoldemortException ex) {
        // this is expected, since we did not provide a valid compressed file
        cleanUp();
        return;
    } catch (Exception ex) {
        // Any other exception is not acceptable. Fail the test case
        cleanUp();
        Assert.fail("Unexpected Exception thrown!");
    }
}
Also used : ReadOnlyStorageMetadata(voldemort.store.readonly.ReadOnlyStorageMetadata) File(java.io.File) VoldemortException(voldemort.VoldemortException) EofException(org.mortbay.jetty.EofException) UnauthorizedStoreException(voldemort.store.readonly.UnauthorizedStoreException) IOException(java.io.IOException) QuotaExceededException(voldemort.store.quota.QuotaExceededException) Test(org.junit.Test)
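
The failure mode this test relies on comes from the JDK itself: GZIPInputStream parses the GZIP header in its constructor and throws java.util.zip.ZipException when the magic bytes (0x1f 0x8b) are absent. A standalone illustration, plain JDK with no Voldemort code:

import java.io.ByteArrayInputStream;
import java.util.zip.GZIPInputStream;
import java.util.zip.ZipException;

public class GzipHeaderSketch {

    public static void main(String[] args) throws Exception {
        // Random-looking bytes with no GZIP magic number
        byte[] notGzip = new byte[] { 0x00, 0x11, 0x22, 0x33 };
        try {
            // The constructor eagerly parses the header, so it fails right here
            new GZIPInputStream(new ByteArrayInputStream(notGzip));
            throw new AssertionError("expected a ZipException for a bogus header");
        } catch (ZipException expected) {
            // "Not in GZIP format" -- exactly what the fetcher trips over
        }
    }
}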

Example 13 with ReadOnlyStorageMetadata

use of voldemort.store.readonly.ReadOnlyStorageMetadata in project voldemort by voldemort.

In the class HdfsFetcherAdvancedTest, the method testForDiskQuota:

private void testForDiskQuota(int diskQuotaInKB, int actualDataSizeInBytes) throws Exception {
    cleanUp();
    if (testSourceDir != null && testSourceDir.exists()) {
        deleteDir(testSourceDir);
    }
    testSourceDir = createTempDir();
    File testUncompressedSourceDir = null;
    int indexFileSize = actualDataSizeInBytes / 4;
    int dataFileSize = actualDataSizeInBytes - indexFileSize;
    // generate index, data, and metadata files
    File indexFile = new File(testSourceDir, "0_0.index");
    FileUtils.writeByteArrayToFile(indexFile, TestUtils.randomBytes(indexFileSize));
    File dataFile = new File(testSourceDir, "0_0.data");
    FileUtils.writeByteArrayToFile(dataFile, TestUtils.randomBytes(dataFileSize));
    HdfsFetcher fetcher = new HdfsFetcher();
    File metadataFile = new File(testSourceDir, ".metadata");
    ReadOnlyStorageMetadata metadata = new ReadOnlyStorageMetadata();
    metadata.add(ReadOnlyStorageMetadata.FORMAT, ReadOnlyStorageFormat.READONLY_V2.getCode());
    metadata.add(ReadOnlyStorageMetadata.CHECKSUM_TYPE, CheckSum.toString(CheckSumType.MD5));
    // Correct metadata checksum - MD5
    byte[] computedChecksum = CheckSumTests.calculateCheckSum(testSourceDir.listFiles(), CheckSumType.MD5);
    metadata.add(ReadOnlyStorageMetadata.CHECKSUM, new String(Hex.encodeHex(computedChecksum)));
    metadata.add(ReadOnlyStorageMetadata.DISK_SIZE_IN_BYTES, Integer.toString(actualDataSizeInBytes));
    FileUtils.writeStringToFile(metadataFile, metadata.toJsonString());
    /*
         * if isCompressed == true replace .index and .data files with their
         * compressed files before invoking fetch. Move the original
         * uncompressed .index and .data files to a temporary location so they
         * can be used later to check for data equality.
         */
    if (isCompressed) {
        String destIndexPath = indexFile.getAbsolutePath() + ".gz";
        gzipFile(indexFile.getAbsolutePath(), destIndexPath);
        String destDataPath = dataFile.getAbsolutePath() + ".gz";
        gzipFile(dataFile.getAbsolutePath(), destDataPath);
        testUncompressedSourceDir = new File(testSourceDir.getAbsolutePath() + "-uncompressed");
        testUncompressedSourceDir.delete();
        testUncompressedSourceDir.mkdir();
        if (!indexFile.renameTo(new File(testUncompressedSourceDir, indexFile.getName())) || !dataFile.renameTo(new File(testUncompressedSourceDir, dataFile.getName()))) {
            throw new Exception("cannot move uncompressed files to the temporary location");
        }
    }
    testDestDir = new File(testSourceDir.getAbsolutePath() + "1");
    if (testDestDir.exists()) {
        deleteDir(testDestDir);
    }
    File fetchedFile = fetcher.fetch(testSourceDir.getAbsolutePath(), testDestDir.getAbsolutePath(), diskQuotaInKB);
    assertNotNull(fetchedFile);
    assertEquals(fetchedFile.getAbsolutePath(), testDestDir.getAbsolutePath());
    if (isCompressed) {
        for (File file : testUncompressedSourceDir.listFiles()) {
            if (file.isFile()) {
                Assert.assertTrue(ReadOnlyTestUtils.areTwoBinaryFilesEqual(file, new File(testDestDir, file.getName())));
            }
        }
    }
    if (testDestDir.exists()) {
        deleteDir(testDestDir);
    }
}
Also used : ReadOnlyStorageMetadata(voldemort.store.readonly.ReadOnlyStorageMetadata) File(java.io.File) EofException(org.mortbay.jetty.EofException) UnauthorizedStoreException(voldemort.store.readonly.UnauthorizedStoreException) VoldemortException(voldemort.VoldemortException) IOException(java.io.IOException) QuotaExceededException(voldemort.store.quota.QuotaExceededException)
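
The quota flows in through fetch(source, destination, diskQuotaInKB), and the DISK_SIZE_IN_BYTES entry written to .metadata is what makes a cheap pre-flight check possible. The sketch below is a hypothetical gate, not HdfsFetcher's actual code, and it assumes QuotaExceededException exposes a plain String-message constructor:

import voldemort.store.quota.QuotaExceededException;

public class QuotaGateSketch {

    // Hypothetical pre-flight check: compare the size declared in the
    // .metadata file against the quota before moving any data.
    static void checkQuota(long diskQuotaInKB, long diskSizeInBytes) {
        long quotaInBytes = diskQuotaInKB * 1024;
        if (diskSizeInBytes > quotaInBytes) {
            throw new QuotaExceededException("store is " + diskSizeInBytes
                    + " bytes but the quota is only " + quotaInBytes + " bytes");
        }
    }

    public static void main(String[] args) {
        checkQuota(10, 5000); // passes: 5000 bytes fit in 10 KB
        checkQuota(1, 5000);  // throws: 5000 bytes exceed a 1 KB quota
    }
}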

Example 14 with ReadOnlyStorageMetadata

use of voldemort.store.readonly.ReadOnlyStorageMetadata in project voldemort by voldemort.

In the class HdfsFetcherTest, the method testAggStatsWithQuotaExceedException:

public void testAggStatsWithQuotaExceedException() throws Exception {
    HdfsFetcherAggStats stats = HdfsFetcherAggStats.getStats();
    long totalBytesFetchedBefore = stats.getTotalBytesFetched();
    long totalQuotaExceedFailuresBefore = stats.getTotalQuotaExceedFailures();
    long totalFetchesBefore = stats.getTotalFetches();
    long totalIncompleteFetchesBefore = stats.getTotalIncompleteFetches();
    // Generate 0_0.[index | data] and their corresponding metadata
    File testSourceDirectory = TestUtils.createTempDir();
    File testDestinationDirectory = TestUtils.createTempDir();
    File indexFile = new File(testSourceDirectory, "0_0.index");
    FileUtils.writeByteArrayToFile(indexFile, TestUtils.randomBytes(1000));
    File dataFile = new File(testSourceDirectory, "0_0.data");
    FileUtils.writeByteArrayToFile(dataFile, TestUtils.randomBytes(4000));
    File metadataFile = new File(testSourceDirectory, ".metadata");
    ReadOnlyStorageMetadata metadata = new ReadOnlyStorageMetadata();
    metadata.add(ReadOnlyStorageMetadata.FORMAT, ReadOnlyStorageFormat.READONLY_V2.getCode());
    metadata.add(ReadOnlyStorageMetadata.CHECKSUM, new String(Hex.encodeHex(CheckSumTests.calculateCheckSum(testSourceDirectory.listFiles(), CheckSumType.MD5))));
    metadata.add(ReadOnlyStorageMetadata.DISK_SIZE_IN_BYTES, "5000");
    FileUtils.writeStringToFile(metadataFile, metadata.toJsonString());
    HdfsFetcher fetcher = new HdfsFetcher();
    File fetchedFile = null;
    try {
        fetchedFile = fetcher.fetch(testSourceDirectory.getAbsolutePath(), testDestinationDirectory.getAbsolutePath() + "1", 1);
    } catch (Exception e) {
        // Expected: the 5000-byte store exceeds the 1 KB quota
    }
    assertNull(fetchedFile);
    // The total bytes fetched includes the metadata file as well.
    assertEquals(totalBytesFetchedBefore + metadata.toJsonString().length(), stats.getTotalBytesFetched());
    assertEquals(totalQuotaExceedFailuresBefore + 1, stats.getTotalQuotaExceedFailures());
    assertEquals(totalFetchesBefore + 1, stats.getTotalFetches());
    assertEquals(totalIncompleteFetchesBefore + 1, stats.getTotalIncompleteFetches());
}
Also used : ReadOnlyStorageMetadata(voldemort.store.readonly.ReadOnlyStorageMetadata) File(java.io.File) VoldemortException(voldemort.VoldemortException)
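
The before/after snapshot pattern in this test is worth extracting when several tests poke the same global HdfsFetcherAggStats singleton. A generic sketch; assertDelta is our own illustrative helper, not Voldemort or JUnit API:

import java.util.function.LongSupplier;

public class StatsDeltaSketch {

    // Runs `action` and asserts that `counter` moved by exactly `expectedDelta`.
    static void assertDelta(LongSupplier counter, long expectedDelta, Runnable action) {
        long before = counter.getAsLong();
        action.run();
        long delta = counter.getAsLong() - before;
        if (delta != expectedDelta) {
            throw new AssertionError("expected counter delta " + expectedDelta + " but saw " + delta);
        }
    }

    public static void main(String[] args) {
        // Toy stand-in for a stats counter such as getTotalQuotaExceedFailures()
        long[] failures = { 0 };
        assertDelta(() -> failures[0], 1, () -> failures[0]++);
    }
}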

Aggregations

ReadOnlyStorageMetadata (voldemort.store.readonly.ReadOnlyStorageMetadata) 14
File (java.io.File) 12
VoldemortException (voldemort.VoldemortException) 10
IOException (java.io.IOException) 5
Path (org.apache.hadoop.fs.Path) 4
Test (org.junit.Test) 4
QuotaExceededException (voldemort.store.quota.QuotaExceededException) 4
UnauthorizedStoreException (voldemort.store.readonly.UnauthorizedStoreException) 4
FileOutputStream (java.io.FileOutputStream) 3
FileSystem (org.apache.hadoop.fs.FileSystem) 3
EofException (org.mortbay.jetty.EofException) 3
BufferedWriter (java.io.BufferedWriter) 2
FileWriter (java.io.FileWriter) 2
ArrayList (java.util.ArrayList) 2
List (java.util.List) 2
JobConf (org.apache.hadoop.mapred.JobConf) 2
HashMap (java.util.HashMap) 1
Map (java.util.Map) 1
AtomicInteger (java.util.concurrent.atomic.AtomicInteger) 1
ObjectName (javax.management.ObjectName) 1