
Example 11 with MockFile

use of org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile in project hive by apache.

the class TestAcidUtils method testParsing.

@Test
public void testParsing() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf,
        // new MockFile("mock:/tmp/base_000123/bucket_00001", 500, new byte[0]),
        new MockFile("mock:/tmp/delta_000005_000006/bucket_00001", 500, new byte[0]),
        new MockFile("mock:/tmp/delete_delta_000005_000006/bucket_00001", 500, new byte[0]));
    assertEquals(123, AcidUtils.ParsedBaseLight.parseBase(new Path("/tmp/base_000123")).getWriteId());
    assertEquals(0, AcidUtils.ParsedBaseLight.parseBase(new Path("/tmp/base_000123")).getVisibilityTxnId());
    Path dir = new Path("mock:/tmp/");
    AcidOutputFormat.Options opts = AcidUtils.parseBaseOrDeltaBucketFilename(new Path(dir, "base_567/bucket_123"), conf);
    assertEquals(false, opts.getOldStyle());
    assertEquals(true, opts.isWritingBase());
    assertEquals(567, opts.getMaximumWriteId());
    assertEquals(0, opts.getMinimumWriteId());
    assertEquals(123, opts.getBucketId());
    opts = AcidUtils.parseBaseOrDeltaBucketFilename(new MockPath(fs, dir + "/delta_000005_000006/bucket_00001"), conf);
    assertEquals(false, opts.getOldStyle());
    assertEquals(false, opts.isWritingBase());
    assertEquals(6, opts.getMaximumWriteId());
    assertEquals(5, opts.getMinimumWriteId());
    assertEquals(1, opts.getBucketId());
    opts = AcidUtils.parseBaseOrDeltaBucketFilename(new MockPath(fs, dir + "/delete_delta_000005_000006/bucket_00001"), conf);
    assertEquals(false, opts.getOldStyle());
    assertEquals(false, opts.isWritingBase());
    assertEquals(6, opts.getMaximumWriteId());
    assertEquals(5, opts.getMinimumWriteId());
    assertEquals(1, opts.getBucketId());
    opts = AcidUtils.parseBaseOrDeltaBucketFilename(new Path(dir, "000123_0"), conf);
    assertEquals(true, opts.getOldStyle());
    assertEquals(true, opts.isWritingBase());
    assertEquals(123, opts.getBucketId());
    assertEquals(0, opts.getMinimumWriteId());
    assertEquals(0, opts.getMaximumWriteId());
    // Test handling files generated by LOAD DATA command
    opts = AcidUtils.parseBaseOrDeltaBucketFilename(new MockPath(fs, dir + "/delta_0000002_0000002_0000/000012_0"), conf);
    assertEquals(true, opts.getOldStyle());
    assertEquals(false, opts.isWritingBase());
    assertEquals(12, opts.getBucketId());
    assertEquals(2, opts.getMinimumWriteId());
    assertEquals(2, opts.getMaximumWriteId());
}
Also used : Path(org.apache.hadoop.fs.Path) MockPath(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath) Configuration(org.apache.hadoop.conf.Configuration) MockFile(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile) MockFileSystem(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem) Test(org.junit.Test)
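
The assertions above hinge on how ACID directory and file names encode write ids and bucket ids. The standalone sketch below is my own illustration, not the AcidUtils.parseBaseOrDeltaBucketFilename implementation; the class name and regexes are hypothetical. It shows how a name such as delta_000005_000006/bucket_00001 could be decomposed into the min/max write ids and bucket id the test expects.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class AcidNameSketch {
    // delta_<minWid>_<maxWid> or delete_delta_<minWid>_<maxWid>, optionally followed by _<stmtId>
    private static final Pattern DELTA =
            Pattern.compile("(?:delete_)?delta_(\\d+)_(\\d+)(?:_(\\d+))?");
    private static final Pattern BUCKET = Pattern.compile("bucket_(\\d+)");

    public static void main(String[] args) {
        Matcher d = DELTA.matcher("delta_000005_000006");
        Matcher b = BUCKET.matcher("bucket_00001");
        if (d.matches() && b.matches()) {
            long minWriteId = Long.parseLong(d.group(1));   // 5
            long maxWriteId = Long.parseLong(d.group(2));   // 6
            int bucketId = Integer.parseInt(b.group(1));    // 1
            System.out.printf("min=%d max=%d bucket=%d%n", minWriteId, maxWriteId, bucketId);
        }
    }
}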

Example 12 with MockFile

use of org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile in project hive by apache.

the class TestAcidUtils method testOverlapingDelta.

@Test
public void testOverlapingDelta() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_0000063_63/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_00061_61/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_50/bucket_0", 500, new byte[0]));
    Path part = new MockPath(fs, "mock:/tbl/part1");
    conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString());
    AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false);
    assertEquals("mock:/tbl/part1/base_50", dir.getBaseDirectory().toString());
    List<Path> obsolete = dir.getObsolete();
    assertEquals(2, obsolete.size());
    assertEquals("mock:/tbl/part1/delta_052_55", obsolete.get(0).toString());
    assertEquals("mock:/tbl/part1/delta_0060_60", obsolete.get(1).toString());
    List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories();
    assertEquals(4, delts.size());
    assertEquals("mock:/tbl/part1/delta_40_60", delts.get(0).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_00061_61", delts.get(1).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_000062_62", delts.get(2).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_0000063_63", delts.get(3).getPath().toString());
}
Also used : Path(org.apache.hadoop.fs.Path) MockPath(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath) Configuration(org.apache.hadoop.conf.Configuration) MockFile(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile) BitSet(java.util.BitSet) ValidReaderWriteIdList(org.apache.hadoop.hive.common.ValidReaderWriteIdList) MockFileSystem(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem) ValidReadTxnList(org.apache.hadoop.hive.common.ValidReadTxnList) Test(org.junit.Test)
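
Why delta_052_55 and delta_0060_60 come back as obsolete: their write-id ranges are already covered by delta_40_60, which itself sits above base_50. The following is a minimal, hypothetical sketch of that coverage rule; it is not the AcidUtils.getAcidState algorithm (which also consults the write-id/txn snapshot), and the helper name and record are my own.

import java.util.ArrayList;
import java.util.List;

public class DeltaCoverageSketch {
    record Delta(long min, long max) {}

    static List<Delta> selectCurrent(long baseWriteId, List<Delta> deltasSortedByMin) {
        List<Delta> current = new ArrayList<>();
        long covered = baseWriteId;              // everything at or below the base is already compacted
        for (Delta d : deltasSortedByMin) {
            if (d.max() <= covered) {
                continue;                        // range fully covered -> reported as obsolete
            }
            current.add(d);
            covered = Math.max(covered, d.max());
        }
        return current;
    }

    public static void main(String[] args) {
        // Mirrors the directories in testOverlapingDelta: base_50 plus overlapping deltas.
        List<Delta> deltas = List.of(
            new Delta(40, 60), new Delta(52, 55), new Delta(60, 60),
            new Delta(61, 61), new Delta(62, 62), new Delta(63, 63));
        // Prints the four ranges the test expects as current: 40-60, 61-61, 62-62, 63-63.
        selectCurrent(50, deltas).forEach(d ->
            System.out.println("delta_" + d.min() + "_" + d.max()));
    }
}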

Example 13 with MockFile

use of org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile in project hive by apache.

the class TestAcidUtils method testOverlapingDelta2.

/**
 * Hive 1.3.0 delta dir naming scheme which supports multi-statement txns
 * @throws Exception
 */
@Test
public void testOverlapingDelta2() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_0000063_63_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62_3/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_00061_61_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_4/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_7/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_058_58/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_50/bucket_0", 500, new byte[0]));
    Path part = new MockPath(fs, "mock:/tbl/part1");
    conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString());
    AcidDirectory dir = AcidUtils.getAcidState(fs, part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"), null, false);
    assertEquals("mock:/tbl/part1/base_50", dir.getBaseDirectory().toString());
    List<Path> obsolete = dir.getObsolete();
    assertEquals(5, obsolete.size());
    assertEquals("mock:/tbl/part1/delta_052_55", obsolete.get(0).toString());
    assertEquals("mock:/tbl/part1/delta_058_58", obsolete.get(1).toString());
    assertEquals("mock:/tbl/part1/delta_0060_60_1", obsolete.get(2).toString());
    assertEquals("mock:/tbl/part1/delta_0060_60_4", obsolete.get(3).toString());
    assertEquals("mock:/tbl/part1/delta_0060_60_7", obsolete.get(4).toString());
    List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories();
    assertEquals(5, delts.size());
    assertEquals("mock:/tbl/part1/delta_40_60", delts.get(0).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_00061_61_0", delts.get(1).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_000062_62_0", delts.get(2).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_000062_62_3", delts.get(3).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_0000063_63_0", delts.get(4).getPath().toString());
}
Also used : Path(org.apache.hadoop.fs.Path) MockPath(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath) Configuration(org.apache.hadoop.conf.Configuration) MockFile(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile) BitSet(java.util.BitSet) ValidReaderWriteIdList(org.apache.hadoop.hive.common.ValidReaderWriteIdList) MockFileSystem(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem) ValidReadTxnList(org.apache.hadoop.hive.common.ValidReadTxnList) Test(org.junit.Test)
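
With the Hive 1.3.0 scheme, deltas written by different statements of the same transaction (delta_000062_62_0 and delta_000062_62_3 above) carry a statement-id suffix and both stay current. Below is a hedged sketch of the resulting ordering, assuming a (minWriteId, maxWriteId, statementId) comparator; the class and comparator are illustrative, not the actual AcidUtils.ParsedDelta ordering.

import java.util.Comparator;
import java.util.List;

public class DeltaOrderSketch {
    record ParsedDelta(long min, long max, int stmtId, String name) {}

    static final Comparator<ParsedDelta> ORDER =
        Comparator.comparingLong(ParsedDelta::min)
                  .thenComparingLong(ParsedDelta::max)
                  .thenComparingInt(ParsedDelta::stmtId);

    public static void main(String[] args) {
        List<ParsedDelta> current = List.of(
            new ParsedDelta(62, 62, 3, "delta_000062_62_3"),
            new ParsedDelta(63, 63, 0, "delta_0000063_63_0"),
            new ParsedDelta(40, 60, -1, "delta_40_60"),          // old-style name, no statement id
            new ParsedDelta(62, 62, 0, "delta_000062_62_0"),
            new ParsedDelta(61, 61, 0, "delta_00061_61_0"));
        // Prints delta_40_60, delta_00061_61_0, delta_000062_62_0, delta_000062_62_3, delta_0000063_63_0
        current.stream().sorted(ORDER).map(ParsedDelta::name).forEach(System.out::println);
    }
}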

Example 14 with MockFile

use of org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile in project hive by apache.

the class TestAcidUtils method testRecursiveDirListingIsReusedWhenSnapshotTrue.

@Test
public void testRecursiveDirListingIsReusedWhenSnapshotTrue() throws IOException {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/base_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_0/_orc_acid_version", 10, new byte[0]));
    conf.set(ValidTxnList.VALID_TXNS_KEY, new ValidReadTxnList(new long[0], new BitSet(), 1000, Long.MAX_VALUE).writeToString());
    AcidDirectory dir = AcidUtils.getAcidState(fs, new MockPath(fs, "mock:/tbl/part1"), conf, new ValidReaderWriteIdList(), null, false);
    assertEquals("mock:/tbl/part1/base_0", dir.getBaseDirectory().toString());
    assertEquals(0, dir.getObsolete().size());
    assertEquals(0, dir.getOriginalFiles().size());
    assertEquals(0, dir.getCurrentDirectories().size());
    assertEquals(0, fs.getNumOpenFileCalls());
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) MockFile(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile) BitSet(java.util.BitSet) MockPath(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath) ValidReaderWriteIdList(org.apache.hadoop.hive.common.ValidReaderWriteIdList) MockFileSystem(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem) ValidReadTxnList(org.apache.hadoop.hive.common.ValidReadTxnList) Test(org.junit.Test)
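
The final assertion, fs.getNumOpenFileCalls() == 0, works because the mock file system counts every open(): if getAcidState can serve itself from the recursive directory-listing snapshot, it never reopens files. A rough, hypothetical sketch of that counting idea follows; CountingReader is invented for illustration, and the real counter lives in TestInputOutputFormat.MockFileSystem.

import java.util.concurrent.atomic.AtomicInteger;

public class OpenCountingSketch {
    static class CountingReader {
        private final AtomicInteger openCalls = new AtomicInteger();

        byte[] open(String path) {
            openCalls.incrementAndGet();          // every read is recorded
            return new byte[0];                   // contents are irrelevant for the sketch
        }

        int getNumOpenFileCalls() {
            return openCalls.get();
        }
    }

    public static void main(String[] args) {
        CountingReader fs = new CountingReader();
        // A code path that works purely off a pre-built listing never calls open():
        System.out.println(fs.getNumOpenFileCalls());   // 0
    }
}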

Example 15 with MockFile

use of org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile in project hive by apache.

the class TestAcidUtils method testBestBase.

@Test
public void testBestBase() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/base_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_10/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_25/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_98_100/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_100/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_120_130/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_200/bucket_0", 500, new byte[0]));
    Path part = new MockPath(fs, "/tbl/part1");
    AcidUtils.Directory dir = AcidUtils.getAcidState(part, conf, new ValidReadTxnList("150:" + Long.MAX_VALUE + ":"));
    assertEquals("mock:/tbl/part1/base_100", dir.getBaseDirectory().toString());
    assertEquals(1, dir.getCurrentDirectories().size());
    assertEquals("mock:/tbl/part1/delta_120_130", dir.getCurrentDirectories().get(0).getPath().toString());
    List<FileStatus> obsoletes = dir.getObsolete();
    assertEquals(4, obsoletes.size());
    assertEquals("mock:/tbl/part1/base_10", obsoletes.get(0).getPath().toString());
    assertEquals("mock:/tbl/part1/base_25", obsoletes.get(1).getPath().toString());
    assertEquals("mock:/tbl/part1/base_5", obsoletes.get(2).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_98_100", obsoletes.get(3).getPath().toString());
    assertEquals(0, dir.getOriginalFiles().size());
    dir = AcidUtils.getAcidState(part, conf, new ValidReadTxnList("10:" + Long.MAX_VALUE + ":"));
    assertEquals("mock:/tbl/part1/base_10", dir.getBaseDirectory().toString());
    assertEquals(0, dir.getCurrentDirectories().size());
    obsoletes = dir.getObsolete();
    assertEquals(1, obsoletes.size());
    assertEquals("mock:/tbl/part1/base_5", obsoletes.get(0).getPath().toString());
    assertEquals(0, dir.getOriginalFiles().size());
    /* Single-statement txns only: since we don't compact a txn range that includes an open txn,
       the existence of delta_120_130 implies that 121 in the exception list is aborted, unless
       delta_120_130 is from streaming ingest, in which case 121 can be open
       (and thus 122-130 are open too).
       99 here would be aborted since 121 is minOpenTxn; base_100 is still good.
       For multi-statement txns, see HIVE-13369. */
    dir = AcidUtils.getAcidState(part, conf, new ValidReadTxnList("150:121:99:121"));
    assertEquals("mock:/tbl/part1/base_100", dir.getBaseDirectory().toString());
    assertEquals(1, dir.getCurrentDirectories().size());
    assertEquals("mock:/tbl/part1/delta_120_130", dir.getCurrentDirectories().get(0).getPath().toString());
    obsoletes = dir.getObsolete();
    assertEquals(4, obsoletes.size());
    assertEquals("mock:/tbl/part1/base_10", obsoletes.get(0).getPath().toString());
    assertEquals("mock:/tbl/part1/base_25", obsoletes.get(1).getPath().toString());
    assertEquals("mock:/tbl/part1/base_5", obsoletes.get(2).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_98_100", obsoletes.get(3).getPath().toString());
    boolean gotException = false;
    try {
        dir = AcidUtils.getAcidState(part, conf, new ValidReadTxnList("125:5:5"));
    } catch (IOException e) {
        gotException = true;
        Assert.assertEquals("Not enough history available for (125,5).  Oldest available base: " + "mock:/tbl/part1/base_5", e.getMessage());
    }
    Assert.assertTrue("Expected exception", gotException);
    fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_10/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_12_25/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_25/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_100/bucket_0", 500, new byte[0]));
    part = new MockPath(fs, "/tbl/part1");
    try {
        gotException = false;
        dir = AcidUtils.getAcidState(part, conf, new ValidReadTxnList("150:7:7"));
    } catch (IOException e) {
        gotException = true;
        Assert.assertEquals("Not enough history available for (150,7).  Oldest available base: " + "mock:/tbl/part1/base_25", e.getMessage());
    }
    Assert.assertTrue("Expected exception", gotException);
    fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_2_10/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_25/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_100/bucket_0", 500, new byte[0]));
    part = new MockPath(fs, "/tbl/part1");
    try {
        gotException = false;
        dir = AcidUtils.getAcidState(part, conf, new ValidReadTxnList("150:7:7"));
    } catch (IOException e) {
        gotException = true;
        Assert.assertEquals("Not enough history available for (150,7).  Oldest available base: " + "mock:/tbl/part1/base_25", e.getMessage());
    }
    Assert.assertTrue("Expected exception", gotException);
    fs = new MockFileSystem(conf,
        // non-acid to acid table conversion
        new MockFile("mock:/tbl/part1/base_" + Long.MIN_VALUE + "/bucket_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/base_100/bucket_0", 500, new byte[0]));
    part = new MockPath(fs, "/tbl/part1");
    // note that we don't include the current txn of the client in the exception list, to support read-your-writes
    dir = AcidUtils.getAcidState(part, conf, new ValidReadTxnList("1:" + Long.MAX_VALUE + ":"));
    assertEquals("mock:/tbl/part1/base_" + Long.MIN_VALUE, dir.getBaseDirectory().toString());
    assertEquals(1, dir.getCurrentDirectories().size());
    assertEquals("mock:/tbl/part1/delta_1_1", dir.getCurrentDirectories().get(0).getPath().toString());
    assertEquals(0, dir.getObsolete().size());
}
Also used : Path(org.apache.hadoop.fs.Path) MockPath(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath) FileStatus(org.apache.hadoop.fs.FileStatus) Configuration(org.apache.hadoop.conf.Configuration) MockFile(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile) IOException(java.io.IOException) MockFileSystem(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem) ValidReadTxnList(org.apache.hadoop.hive.common.ValidReadTxnList) Test(org.junit.Test)
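
The pattern running through this test: the newest base that is fully visible to the reader wins, and if even the oldest base is too new for the requested snapshot, getAcidState reports "Not enough history available". Below is a simplified, hypothetical sketch of that selection rule; it ignores the full exception list and aborted-txn handling in the real AcidUtils code, and the class and method names are my own.

import java.util.List;

public class BestBaseSketch {
    static long bestBase(List<Long> baseWriteIds, long highWatermark, long minOpenTxn) {
        long best = -1;
        for (long b : baseWriteIds) {
            if (b <= highWatermark && b < minOpenTxn && b > best) {
                best = b;                 // newest base fully visible to this reader
            }
        }
        if (best < 0 && !baseWriteIds.isEmpty()) {
            throw new IllegalStateException("Not enough history available");
        }
        return best;
    }

    public static void main(String[] args) {
        List<Long> bases = List.of(5L, 10L, 25L, 100L, 200L);
        System.out.println(bestBase(bases, 150, Long.MAX_VALUE));  // 100, as in the first block above
        System.out.println(bestBase(bases, 10, Long.MAX_VALUE));   // 10, as in the second block
        System.out.println(bestBase(bases, 150, 121));             // 100, as in the "150:121:99:121" block
        try {
            bestBase(bases, 125, 5);                               // like "125:5:5" in the test
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage());                    // Not enough history available
        }
    }
}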

Aggregations

Configuration (org.apache.hadoop.conf.Configuration): 18
MockFile (org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile): 18
MockFileSystem (org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem): 18
MockPath (org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath): 18
Test (org.junit.Test): 18
Path (org.apache.hadoop.fs.Path): 16
ValidReadTxnList (org.apache.hadoop.hive.common.ValidReadTxnList): 16
BitSet (java.util.BitSet): 15
ValidReaderWriteIdList (org.apache.hadoop.hive.common.ValidReaderWriteIdList): 13
ValidCompactorWriteIdList (org.apache.hadoop.hive.common.ValidCompactorWriteIdList): 3
HashSet (java.util.HashSet): 2
HdfsFileStatusWithId (org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatusWithId): 2
IOException (java.io.IOException): 1
FileStatus (org.apache.hadoop.fs.FileStatus): 1