
Example 1 with ValidReaderWriteIdList

Use of org.apache.hadoop.hive.common.ValidReaderWriteIdList in project hive by apache.

From the class TestAcidUtils, method testObsoleteOriginals.

@Test
public void testObsoleteOriginals() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/base_10/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/000000_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/000001_1", 500, new byte[0]));
    Path part = new MockPath(fs, "/tbl/part1");
    AcidUtils.Directory dir = AcidUtils.getAcidState(part, conf, new ValidReaderWriteIdList("tbl:150:" + Long.MAX_VALUE + ":"));
    // Obsolete list should include the two original bucket files, and the old base dir
    List<FileStatus> obsolete = dir.getObsolete();
    assertEquals(3, obsolete.size());
    assertEquals("mock:/tbl/part1/base_5", obsolete.get(0).getPath().toString());
    assertEquals("mock:/tbl/part1/base_10", dir.getBaseDirectory().toString());
}
Also used: Path(org.apache.hadoop.fs.Path), MockPath(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath), FileStatus(org.apache.hadoop.fs.FileStatus), Configuration(org.apache.hadoop.conf.Configuration), MockFile(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile), ValidReaderWriteIdList(org.apache.hadoop.hive.common.ValidReaderWriteIdList), MockFileSystem(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem), Test(org.junit.Test)
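
The string passed to ValidReaderWriteIdList above appears to follow the serialized form tableName:highWatermark:minOpenWriteId:openWriteIds:abortedWriteIds, so "tbl:150:" + Long.MAX_VALUE + ":" marks every write id up to 150 as committed with nothing open. A minimal sketch of how such a list answers validity questions (standalone class name is hypothetical, not part of the test above):

import org.apache.hadoop.hive.common.ValidReaderWriteIdList;

public class WriteIdListSketch {
    public static void main(String[] args) {
        // Same list as in testObsoleteOriginals: high watermark 150, no open write ids.
        ValidReaderWriteIdList writeIds =
                new ValidReaderWriteIdList("tbl:150:" + Long.MAX_VALUE + ":");
        System.out.println(writeIds.getHighWatermark()); // 150
        System.out.println(writeIds.isWriteIdValid(10)); // true: base_10 is readable
        System.out.println(writeIds.isWriteIdValid(5));  // true, but base_10 already covers it,
                                                         // which is why base_5 lands in the obsolete list
    }
}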

Example 2 with ValidReaderWriteIdList

Use of org.apache.hadoop.hive.common.ValidReaderWriteIdList in project hive by apache.

From the class TestAcidUtils, method deltasWithOpenTxnInRead2.

/**
 * @since 1.3.0
 * @throws Exception
 */
@Test
public void deltasWithOpenTxnInRead2() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_4_4_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_4_4_3/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_101_101_1/bucket_0", 500, new byte[0]));
    Path part = new MockPath(fs, "mock:/tbl/part1");
    AcidUtils.Directory dir = AcidUtils.getAcidState(part, conf, new ValidReaderWriteIdList("tbl:100:4:4"));
    List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories();
    assertEquals(2, delts.size());
    assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_2_5", delts.get(1).getPath().toString());
}
Also used: Path(org.apache.hadoop.fs.Path), MockPath(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath), Configuration(org.apache.hadoop.conf.Configuration), MockFile(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile), ValidReaderWriteIdList(org.apache.hadoop.hive.common.ValidReaderWriteIdList), MockFileSystem(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem), Test(org.junit.Test)
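
Here the list "tbl:100:4:4" presumably encodes a high watermark of 100 with write id 4 still open, which is why neither delta_4_4_* directory is returned as current and delta_101_101_1 (above the watermark) is dropped as well; delta_2_5 stays current because it also covers committed write ids, with the open one filtered at read time. A small hedged sketch of the same checks (class name is hypothetical):

import org.apache.hadoop.hive.common.ValidReaderWriteIdList;

public class OpenWriteIdSketch {
    public static void main(String[] args) {
        ValidReaderWriteIdList writeIds = new ValidReaderWriteIdList("tbl:100:4:4");
        System.out.println(writeIds.isWriteIdValid(4));   // false: write id 4 is open
        System.out.println(writeIds.isWriteIdValid(5));   // true
        System.out.println(writeIds.isWriteIdValid(101)); // false: above the high watermark of 100
    }
}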

Example 3 with ValidReaderWriteIdList

Use of org.apache.hadoop.hive.common.ValidReaderWriteIdList in project hive by apache.

From the class TestAcidUtils, method deltasWithOpenTxnInRead.

@Test
public void deltasWithOpenTxnInRead() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_1_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_2_5/bucket_0", 500, new byte[0]));
    Path part = new MockPath(fs, "mock:/tbl/part1");
    AcidUtils.Directory dir = AcidUtils.getAcidState(part, conf, new ValidReaderWriteIdList("tbl:100:4:4"));
    List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories();
    assertEquals(2, delts.size());
    assertEquals("mock:/tbl/part1/delta_1_1", delts.get(0).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_2_5", delts.get(1).getPath().toString());
}
Also used: Path(org.apache.hadoop.fs.Path), MockPath(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath), Configuration(org.apache.hadoop.conf.Configuration), MockFile(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile), ValidReaderWriteIdList(org.apache.hadoop.hive.common.ValidReaderWriteIdList), MockFileSystem(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem), Test(org.junit.Test)
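
Even though delta_2_5 is kept as a current directory, not every write id inside it is readable under "tbl:100:4:4". A quick way to see which ones survive, using only the isWriteIdValid accessor (class name is hypothetical):

import org.apache.hadoop.hive.common.ValidReaderWriteIdList;

public class DeltaRangeSketch {
    public static void main(String[] args) {
        ValidReaderWriteIdList writeIds = new ValidReaderWriteIdList("tbl:100:4:4");
        // Walk the write id range covered by delta_2_5 and report readability.
        for (long writeId = 2; writeId <= 5; writeId++) {
            System.out.println(writeId + " -> " + writeIds.isWriteIdValid(writeId));
        }
        // Expected: 2, 3 and 5 are valid; 4 is not, because it is still open.
    }
}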

Example 4 with ValidReaderWriteIdList

Use of org.apache.hadoop.hive.common.ValidReaderWriteIdList in project hive by apache.

From the class TestAcidUtils, method testOverlapingDelta2.

/**
 * Hive 1.3.0 delta dir naming scheme which supports multi-statement txns
 * @throws Exception
 */
@Test
public void testOverlapingDelta2() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_0000063_63_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62_3/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_00061_61_0/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_1/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_4/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60_7/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_058_58/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_50/bucket_0", 500, new byte[0]));
    Path part = new MockPath(fs, "mock:/tbl/part1");
    AcidUtils.Directory dir = AcidUtils.getAcidState(part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"));
    assertEquals("mock:/tbl/part1/base_50", dir.getBaseDirectory().toString());
    List<FileStatus> obsolete = dir.getObsolete();
    assertEquals(5, obsolete.size());
    assertEquals("mock:/tbl/part1/delta_052_55", obsolete.get(0).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_058_58", obsolete.get(1).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_0060_60_1", obsolete.get(2).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_0060_60_4", obsolete.get(3).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_0060_60_7", obsolete.get(4).getPath().toString());
    List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories();
    assertEquals(5, delts.size());
    assertEquals("mock:/tbl/part1/delta_40_60", delts.get(0).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_00061_61_0", delts.get(1).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_000062_62_0", delts.get(2).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_000062_62_3", delts.get(3).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_0000063_63_0", delts.get(4).getPath().toString());
}
Also used: Path(org.apache.hadoop.fs.Path), MockPath(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath), FileStatus(org.apache.hadoop.fs.FileStatus), Configuration(org.apache.hadoop.conf.Configuration), MockFile(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile), ValidReaderWriteIdList(org.apache.hadoop.hive.common.ValidReaderWriteIdList), MockFileSystem(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem), Test(org.junit.Test)
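
The delta_<min>_<max>_<statementId> names in this example come from the Hive 1.3.0 multi-statement transaction scheme: several deltas can share the same write id range and differ only in statement id, and all of them stay current as long as no wider delta covers them. A minimal hedged sketch of that behaviour, reusing the same imports and mock classes listed under "Also used" above (the test name is hypothetical):

@Test
public void statementIdDeltasSketch() throws Exception {
    Configuration conf = new Configuration();
    // Two deltas for write id 62 that differ only in statement id.
    MockFileSystem fs = new MockFileSystem(conf,
        new MockFile("mock:/tbl/part1/delta_000062_62_0/bucket_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_000062_62_3/bucket_0", 500, new byte[0]));
    Path part = new MockPath(fs, "mock:/tbl/part1");
    AcidUtils.Directory dir = AcidUtils.getAcidState(part, conf,
        new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"));
    // Both statement-level deltas should be reported as current; neither is obsolete.
    assertEquals(2, dir.getCurrentDirectories().size());
    assertEquals(0, dir.getObsolete().size());
}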

Example 5 with ValidReaderWriteIdList

Use of org.apache.hadoop.hive.common.ValidReaderWriteIdList in project hive by apache.

From the class TestAcidUtils, method testOverlapingDelta.

@Test
public void testOverlapingDelta() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf, new MockFile("mock:/tbl/part1/delta_0000063_63/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_000062_62/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_00061_61/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_0060_60/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/delta_052_55/bucket_0", 500, new byte[0]), new MockFile("mock:/tbl/part1/base_50/bucket_0", 500, new byte[0]));
    Path part = new MockPath(fs, "mock:/tbl/part1");
    AcidUtils.Directory dir = AcidUtils.getAcidState(part, conf, new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"));
    assertEquals("mock:/tbl/part1/base_50", dir.getBaseDirectory().toString());
    List<FileStatus> obsolete = dir.getObsolete();
    assertEquals(2, obsolete.size());
    assertEquals("mock:/tbl/part1/delta_052_55", obsolete.get(0).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_0060_60", obsolete.get(1).getPath().toString());
    List<AcidUtils.ParsedDelta> delts = dir.getCurrentDirectories();
    assertEquals(4, delts.size());
    assertEquals("mock:/tbl/part1/delta_40_60", delts.get(0).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_00061_61", delts.get(1).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_000062_62", delts.get(2).getPath().toString());
    assertEquals("mock:/tbl/part1/delta_0000063_63", delts.get(3).getPath().toString());
}
Also used: Path(org.apache.hadoop.fs.Path), MockPath(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath), FileStatus(org.apache.hadoop.fs.FileStatus), Configuration(org.apache.hadoop.conf.Configuration), MockFile(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile), ValidReaderWriteIdList(org.apache.hadoop.hive.common.ValidReaderWriteIdList), MockFileSystem(org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem), Test(org.junit.Test)
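
The key rule illustrated here is that a delta whose write id range is fully contained in a wider delta becomes obsolete once the wider one exists: delta_40_60 subsumes both delta_052_55 and delta_0060_60. A stripped-down hedged sketch of just that rule, again relying on the mock helpers and imports used throughout these tests (the test name is hypothetical):

@Test
public void overlappingDeltaSketch() throws Exception {
    Configuration conf = new Configuration();
    MockFileSystem fs = new MockFileSystem(conf,
        new MockFile("mock:/tbl/part1/delta_40_60/bucket_0", 500, new byte[0]),
        new MockFile("mock:/tbl/part1/delta_052_55/bucket_0", 500, new byte[0]));
    Path part = new MockPath(fs, "mock:/tbl/part1");
    AcidUtils.Directory dir = AcidUtils.getAcidState(part, conf,
        new ValidReaderWriteIdList("tbl:100:" + Long.MAX_VALUE + ":"));
    // The wider delta wins; the contained one is reported as obsolete.
    assertEquals(1, dir.getCurrentDirectories().size());
    assertEquals("mock:/tbl/part1/delta_40_60",
        dir.getCurrentDirectories().get(0).getPath().toString());
    assertEquals(1, dir.getObsolete().size());
    assertEquals("mock:/tbl/part1/delta_052_55",
        dir.getObsolete().get(0).getPath().toString());
}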

Aggregations

ValidReaderWriteIdList (org.apache.hadoop.hive.common.ValidReaderWriteIdList): 20
Configuration (org.apache.hadoop.conf.Configuration): 17
Test (org.junit.Test): 15
Path (org.apache.hadoop.fs.Path): 13
MockFile (org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFile): 12
MockFileSystem (org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockFileSystem): 12
MockPath (org.apache.hadoop.hive.ql.io.orc.TestInputOutputFormat.MockPath): 12
FileStatus (org.apache.hadoop.fs.FileStatus): 8
FileSystem (org.apache.hadoop.fs.FileSystem): 4
ValidWriteIdList (org.apache.hadoop.hive.common.ValidWriteIdList): 4
RecordIdentifier (org.apache.hadoop.hive.ql.io.RecordIdentifier): 4
AcidOutputFormat (org.apache.hadoop.hive.ql.io.AcidOutputFormat): 3
AcidUtils (org.apache.hadoop.hive.ql.io.AcidUtils): 3
ReaderKey (org.apache.hadoop.hive.ql.io.orc.OrcRawRecordMerger.ReaderKey): 3
BitSet (java.util.BitSet): 2
ReaderPair (org.apache.hadoop.hive.ql.io.orc.OrcRawRecordMerger.ReaderPair): 2
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 2
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 2
HdfsFileStatusWithId (org.apache.hadoop.hive.shims.HadoopShims.HdfsFileStatusWithId): 2
OrcAcidUtils (org.apache.orc.impl.OrcAcidUtils): 2