
Example 6 with HiveConf

Use of org.apache.hadoop.hive.conf.HiveConf in project hive by apache, in the class TestHiveCopyFiles, method setUp:

@BeforeClass
public static void setUp() {
    hiveConf = new HiveConf(TestHiveCopyFiles.class);
    SessionState.start(hiveConf);
}
Also used: HiveConf (org.apache.hadoop.hive.conf.HiveConf), BeforeClass (org.junit.BeforeClass)
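
This page does not show the matching teardown. A minimal sketch of one, assuming the suite should release the session it started (the @AfterClass method below is a hypothetical addition, not from the Hive source):

@AfterClass
public static void tearDown() throws Exception {
    // Release the session opened by SessionState.start(hiveConf) in setUp.
    SessionState ss = SessionState.get();
    if (ss != null) {
        ss.close();
    }
}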

Example 7 with HiveConf

Use of org.apache.hadoop.hive.conf.HiveConf in project hive by apache, in the class TestZookeeperLockManager, method setup:

@Before
public void setup() {
    conf = new HiveConf();
    lockObjData = new HiveLockObjectData("1", "10", "SHARED", "show tables");
    hiveLock = new HiveLockObject(TABLE, lockObjData);
    zLock = new ZooKeeperHiveLock(TABLE_LOCK_PATH, hiveLock, HiveLockMode.SHARED);
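    // TestingServer picks a random port; the loop below retries until one binds successfully.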
    while (server == null) {
        try {
            server = new TestingServer();
            CuratorFrameworkFactory.Builder builder = CuratorFrameworkFactory.builder();
            client = builder.connectString(server.getConnectString()).retryPolicy(new RetryOneTime(1)).build();
            client.start();
        } catch (Exception e) {
            System.err.println("Getting bind exception - retrying to allocate server");
            server = null;
        }
    }
}
Also used: TestingServer (org.apache.curator.test.TestingServer), HiveLockObject (org.apache.hadoop.hive.ql.lockmgr.HiveLockObject), RetryOneTime (org.apache.curator.retry.RetryOneTime), CuratorFrameworkFactory (org.apache.curator.framework.CuratorFrameworkFactory), HiveLockObjectData (org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData), HiveConf (org.apache.hadoop.hive.conf.HiveConf), KeeperException (org.apache.zookeeper.KeeperException), Before (org.junit.Before)
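
The matching cleanup is also not shown. A minimal sketch, assuming the test should close the Curator client and the embedded ZooKeeper server it allocated (the @After method below is hypothetical):

@After
public void teardown() throws Exception {
    if (client != null) {
        // Close the Curator client before stopping the server it talks to.
        client.close();
    }
    if (server != null) {
        server.close();
        server = null;
    }
}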

Example 8 with HiveConf

Use of org.apache.hadoop.hive.conf.HiveConf in project hive by apache, in the class TestHive, method setUp:

@Override
protected void setUp() throws Exception {
    super.setUp();
    hiveConf = new HiveConf(this.getClass());
    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    // enable trash so it can be tested
    // FS_TRASH_CHECKPOINT_INTERVAL_KEY (hadoop-2)
    hiveConf.setFloat("fs.trash.checkpoint.interval", 30);
    // FS_TRASH_INTERVAL_KEY (hadoop-2)
    hiveConf.setFloat("fs.trash.interval", 30);
    SessionState.start(hiveConf);
    try {
        hm = Hive.get(hiveConf);
    } catch (Exception e) {
        System.err.println(StringUtils.stringifyException(e));
        System.err.println("Unable to initialize Hive Metastore using configuration: \n " + hiveConf);
        throw e;
    }
}
Also used: HiveConf (org.apache.hadoop.hive.conf.HiveConf), MetaException (org.apache.hadoop.hive.metastore.api.MetaException)
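
Once setUp succeeds, hm is a live Hive client. A hypothetical JUnit 3-style smoke test built on it, matching the @Override setUp above (the method name and assertion are assumptions, not from the source):

public void testMetastoreConnection() throws Exception {
    // getAllTables goes through the metastore, so a non-null result
    // confirms the connection that setUp established.
    List<String> tables = hm.getAllTables("default");
    assertNotNull(tables);
}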

Example 9 with HiveConf

Use of org.apache.hadoop.hive.conf.HiveConf in project hive by apache, in the class TestWorker, method inputSplit:

@Test
public void inputSplit() throws Exception {
    String basename = "/warehouse/foo/base_1";
    String delta1 = "/warehouse/foo/delta_2_3";
    String delta2 = "/warehouse/foo/delta_4_7";
    HiveConf conf = new HiveConf();
    Path file = new Path(System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + "newWriteInputSplitTest");
    FileSystem fs = FileSystem.get(conf);
    FSDataOutputStream os = fs.create(file);
    for (int i = 0; i < 10; i++) {
        os.writeBytes("mary had a little lamb its fleece was white as snow\n");
    }
    os.close();
    List<Path> files = new ArrayList<Path>(1);
    files.add(file);
    Path[] deltas = new Path[2];
    deltas[0] = new Path(delta1);
    deltas[1] = new Path(delta2);
    CompactorMR.CompactorInputSplit split = new CompactorMR.CompactorInputSplit(conf, 3, files, new Path(basename), deltas);
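    // The file written above holds 10 lines of 52 bytes each, hence the expected length of 520.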
    Assert.assertEquals(520L, split.getLength());
    String[] locations = split.getLocations();
    Assert.assertEquals(1, locations.length);
    Assert.assertEquals("localhost", locations[0]);
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutput out = new DataOutputStream(buf);
    split.write(out);
    split = new CompactorMR.CompactorInputSplit();
    DataInput in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
    split.readFields(in);
    Assert.assertEquals(3, split.getBucket());
    Assert.assertEquals(basename, split.getBaseDir().toString());
    deltas = split.getDeltaDirs();
    Assert.assertEquals(2, deltas.length);
    Assert.assertEquals(delta1, deltas[0].toString());
    Assert.assertEquals(delta2, deltas[1].toString());
}
Also used: Path (org.apache.hadoop.fs.Path), DataOutput (java.io.DataOutput), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), DataOutputStream (java.io.DataOutputStream), ArrayList (java.util.ArrayList), ByteArrayOutputStream (java.io.ByteArrayOutputStream), DataInputStream (java.io.DataInputStream), DataInput (java.io.DataInput), ByteArrayInputStream (java.io.ByteArrayInputStream), FileSystem (org.apache.hadoop.fs.FileSystem), HiveConf (org.apache.hadoop.hive.conf.HiveConf), Test (org.junit.Test)
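
The write/readFields round trip in the middle of this test is the standard Hadoop Writable pattern. A hypothetical generic helper expressing the same idea, assuming the split type implements org.apache.hadoop.io.Writable as its write and readFields methods suggest, and assuming the usual java.io imports (not part of the original test):

static <T extends Writable> T roundTrip(T source, T target) throws IOException {
    // Serialize source into an in-memory buffer, then rehydrate target from it.
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    source.write(new DataOutputStream(buf));
    target.readFields(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
    return target;
}

With such a helper, the serialization step collapses to split = roundTrip(split, new CompactorMR.CompactorInputSplit());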

Example 10 with HiveConf

Use of org.apache.hadoop.hive.conf.HiveConf in project hive by apache, in the class TestWorker, method inputSplitNullBase:

@Test
public void inputSplitNullBase() throws Exception {
    String delta1 = "/warehouse/foo/delta_2_3";
    String delta2 = "/warehouse/foo/delta_4_7";
    HiveConf conf = new HiveConf();
    Path file = new Path(System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + "newWriteInputSplitTest");
    FileSystem fs = FileSystem.get(conf);
    FSDataOutputStream os = fs.create(file);
    for (int i = 0; i < 10; i++) {
        os.writeBytes("mary had a little lamb its fleece was white as snow\n");
    }
    os.close();
    List<Path> files = new ArrayList<Path>(1);
    files.add(file);
    Path[] deltas = new Path[2];
    deltas[0] = new Path(delta1);
    deltas[1] = new Path(delta2);
    CompactorMR.CompactorInputSplit split = new CompactorMR.CompactorInputSplit(conf, 3, files, null, deltas);
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutput out = new DataOutputStream(buf);
    split.write(out);
    split = new CompactorMR.CompactorInputSplit();
    DataInput in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
    split.readFields(in);
    Assert.assertEquals(3, split.getBucket());
    Assert.assertNull(split.getBaseDir());
    deltas = split.getDeltaDirs();
    Assert.assertEquals(2, deltas.length);
    Assert.assertEquals(delta1, deltas[0].toString());
    Assert.assertEquals(delta2, deltas[1].toString());
}
Also used: Path (org.apache.hadoop.fs.Path), DataOutput (java.io.DataOutput), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), DataOutputStream (java.io.DataOutputStream), ArrayList (java.util.ArrayList), ByteArrayOutputStream (java.io.ByteArrayOutputStream), DataInputStream (java.io.DataInputStream), DataInput (java.io.DataInput), ByteArrayInputStream (java.io.ByteArrayInputStream), FileSystem (org.apache.hadoop.fs.FileSystem), HiveConf (org.apache.hadoop.hive.conf.HiveConf), Test (org.junit.Test)
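
Both inputSplit tests write a scratch file under java.io.tmpdir and never delete it. A hypothetical cleanup, reusing the same path construction as the tests (the @After method below is an assumption, not from the source):

@After
public void cleanupScratchFile() throws Exception {
    Path file = new Path(System.getProperty("java.io.tmpdir")
            + System.getProperty("file.separator") + "newWriteInputSplitTest");
    FileSystem fs = FileSystem.get(new HiveConf());
    // Second argument false means a non-recursive delete.
    fs.delete(file, false);
}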

Aggregations

HiveConf (org.apache.hadoop.hive.conf.HiveConf): 404
BeforeClass (org.junit.BeforeClass): 73
Test (org.junit.Test): 66
Path (org.apache.hadoop.fs.Path): 54
Before (org.junit.Before): 50
Driver (org.apache.hadoop.hive.ql.Driver): 46
CliSessionState (org.apache.hadoop.hive.cli.CliSessionState): 44
IOException (java.io.IOException): 39
ArrayList (java.util.ArrayList): 37
File (java.io.File): 31
HashMap (java.util.HashMap): 26
FileSystem (org.apache.hadoop.fs.FileSystem): 26
SessionState (org.apache.hadoop.hive.ql.session.SessionState): 22
LinkedHashMap (java.util.LinkedHashMap): 17
List (java.util.List): 16
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 15
MiniHS2 (org.apache.hive.jdbc.miniHS2.MiniHS2): 14
Map (java.util.Map): 12
HiveMetaStoreClient (org.apache.hadoop.hive.metastore.HiveMetaStoreClient): 12
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 12
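
The counts above suggest that HiveConf most often appears alongside session and driver types. A minimal sketch of that recurring pattern, assuming default (embedded) metastore settings; this is illustrative only and not taken from any single example on this page:

import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

public class HiveConfPatternSketch {
    public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf();
        // Start a CLI-style session so Driver has the session state it expects.
        SessionState.start(new CliSessionState(conf));
        Driver driver = new Driver(conf);
        // Compile and execute a simple statement end to end.
        driver.run("show tables");
    }
}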