Example 41 with CommandProcessorResponse

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache.

From the class TestSemanticAnalysis, method testAlterTblTouch.

@Test
public void testAlterTblTouch() throws Exception {
    hcatDriver.run("drop table junit_sem_analysis");
    hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
    CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis touch");
    assertEquals(0, response.getResponseCode());
    hcatDriver.run("alter table junit_sem_analysis touch partition (b='12')");
    assertEquals(0, response.getResponseCode());
    hcatDriver.run("drop table junit_sem_analysis");
}
Also used: CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse), Test (org.junit.Test), HCatBaseTest (org.apache.hive.hcatalog.mapreduce.HCatBaseTest)
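
All five examples in this section follow the same pattern: run a HiveQL statement through hcatDriver and assert that the response code is zero. A small helper captures that pattern; the sketch below is for illustration only (the name runAndAssertOk is hypothetical, and it assumes the hcatDriver field from TestSemanticAnalysis and a Hive version in which run returns a CommandProcessorResponse rather than throwing on failure):

// Hypothetical helper, not part of the Hive test suite.
// Runs a statement through the HCatalog driver and asserts that it succeeded.
private CommandProcessorResponse runAndAssertOk(String hql) throws Exception {
    CommandProcessorResponse response = hcatDriver.run(hql);
    // A zero response code means the statement compiled and executed successfully.
    assertEquals(0, response.getResponseCode());
    // On success no error message should be attached to the response.
    assertNull(response.getErrorMessage());
    return response;
}

With such a helper, the two touch statements in testAlterTblTouch above would each reduce to a single runAndAssertOk call.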

Example 42 with CommandProcessorResponse

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache.

From the class TestSemanticAnalysis, method testCreateTableIfNotExists.

@Test
public void testCreateTableIfNotExists() throws Exception {
    hcatDriver.run("drop table " + TBL_NAME);
    hcatDriver.run("create table " + TBL_NAME + " (a int) stored as RCFILE");
    Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
    List<FieldSchema> cols = tbl.getSd().getCols();
    assertEquals(1, cols.size());
    assertEquals(new FieldSchema("a", "int", null), cols.get(0));
    assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
    assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
    CommandProcessorResponse resp = hcatDriver.run("create table if not exists " + TBL_NAME + " (a int) stored as RCFILE");
    assertEquals(0, resp.getResponseCode());
    assertNull(resp.getErrorMessage());
    tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
    cols = tbl.getSd().getCols();
    assertEquals(1, cols.size());
    assertEquals(new FieldSchema("a", "int", null), cols.get(0));
    assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
    assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
    hcatDriver.run("drop table junit_sem_analysis");
}
Also used: RCFileOutputFormat (org.apache.hadoop.hive.ql.io.RCFileOutputFormat), Table (org.apache.hadoop.hive.metastore.api.Table), RCFileInputFormat (org.apache.hadoop.hive.ql.io.RCFileInputFormat), FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema), CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse), Test (org.junit.Test), HCatBaseTest (org.apache.hive.hcatalog.mapreduce.HCatBaseTest)
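
Example 42 is the only one of these that also checks getErrorMessage(); the same accessors are what a caller would inspect on the failure path. The following is a hedged sketch of that negative case, not code taken from TestSemanticAnalysis (the statement is invented and the exact non-zero code depends on the failure):

// Illustrative only: a statement that fails semantic analysis comes back with a
// non-zero response code and a populated error message instead of throwing.
CommandProcessorResponse failure = hcatDriver.run("select * from table_that_does_not_exist");
assertTrue(failure.getResponseCode() != 0);
assertNotNull(failure.getErrorMessage());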

Example 43 with CommandProcessorResponse

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache.

From the class TestSemanticAnalysis, method testInvalidateClusteredBy.

@Test
public void testInvalidateClusteredBy() throws Exception {
    hcatDriver.run("drop table junit_sem_analysis");
    query = "create table junit_sem_analysis (a int) partitioned by (b string) clustered by (a) into 10 buckets stored as TEXTFILE";
    CommandProcessorResponse response = hcatDriver.run(query);
    assertEquals(0, response.getResponseCode());
}
Also used: CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse), Test (org.junit.Test), HCatBaseTest (org.apache.hive.hcatalog.mapreduce.HCatBaseTest)

Example 44 with CommandProcessorResponse

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache.

From the class TestSemanticAnalysis, method testAlterTblClusteredBy.

@Test
public void testAlterTblClusteredBy() throws Exception {
    hcatDriver.run("drop table junit_sem_analysis");
    hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
    CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis clustered by (a) into 7 buckets");
    assertEquals(0, response.getResponseCode());
    hcatDriver.run("drop table junit_sem_analysis");
}
Also used: CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse), Test (org.junit.Test), HCatBaseTest (org.apache.hive.hcatalog.mapreduce.HCatBaseTest)

Example 45 with CommandProcessorResponse

Use of org.apache.hadoop.hive.ql.processors.CommandProcessorResponse in project hive by apache.

From the class TestSemanticAnalysis, method testInvalidateTextFileStoredAs.

@Test
public void testInvalidateTextFileStoredAs() throws Exception {
    hcatDriver.run("drop table junit_sem_analysis");
    query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as TEXTFILE";
    CommandProcessorResponse response = hcatDriver.run(query);
    assertEquals(0, response.getResponseCode());
}
Also used: CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse), Test (org.junit.Test), HCatBaseTest (org.apache.hive.hcatalog.mapreduce.HCatBaseTest)

Aggregations (classes most frequently used together with CommandProcessorResponse across these results, with usage counts):

CommandProcessorResponse (org.apache.hadoop.hive.ql.processors.CommandProcessorResponse): 145
Test (org.junit.Test): 92
ShowLocksResponseElement (org.apache.hadoop.hive.metastore.api.ShowLocksResponseElement): 24
HCatBaseTest (org.apache.hive.hcatalog.mapreduce.HCatBaseTest): 19
IOException (java.io.IOException): 18
ArrayList (java.util.ArrayList): 17
AddDynamicPartitions (org.apache.hadoop.hive.metastore.api.AddDynamicPartitions): 8
Database (org.apache.hadoop.hive.metastore.api.Database): 8
HiveConf (org.apache.hadoop.hive.conf.HiveConf): 7
Table (org.apache.hadoop.hive.metastore.api.Table): 7
Path (org.apache.hadoop.fs.Path): 6
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 6
PigServer (org.apache.pig.PigServer): 5
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 4
Driver (org.apache.hadoop.hive.ql.Driver): 4
IDriver (org.apache.hadoop.hive.ql.IDriver): 4
LockException (org.apache.hadoop.hive.ql.lockmgr.LockException): 4
PerfLogger (org.apache.hadoop.hive.ql.log.PerfLogger): 4
ParseException (org.apache.hadoop.hive.ql.parse.ParseException): 4
HashMap (java.util.HashMap): 3
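
The aggregation list above reflects the scaffold these tests share: a HiveConf starts a session and builds a Driver (or the HCatalog wrapper around it), statements are run, and the resulting CommandProcessorResponse is checked. The sketch below is a minimal, self-contained illustration of that scaffold, not code from the Hive repository: the class name DriverResponseSketch is invented, it presumes a metastore reachable through the default configuration, and it assumes a Hive version in which Driver.run returns a CommandProcessorResponse rather than throwing on failure.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;

public class DriverResponseSketch {
    public static void main(String[] args) throws Exception {
        // Assumes hive-site.xml (or the built-in defaults) points at a usable metastore.
        HiveConf conf = new HiveConf();
        SessionState.start(conf);
        Driver driver = new Driver(conf);

        // Same check the tests above perform: a zero response code means success.
        CommandProcessorResponse response =
                driver.run("create table if not exists sketch_tbl (a int)");
        if (response.getResponseCode() != 0) {
            throw new IllegalStateException("command failed: " + response.getErrorMessage());
        }

        driver.run("drop table sketch_tbl");
        driver.close();
    }
}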