Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From class TestObjectStoreSchemaMethods, method createUniqueDatabaseForTest:
private String createUniqueDatabaseForTest() throws MetaException, InvalidObjectException {
  String dbName = "uniquedbfortest" + dbNum++;
  Database db = new DatabaseBuilder()
      .setName(dbName)
      .setLocation("somewhere")
      .setDescription("descriptive")
      .build();
  objectStore.createDatabase(db);
  return dbName;
}
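For reference, DatabaseBuilder is a convenience wrapper; the same object can be produced with the Thrift-generated constructor that the TestOldSchema snippet below uses. A minimal sketch (the argument order is name, description, location URI, parameters; the database name here is illustrative):

  // Equivalent construction without the builder.
  Database db = new Database("uniquedbfortest0", "descriptive", "somewhere", null);
  objectStore.createDatabase(db);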
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From class TestOldSchema, method testPartitionOps:
/**
 * Tests partition operations
 */
@Test
public void testPartitionOps() throws Exception {
  String dbName = "default";
  String tableName = "snp";
  Database db1 = new Database(dbName, "description", "locationurl", null);
  store.createDatabase(db1);
  long now = System.currentTimeMillis();
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema("col1", "long", "nocomment"));
  SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
  StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0,
      serde, null, null, Collections.emptyMap());
  List<FieldSchema> partCols = new ArrayList<>();
  partCols.add(new FieldSchema("ds", "string", ""));
  Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols,
      Collections.emptyMap(), null, null, null);
  store.createTable(table);
  Deadline.startTimer("getPartition");
  for (int i = 0; i < 10; i++) {
    List<String> partVal = new ArrayList<>();
    partVal.add(String.valueOf(i));
    StorageDescriptor psd = new StorageDescriptor(sd);
    psd.setLocation("file:/tmp/default/hit/ds=" + partVal);
    Partition part = new Partition(partVal, dbName, tableName, (int) now, (int) now, psd,
        Collections.emptyMap());
    store.addPartition(part);
    ColumnStatistics cs = new ColumnStatistics();
    ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
    desc.setLastAnalyzed(now);
    desc.setPartName("ds=" + String.valueOf(i));
    cs.setStatsDesc(desc);
    ColumnStatisticsObj obj = new ColumnStatisticsObj();
    obj.setColName("col1");
    obj.setColType("bigint");
    ColumnStatisticsData data = new ColumnStatisticsData();
    LongColumnStatsData dcsd = new LongColumnStatsData();
    dcsd.setHighValue(1000 + i);
    dcsd.setLowValue(-1000 - i);
    dcsd.setNumNulls(i);
    dcsd.setNumDVs(10 * i + 1);
    dcsd.setBitVectors(bitVectors[0]);
    data.setLongStats(dcsd);
    obj.setStatsData(data);
    cs.addToStatsObj(obj);
    store.updatePartitionColumnStatistics(cs, partVal);
  }
  Checker statChecker = new Checker() {
    @Override
    public void checkStats(AggrStats aggrStats) throws Exception {
      Assert.assertEquals(10, aggrStats.getPartsFound());
      Assert.assertEquals(1, aggrStats.getColStatsSize());
      ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
      Assert.assertEquals("col1", cso.getColName());
      Assert.assertEquals("bigint", cso.getColType());
      LongColumnStatsData lcsd = cso.getStatsData().getLongStats();
      Assert.assertEquals(1009, lcsd.getHighValue(), 0.01);
      Assert.assertEquals(-1009, lcsd.getLowValue(), 0.01);
      Assert.assertEquals(45, lcsd.getNumNulls());
      Assert.assertEquals(91, lcsd.getNumDVs());
    }
  };
  List<String> partNames = new ArrayList<>();
  for (int i = 0; i < 10; i++) {
    partNames.add("ds=" + i);
  }
  AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName, partNames,
      Arrays.asList("col1"));
  statChecker.checkStats(aggrStats);
}
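The expected values in the checker follow from how the per-partition statistics written in the loop are aggregated: high and low values take the extremes across the ten partitions, numNulls is summed, and the asserted numDVs equals the largest per-partition value. A sketch of the arithmetic, not part of the Hive test:

  long expectedHigh = 1000 + 9;     // max over i = 0..9 of (1000 + i)  -> 1009
  long expectedLow = -1000 - 9;     // min over i = 0..9 of (-1000 - i) -> -1009
  long expectedNumNulls = 0;
  for (int i = 0; i < 10; i++) {
    expectedNumNulls += i;          // numNulls sums across partitions  -> 45
  }
  long expectedNumDVs = 10 * 9 + 1; // largest per-partition numDVs     -> 91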
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From class TestRemoteHiveMetaStoreIpAddress, method testIpAddress:
@Test
public void testIpAddress() throws Exception {
  Database db = new Database();
  db.setName("testIpAddressIp");
  msc.createDatabase(db);
  msc.dropDatabase(db.getName());
}
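The single-argument dropDatabase used here only succeeds when the database is empty, which is all this test needs. The metastore client also exposes an overload that controls data deletion and cascading; a hedged sketch:

  // Drop a database even if it still contains tables:
  // deleteData = true, ignoreUnknownDb = false, cascade = true.
  msc.dropDatabase("testIpAddressIp", true, false, true);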
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From class TestRetryingHMSHandler, method testRetryingHMSHandler:
// Create a database and a table in that database. Because the AlternateFailurePreListener is
// being used, each attempt to create something should require two calls by the RetryingHMSHandler.
@Test
public void testRetryingHMSHandler() throws Exception {
  String dbName = "hive4159";
  String tblName = "tmptbl";
  Database db = new Database();
  db.setName(dbName);
  msc.createDatabase(db);
  Assert.assertEquals(2, AlternateFailurePreListener.getCallCount());
  Table tbl = new TableBuilder()
      .setDbName(dbName)
      .setTableName(tblName)
      .addCol("c1", ColumnType.STRING_TYPE_NAME)
      .build();
  msc.createTable(tbl);
  Assert.assertEquals(4, AlternateFailurePreListener.getCallCount());
}
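AlternateFailurePreListener is a pre-event listener registered with the metastore that fails every other call, so each successful create costs the RetryingHMSHandler two attempts. A minimal sketch of what such a listener could look like (illustrative; the real class lives in Hive's metastore test sources and may differ in detail):

  public static class SketchAlternateFailurePreListener extends MetaStorePreEventListener {
    private static int callCount = 0;

    public SketchAlternateFailurePreListener(Configuration config) {
      super(config);
    }

    @Override
    public void onEvent(PreEventContext context) throws MetaException {
      callCount++;
      if (callCount % 2 == 1) {
        // Odd-numbered calls fail, forcing RetryingHMSHandler to retry once.
        throw new MetaException("Event failed (intentional test failure).");
      }
    }

    public static int getCallCount() {
      return callCount;
    }
  }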
Use of org.apache.hadoop.hive.metastore.api.Database in project hive by apache.
From class TestCachedStore, method testAggrStatsRepeatedRead:
@Test
public void testAggrStatsRepeatedRead() throws Exception {
  String dbName = "testTableColStatsOps";
  String tblName = "tbl";
  String colName = "f1";
  Database db = new Database(dbName, null, "some_location", null);
  cachedStore.createDatabase(db);
  List<FieldSchema> cols = new ArrayList<>();
  cols.add(new FieldSchema(colName, "int", null));
  List<FieldSchema> partCols = new ArrayList<>();
  partCols.add(new FieldSchema("col", "int", null));
  StorageDescriptor sd = new StorageDescriptor(cols, null, "input", "output", false, 0,
      new SerDeInfo("serde", "seriallib", new HashMap<>()), null, null, null);
  Table tbl = new Table(tblName, dbName, null, 0, 0, 0, sd, partCols, new HashMap<>(),
      null, null, TableType.MANAGED_TABLE.toString());
  cachedStore.createTable(tbl);
  List<String> partVals1 = new ArrayList<>();
  partVals1.add("1");
  List<String> partVals2 = new ArrayList<>();
  partVals2.add("2");
  Partition ptn1 = new Partition(partVals1, dbName, tblName, 0, 0, sd, new HashMap<>());
  cachedStore.addPartition(ptn1);
  Partition ptn2 = new Partition(partVals2, dbName, tblName, 0, 0, sd, new HashMap<>());
  cachedStore.addPartition(ptn2);
  ColumnStatistics stats = new ColumnStatistics();
  ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(true, dbName, tblName);
  statsDesc.setPartName("col");
  List<ColumnStatisticsObj> colStatObjs = new ArrayList<>();
  ColumnStatisticsData data = new ColumnStatisticsData();
  ColumnStatisticsObj colStats = new ColumnStatisticsObj(colName, "int", data);
  LongColumnStatsDataInspector longStats = new LongColumnStatsDataInspector();
  longStats.setLowValue(0);
  longStats.setHighValue(100);
  longStats.setNumNulls(50);
  longStats.setNumDVs(30);
  data.setLongStats(longStats);
  colStatObjs.add(colStats);
  stats.setStatsDesc(statsDesc);
  stats.setStatsObj(colStatObjs);
  cachedStore.updatePartitionColumnStatistics(stats.deepCopy(), partVals1);
  cachedStore.updatePartitionColumnStatistics(stats.deepCopy(), partVals2);
  List<String> colNames = new ArrayList<>();
  colNames.add(colName);
  List<String> aggrPartVals = new ArrayList<>();
  aggrPartVals.add("1");
  aggrPartVals.add("2");
  AggrStats aggrStats = cachedStore.get_aggr_stats_for(dbName, tblName, aggrPartVals, colNames);
  Assert.assertEquals(100, aggrStats.getColStats().get(0).getStatsData().getLongStats().getNumNulls());
  aggrStats = cachedStore.get_aggr_stats_for(dbName, tblName, aggrPartVals, colNames);
  Assert.assertEquals(100, aggrStats.getColStats().get(0).getStatsData().getLongStats().getNumNulls());
}
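Both reads assert numNulls == 100, i.e. the 50 nulls from each of the two partitions summed by the aggregator. Calling get_aggr_stats_for twice with identical arguments is the point of the test: the repeated read, which CachedStore can serve from its cache, must return the same aggregate as the first.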