Example usage of org.apache.hadoop.hive.metastore.api.ScheduledQuery._Fields in the Apache Hive project: class TestMetastoreScheduledQueries, method testCreate.
@Test
public void testCreate() throws Exception {
// Register a scheduled query through a CREATE maintenance request.
ScheduledQuery submitted = createScheduledQuery(createKey("create", "c1"));
ScheduledQueryMaintenanceRequest request = new ScheduledQueryMaintenanceRequest();
request.setType(ScheduledQueryMaintenanceRequestType.CREATE);
request.setScheduledQuery(submitted);
client.scheduledQueryMaintenance(request);
// Read it back from the metastore; nextExecution is assigned on the remote
// side, so copy it over before comparing the two objects for equality.
ScheduledQuery stored = client.getScheduledQuery(new ScheduledQueryKey("create", "c1"));
submitted.setNextExecution(stored.getNextExecution());
assertEquals(stored, submitted);
}
Example usage of org.apache.hadoop.hive.metastore.api.ScheduledQuery._Fields in the Apache Hive project: class TestMetastoreScheduledQueries, method testExclusivePoll.
@Test
public void testExclusivePoll() throws Exception {
// Verifies that two concurrent scheduled-query polls are mutually exclusive:
// exactly one of them may obtain the query.
ExecutorService pool = Executors.newCachedThreadPool();
try {
// Slow down the poll inside the ObjectStore so the two concurrent
// poll calls are guaranteed to overlap in time.
ObjectStoreTestHook.instance = new ObjectStoreTestHook() {
@Override
public void scheduledQueryPoll() {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
// Restore the interrupt status before propagating (best practice:
// never swallow the interrupted flag).
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
};
ScheduledQuery schq = createScheduledQuery(new ScheduledQueryKey("q1", "exclusive"));
ScheduledQueryMaintenanceRequest r = new ScheduledQueryMaintenanceRequest();
r.setType(ScheduledQueryMaintenanceRequestType.CREATE);
r.setScheduledQuery(schq);
client.scheduledQueryMaintenance(r);
// wait 1 sec for next execution
Thread.sleep(1000);
Future<ScheduledQueryPollResponse> f1 = pool.submit(new AsyncPollCall("exclusive"));
Future<ScheduledQueryPollResponse> f2 = pool.submit(new AsyncPollCall("exclusive"));
ScheduledQueryPollResponse resp1 = f1.get();
ScheduledQueryPollResponse resp2 = f2.get();
// Exactly one of the two concurrent polls must win the query.
assertTrue(resp1.isSetQuery() ^ resp2.isSetQuery());
} finally {
ObjectStoreTestHook.instance = null;
// Shut the pool down on every path; previously shutdown() was skipped
// whenever the assertion (or any earlier call) threw, leaking threads.
pool.shutdownNow();
}
}
Example usage of org.apache.hadoop.hive.metastore.api.ScheduledQuery._Fields in the Apache Hive project: class ColumnStatsMergerFactory, method getColumnStatsMerger.
/**
 * Creates a {@link ColumnStatsMerger} matching the (common) statistics type of the two
 * supplied objects. When both sides carry the same number of bit vectors, the merger is
 * pre-configured with an {@code NumDistinctValueEstimator} for NDV merging.
 *
 * @param statsObjNew the incoming (new) column statistics
 * @param statsObjOld the existing (old) column statistics
 * @return a merger instance appropriate for the shared statistics type
 * @throws IllegalArgumentException if the two objects hold different statistics types
 * @throws RuntimeException if the statistics type is not supported
 */
public static ColumnStatsMerger getColumnStatsMerger(ColumnStatisticsObj statsObjNew, ColumnStatisticsObj statsObjOld) {
ColumnStatsMerger agg;
_Fields typeNew = statsObjNew.getStatsData().getSetField();
_Fields typeOld = statsObjOld.getStatsData().getSetField();
// Fail fast with a descriptive message; the previous code nulled typeNew on a
// mismatch and then hit a bare NullPointerException in the switch below.
if (typeNew != typeOld) {
throw new IllegalArgumentException(
"Cannot merge column statistics of different types: " + typeNew + " vs. " + typeOld);
}
// Number of bit vectors shared by both sides; stays 0 when the counts disagree
// (or the type carries no bit vectors), in which case no NDV estimator is attached.
int numBitVectors = 0;
switch(typeNew) {
case BOOLEAN_STATS:
agg = new BooleanColumnStatsMerger();
break;
case LONG_STATS:
{
agg = new LongColumnStatsMerger();
int nbvNew = countNumBitVectors(statsObjNew.getStatsData().getLongStats().getBitVectors());
int nbvOld = countNumBitVectors(statsObjOld.getStatsData().getLongStats().getBitVectors());
numBitVectors = nbvNew == nbvOld ? nbvNew : 0;
break;
}
case DOUBLE_STATS:
{
agg = new DoubleColumnStatsMerger();
int nbvNew = countNumBitVectors(statsObjNew.getStatsData().getDoubleStats().getBitVectors());
int nbvOld = countNumBitVectors(statsObjOld.getStatsData().getDoubleStats().getBitVectors());
numBitVectors = nbvNew == nbvOld ? nbvNew : 0;
break;
}
case STRING_STATS:
{
agg = new StringColumnStatsMerger();
int nbvNew = countNumBitVectors(statsObjNew.getStatsData().getStringStats().getBitVectors());
int nbvOld = countNumBitVectors(statsObjOld.getStatsData().getStringStats().getBitVectors());
numBitVectors = nbvNew == nbvOld ? nbvNew : 0;
break;
}
case BINARY_STATS:
agg = new BinaryColumnStatsMerger();
break;
case DECIMAL_STATS:
{
agg = new DecimalColumnStatsMerger();
int nbvNew = countNumBitVectors(statsObjNew.getStatsData().getDecimalStats().getBitVectors());
int nbvOld = countNumBitVectors(statsObjOld.getStatsData().getDecimalStats().getBitVectors());
numBitVectors = nbvNew == nbvOld ? nbvNew : 0;
break;
}
default:
// typeNew is guaranteed non-null here, so this message can no longer NPE.
throw new RuntimeException("Woh, bad. Unknown stats type " + typeNew);
}
if (numBitVectors > 0) {
agg.ndvEstimator = new NumDistinctValueEstimator(numBitVectors);
}
return agg;
}
Aggregations