Example usage of org.apache.hadoop.hive.conf.HiveConf in the Apache Hive project.
From the class TestHBaseAggregateStatsNDVUniformDist, method before().
@Before
public void before() throws IOException {
  // Wire up the Mockito annotations (@Mock fields) declared on this test class.
  MockitoAnnotations.initMocks(this);

  // Build a metastore configuration that bypasses the HBase read/write cache
  // and enables the density-function NDV estimator exercised by these tests.
  HiveConf metastoreConf = new HiveConf();
  metastoreConf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true);
  metastoreConf.setBoolean(HiveConf.ConfVars.HIVE_METASTORE_STATS_NDV_DENSITY_FUNCTION.varname, true);

  // Stand up the mocked HBase-backed store, then clear any stats-cache
  // counters left over from a previous test run.
  store = MockUtils.init(metastoreConf, htable, rows);
  store.backdoor().getStatsCache().resetCounters();
}
Example usage of org.apache.hadoop.hive.conf.HiveConf in the Apache Hive project.
From the class TestObjectStore, method testDirectSqlErrorMetrics().
// Verifies that the DIRECTSQL_ERRORS counter is only incremented when the
// direct-SQL path of GetDbHelper actually throws, falling back to JDO.
@Test
public void testDirectSqlErrorMetrics() throws Exception {
// Enable HS2 metrics, reporting both to a JSON file and via JMX, so the
// Codahale registry can be dumped and inspected below.
HiveConf conf = new HiveConf();
conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_METRICS_ENABLED, true);
conf.setVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER, MetricsReporting.JSON_FILE.name() + "," + MetricsReporting.JMX.name());
MetricsFactory.init(conf);
CodahaleMetrics metrics = (CodahaleMetrics) MetricsFactory.getInstance();
// First run: the SQL path succeeds (returns null without throwing), so no
// error should be counted.
objectStore.new GetDbHelper("foo", null, true, true) {
@Override
protected Database getSqlResult(ObjectStore.GetHelper<Database> ctx) throws MetaException {
return null;
}
@Override
protected Database getJdoResult(ObjectStore.GetHelper<Database> ctx) throws MetaException, NoSuchObjectException {
return null;
}
}.run(false);
String json = metrics.dumpJson();
// Expect the DIRECTSQL_ERRORS counter to be absent/empty after a clean run.
MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DIRECTSQL_ERRORS, "");
// Second run: the SQL path throws, which should bump the error counter and
// fall back to the JDO path.
objectStore.new GetDbHelper("foo", null, true, true) {
@Override
protected Database getSqlResult(ObjectStore.GetHelper<Database> ctx) throws MetaException {
throw new RuntimeException();
}
@Override
protected Database getJdoResult(ObjectStore.GetHelper<Database> ctx) throws MetaException, NoSuchObjectException {
return null;
}
}.run(false);
json = metrics.dumpJson();
// Exactly one direct-SQL failure should now be recorded.
MetricsTestUtils.verifyMetricsJson(json, MetricsTestUtils.COUNTER, MetricsConstant.DIRECTSQL_ERRORS, 1);
}
Example usage of org.apache.hadoop.hive.conf.HiveConf in the Apache Hive project.
From the class TestRetriesInRetryingHMSHandler, method setup().
@BeforeClass
public static void setup() throws IOException {
  // Fresh configuration pointing the client at a metastore on a free local port.
  hiveConf = new HiveConf();
  int thriftPort = MetaStoreUtils.findFreePort();
  hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + thriftPort);
  // Keep retry behaviour deterministic and fast for the tests: a fixed number
  // of connection retries and handler attempts, a tiny back-off interval, and
  // no configuration reload between handler attempts.
  hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
  hiveConf.setIntVar(HiveConf.ConfVars.HMSHANDLERATTEMPTS, RETRY_ATTEMPTS);
  hiveConf.setTimeVar(HiveConf.ConfVars.HMSHANDLERINTERVAL, 10, TimeUnit.MILLISECONDS);
  hiveConf.setBoolVar(HiveConf.ConfVars.HMSHANDLERFORCERELOADCONF, false);
}
Example usage of org.apache.hadoop.hive.conf.HiveConf in the Apache Hive project.
From the class MockUtils, method init().
/**
 * Builds an {@link HBaseStore} whose table operations are backed by the
 * supplied in-memory {@code rows} map instead of a real HBase cluster.
 *
 * <p>Gets, puts, deletes, check-and-put/check-and-delete and scans are all
 * stubbed via Mockito to read and write {@code rows} directly. Row keys are
 * converted byte[]&lt;-&gt;String with the platform default charset on both
 * the read and write paths, so lookups stay self-consistent within the JVM.
 *
 * @param conf   a {@link HiveConf} (cast internally) used to configure the store
 * @param htable the mocked table whose methods are stubbed here
 * @param rows   backing map of row-key to cell; shared with the caller so
 *               tests can seed and inspect table state
 * @return a fully wired {@link HBaseStore} using the mocked connection
 * @throws IOException declared for the stubbed HTable API calls
 */
static HBaseStore init(Configuration conf, HTableInterface htable, final SortedMap<String, Cell> rows) throws IOException {
  ((HiveConf) conf).setVar(ConfVars.METASTORE_EXPRESSION_PROXY_CLASS, NOOPProxy.class.getName());
  // Single get: look the row key up in the map; an empty Result means "not found".
  Mockito.when(htable.get(Mockito.any(Get.class))).thenAnswer(new Answer<Result>() {
    @Override
    public Result answer(InvocationOnMock invocation) throws Throwable {
      Get get = (Get) invocation.getArguments()[0];
      Cell cell = rows.get(new String(get.getRow()));
      if (cell == null) {
        return new Result();
      } else {
        return Result.create(new Cell[] { cell });
      }
    }
  });
  // Batched get: one Result per Get, preserving request order.
  Mockito.when(htable.get(Mockito.anyListOf(Get.class))).thenAnswer(new Answer<Result[]>() {
    @Override
    public Result[] answer(InvocationOnMock invocation) throws Throwable {
      @SuppressWarnings("unchecked") List<Get> gets = (List<Get>) invocation.getArguments()[0];
      Result[] results = new Result[gets.size()];
      for (int i = 0; i < gets.size(); i++) {
        Cell cell = rows.get(new String(gets.get(i).getRow()));
        Result result;
        if (cell == null) {
          result = new Result();
        } else {
          result = Result.create(new Cell[] { cell });
        }
        results[i] = result;
      }
      return results;
    }
  });
  // Scan: materialize the [startRow, stopRow) sub-map up front and serve it
  // through a ResultScanner.
  Mockito.when(htable.getScanner(Mockito.any(Scan.class))).thenAnswer(new Answer<ResultScanner>() {
    @Override
    public ResultScanner answer(InvocationOnMock invocation) throws Throwable {
      Scan scan = (Scan) invocation.getArguments()[0];
      List<Result> results = new ArrayList<Result>();
      String start = new String(scan.getStartRow());
      String stop = new String(scan.getStopRow());
      SortedMap<String, Cell> sub = rows.subMap(start, stop);
      for (Map.Entry<String, Cell> e : sub.entrySet()) {
        results.add(Result.create(new Cell[] { e.getValue() }));
      }
      final Iterator<Result> iter = results.iterator();
      return new ResultScanner() {
        @Override
        public Result next() throws IOException {
          // BUG FIX: previously this always returned null even when the scan
          // matched rows, contradicting iterator(). Delegate to the shared
          // iterator so next() and iterator() expose the same results.
          return iter.hasNext() ? iter.next() : null;
        }
        @Override
        public Result[] next(int nbRows) throws IOException {
          // Drain up to nbRows results from the same iterator (was always
          // an empty array before).
          List<Result> batch = new ArrayList<Result>();
          for (int i = 0; i < nbRows && iter.hasNext(); i++) {
            batch.add(iter.next());
          }
          return batch.toArray(new Result[batch.size()]);
        }
        @Override
        public void close() {
          // Nothing to release for the in-memory scan.
        }
        @Override
        public Iterator<Result> iterator() {
          return iter;
        }
      };
    }
  });
  // Put: store the first cell of the first family, keyed by row.
  Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      Put put = (Put) invocation.getArguments()[0];
      rows.put(new String(put.getRow()), put.getFamilyCellMap().firstEntry().getValue().get(0));
      return null;
    }
  }).when(htable).put(Mockito.any(Put.class));
  Mockito.when(htable.checkAndPut(Mockito.any(byte[].class), Mockito.any(byte[].class), Mockito.any(byte[].class), Mockito.any(byte[].class), Mockito.any(Put.class))).thenAnswer(new Answer<Boolean>() {
    @Override
    public Boolean answer(InvocationOnMock invocation) throws Throwable {
      // Always say it succeeded and overwrite; the precondition is not checked.
      Put put = (Put) invocation.getArguments()[4];
      rows.put(new String(put.getRow()), put.getFamilyCellMap().firstEntry().getValue().get(0));
      return true;
    }
  });
  // Delete: drop the row from the map.
  Mockito.doAnswer(new Answer<Void>() {
    @Override
    public Void answer(InvocationOnMock invocation) throws Throwable {
      Delete del = (Delete) invocation.getArguments()[0];
      rows.remove(new String(del.getRow()));
      return null;
    }
  }).when(htable).delete(Mockito.any(Delete.class));
  Mockito.when(htable.checkAndDelete(Mockito.any(byte[].class), Mockito.any(byte[].class), Mockito.any(byte[].class), Mockito.any(byte[].class), Mockito.any(Delete.class))).thenAnswer(new Answer<Boolean>() {
    @Override
    public Boolean answer(InvocationOnMock invocation) throws Throwable {
      // Always say it succeeded; the precondition is not checked.
      Delete del = (Delete) invocation.getArguments()[4];
      rows.remove(new String(del.getRow()));
      return true;
    }
  });
  // Mock connection: route every table lookup to the stubbed htable, and
  // register it as HBaseReadWrite's test connection.
  HBaseConnection hconn = Mockito.mock(HBaseConnection.class);
  Mockito.when(hconn.getHBaseTable(Mockito.anyString())).thenReturn(htable);
  HiveConf.setVar(conf, HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS, HBaseReadWrite.TEST_CONN);
  HBaseReadWrite.setTestConnection(hconn);
  HBaseReadWrite.setConf(conf);
  HBaseStore store = new HBaseStore();
  store.setConf(conf);
  return store;
}
Example usage of org.apache.hadoop.hive.conf.HiveConf in the Apache Hive project.
From the class TestAggregateStatsCache, method setUp().
@Before
public void setUp() {
  // Configure the aggregate-stats cache from the test's sizing and timing
  // constants before obtaining the singleton instance.
  HiveConf conf = new HiveConf();
  // Capacity limits.
  conf.setIntVar(HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_SIZE, MAX_CACHE_NODES);
  conf.setIntVar(HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_MAX_PARTITIONS, MAX_PARTITIONS_PER_CACHE_NODE);
  // Matching tolerances.
  conf.setFloatVar(HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_FPP, FALSE_POSITIVE_PROBABILITY);
  conf.setFloatVar(HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_MAX_VARIANCE, MAX_VARIANCE);
  // Expiry and reader/writer lock-wait settings.
  conf.setVar(HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_TTL, TIME_TO_LIVE);
  conf.setVar(HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_MAX_WRITER_WAIT, MAX_WRITER_WAIT);
  conf.setVar(HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_MAX_READER_WAIT, MAX_READER_WAIT);
  cache = AggregateStatsCache.getInstance(conf);
}
Aggregations