Example usage of org.apache.druid.java.util.http.client.HttpClient from the druid project (druid-io):
class ITTLSTest, method checkAccessWithWrongHostname.
@Test
public void checkAccessWithWrongHostname() {
    LOG.info("---------Testing TLS resource access when client certificate has non-matching hostnames---------");
    // Client keystore whose certificate CN does not match any Druid node hostname.
    final HttpClient wrongHostnameClient = makeCustomHttpClient("client_tls/invalid_hostname_client.jks", "invalid_hostname_client");
    // Nodes that enforce hostname validation must all reject the mismatched certificate.
    final String[] strictTlsUrls = {
        config.getCoordinatorTLSUrl(),
        config.getOverlordTLSUrl(),
        config.getBrokerTLSUrl(),
        config.getHistoricalTLSUrl(),
        config.getRouterTLSUrl()
    };
    for (String tlsUrl : strictTlsUrls) {
        checkFailedAccessWrongHostname(wrongHostnameClient, HttpMethod.GET, tlsUrl);
    }
    // The permissive and no-client-auth routers accept the request despite the mismatch.
    makeRequest(wrongHostnameClient, HttpMethod.GET, config.getPermissiveRouterTLSUrl() + "/status", null);
    makeRequest(wrongHostnameClient, HttpMethod.GET, config.getNoClientAuthRouterTLSUrl() + "/status", null);
}
Example usage of org.apache.druid.java.util.http.client.HttpClient from the druid project (druid-io):
class ITTLSTest, method makeCustomHttpClient.
/**
 * Builds an HTTP client backed by a custom SSL context (using the given keystore,
 * certificate alias, and certificate checker) and wraps it with admin credentials.
 */
private HttpClient makeCustomHttpClient(String keystorePath, String certAlias, TLSCertificateChecker certificateChecker) {
    // Assemble the client-side SSL context from the shared TLS config, overriding
    // only the keystore path, cert alias, and certificate checker.
    final SSLContext intermediateClientSSLContext = new TLSUtils.ClientSSLContextBuilder()
        .setProtocol(sslClientConfig.getProtocol())
        .setTrustStoreType(sslClientConfig.getTrustStoreType())
        .setTrustStorePath(sslClientConfig.getTrustStorePath())
        .setTrustStoreAlgorithm(sslClientConfig.getTrustStoreAlgorithm())
        .setTrustStorePasswordProvider(sslClientConfig.getTrustStorePasswordProvider())
        .setKeyStoreType(sslClientConfig.getKeyStoreType())
        .setKeyStorePath(keystorePath)
        .setKeyStoreAlgorithm(sslClientConfig.getKeyManagerFactoryAlgorithm())
        .setCertAlias(certAlias)
        .setKeyStorePasswordProvider(sslClientConfig.getKeyStorePasswordProvider())
        .setKeyManagerFactoryPasswordProvider(sslClientConfig.getKeyManagerPasswordProvider())
        .setCertificateChecker(certificateChecker)
        .build();
    final HttpClientConfig.Builder builder = getHttpClientConfigBuilder(intermediateClientSSLContext);
    final Lifecycle lifecycle = new Lifecycle();
    final HttpClient client = HttpClientInit.createClient(builder.build(), lifecycle);
    // Wrap with basic-auth admin credentials so requests authenticate as "admin".
    return new CredentialedHttpClient(new BasicCredentials("admin", "priest"), client);
}
Example usage of org.apache.druid.java.util.http.client.HttpClient from the druid project (druid-io):
class ITTLSTest, method testPlaintextAccess.
@Test
public void testPlaintextAccess() {
    LOG.info("---------Testing resource access without TLS---------");
    // Plain (non-TLS) client, authenticated as admin via basic credentials.
    final HttpClient adminClient = new CredentialedHttpClient(new BasicCredentials("admin", "priest"), httpClient);
    // Every node's /status endpoint must be reachable over plain HTTP.
    final String[] plainUrls = {
        config.getCoordinatorUrl(),
        config.getOverlordUrl(),
        config.getBrokerUrl(),
        config.getHistoricalUrl(),
        config.getRouterUrl(),
        config.getPermissiveRouterUrl(),
        config.getNoClientAuthRouterUrl()
    };
    for (String baseUrl : plainUrls) {
        makeRequest(adminClient, HttpMethod.GET, baseUrl + "/status", null);
    }
}
Example usage of org.apache.druid.java.util.http.client.HttpClient from the hive project (apache):
class DruidStorageHandler, method makeHttpClient.
/**
 * Creates the Druid HTTP client using connection-count and read-timeout settings
 * from the current Hive session configuration, tied to the given lifecycle.
 * If Kerberos is enabled in the Hive config and Hadoop security is active, the
 * client is wrapped in a {@link KerberosHttpClient}.
 *
 * @param lifecycle lifecycle that owns the created client's resources
 * @return the plain HTTP client, or a Kerberos-wrapping client when security is on
 */
private static HttpClient makeHttpClient(Lifecycle lifecycle) {
    // Read the session conf once instead of re-fetching it for each setting.
    final HiveConf sessionConf = SessionState.getSessionConf();
    final int numConnection = HiveConf.getIntVar(sessionConf, HiveConf.ConfVars.HIVE_DRUID_NUM_HTTP_CONNECTION);
    final Period readTimeout = new Period(HiveConf.getVar(sessionConf, HiveConf.ConfVars.HIVE_DRUID_HTTP_READ_TIMEOUT));
    LOG.info("Creating Druid HTTP client with {} max parallel connections and {}ms read timeout", numConnection, readTimeout.toStandardDuration().getMillis());
    // Period is immutable; the original defensively re-wrapped it in `new Period(readTimeout)`,
    // which is a redundant copy — use it directly.
    final HttpClient httpClient = HttpClientInit.createClient(
        HttpClientConfig.builder()
            .withNumConnections(numConnection)
            .withReadTimeout(readTimeout.toStandardDuration())
            .build(),
        lifecycle);
    final boolean kerberosEnabled = HiveConf.getBoolVar(sessionConf, HiveConf.ConfVars.HIVE_DRUID_KERBEROS_ENABLE);
    if (kerberosEnabled && UserGroupInformation.isSecurityEnabled()) {
        LOG.info("building Kerberos Http Client");
        return new KerberosHttpClient(httpClient);
    }
    return httpClient;
}
Example usage of org.apache.druid.java.util.http.client.HttpClient from the hive project (apache):
class TestDruidSerDe, method deserializeQueryResults.
/**
 * Feeds canned Druid query result bytes through the record reader (via a mocked
 * HttpClient), deserializes each row with the SerDe, and checks the rows against
 * the expected records. Both the mapred ({@code next}) and mapreduce
 * ({@code nextKeyValue}) read paths are exercised.
 *
 * @param serDe        initialized Druid SerDe under test
 * @param queryType    Druid query type used to pick the record reader
 * @param jsonQuery    JSON query string placed in the split
 * @param resultString raw result bytes the mocked HTTP client returns
 * @param records      expected field values, one array per row
 */
@SuppressWarnings("unchecked")
private void deserializeQueryResults(DruidSerDe serDe, String queryType, String jsonQuery, byte[] resultString, Object[][] records) throws SerDeException, IOException, NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException, InterruptedException, NoSuchMethodException, InvocationTargetException {
    // Initialize: mock the HTTP client so the reader consumes the canned result bytes.
    HttpClient httpClient = mock(HttpClient.class);
    SettableFuture<InputStream> futureResult = SettableFuture.create();
    futureResult.set(new ByteArrayInputStream(resultString));
    when(httpClient.go(anyObject(), any(HttpResponseHandler.class))).thenReturn(futureResult);
    DruidQueryRecordReader<?> reader = DruidQueryBasedInputFormat.getDruidQueryReader(queryType);
    final HiveDruidSplit split = new HiveDruidSplit(jsonQuery, new Path("empty"), new String[] { "testing_host" });
    Configuration conf = new Configuration();
    reader.initialize(split, DruidStorageHandlerUtils.JSON_MAPPER, DruidStorageHandlerUtils.SMILE_MAPPER, httpClient, conf);
    StructObjectInspector oi = (StructObjectInspector) serDe.getObjectInspector();
    List<? extends StructField> fieldRefs = oi.getAllStructFieldRefs();
    // Check mapred path
    DruidWritable writable = reader.createValue();
    int pos = 0;
    while (reader.next(NullWritable.get(), writable)) {
        List<Object> row = (List<Object>) serDe.deserialize(writable);
        assertRowMatches(oi, fieldRefs, row, records[pos]);
        pos++;
    }
    // FIX: JUnit's assertEquals takes (expected, actual); the original had them swapped.
    assertEquals(records.length, pos);
    // Check mapreduce path: the ByteArrayInputStream was consumed, so re-arm the mock
    // future with a fresh stream and re-create the reader.
    futureResult = SettableFuture.create();
    futureResult.set(new ByteArrayInputStream(resultString));
    when(httpClient.go(anyObject(), any(HttpResponseHandler.class))).thenReturn(futureResult);
    reader = DruidQueryBasedInputFormat.getDruidQueryReader(queryType);
    reader.initialize(split, DruidStorageHandlerUtils.JSON_MAPPER, DruidStorageHandlerUtils.SMILE_MAPPER, httpClient, conf);
    pos = 0;
    while (reader.nextKeyValue()) {
        List<Object> row = (List<Object>) serDe.deserialize(reader.getCurrentValue());
        assertRowMatches(oi, fieldRefs, row, records[pos]);
        pos++;
    }
    assertEquals(records.length, pos);
}

/** Asserts one deserialized row has the expected field count, per-field types, and values. */
private static void assertRowMatches(StructObjectInspector oi, List<? extends StructField> fieldRefs, List<Object> row, Object[] expectedFieldsData) {
    assertEquals(expectedFieldsData.length, fieldRefs.size());
    for (int i = 0; i < fieldRefs.size(); i++) {
        assertEquals("Field " + i + " type", expectedFieldsData[i].getClass(), row.get(i).getClass());
        Object fieldData = oi.getStructFieldData(row, fieldRefs.get(i));
        assertEquals("Field " + i, expectedFieldsData[i], fieldData);
    }
}
Aggregations