Use of org.apache.knox.gateway.ha.provider.HaDescriptor in project knox by apache: the class DefaultHaProviderTest, method testAddingService.
@Test
public void testAddingService() {
    HaDescriptor descriptor = new DefaultHaDescriptor();
    HaProvider provider = new DefaultHaProvider(descriptor);
    ArrayList<String> urls = new ArrayList<String>();
    urls.add("http://host1");
    urls.add("http://host2");
    provider.addHaService("foo", urls);
    assertNull(provider.getActiveURL("bar"));
    String url = provider.getActiveURL("foo");
    assertNotNull(url);
    assertThat(url, isIn(urls));
}
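The active URL is simply the first healthy URL the provider currently holds for the service. A minimal sketch of how failover changes it follows; it assumes HaProvider exposes a markFailedURL(serviceName, url) method that demotes the failed URL and promotes the next registered one, so treat it as illustrative rather than a confirmed API contract.

// Minimal sketch, assuming HaProvider.markFailedURL(String, String) exists and
// rotates the active URL to the next one registered for the service.
String active = provider.getActiveURL("foo");   // e.g. "http://host1"
provider.markFailedURL("foo", active);          // assumption: demotes the failed URL
String next = provider.getActiveURL("foo");     // expected to be the other registered URL
assertThat(next, isIn(urls));
assertNotEquals(active, next);                  // hypothetical expectation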
Use of org.apache.knox.gateway.ha.provider.HaDescriptor in project knox by apache: the class HaDescriptorManagerTest, method testDescriptorStoring.
@Test
public void testDescriptorStoring() throws IOException {
    HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
    descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig("foo", "false", "42", "1000", "3", "3000", "foo:2181,bar:2181", "hiveserver2"));
    descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig("bar", "true", "3", "5000", "5", "8000", null, null));
    StringWriter writer = new StringWriter();
    HaDescriptorManager.store(descriptor, writer);
    String descriptorXml = writer.toString();
    // Expected serialized form, kept for reference; the assertions below use XPath so they do not depend on attribute order.
    String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>\n" + "<ha>\n" + " <service enabled=\"false\" failoverSleep=\"1000\" maxFailoverAttempts=\"42\" maxRetryAttempts=\"3\" name=\"foo\" retrySleep=\"3000\" zookeeperEnsemble=\"foo:2181,bar:2181\" zookeeperNamespace=\"hiveserver2\"/>\n" + " <service enabled=\"true\" failoverSleep=\"5000\" maxFailoverAttempts=\"3\" maxRetryAttempts=\"5\" name=\"bar\" retrySleep=\"8000\"/>\n" + "</ha>\n";
    // Assert against the XML actually written by HaDescriptorManager.store(), not the expected literal.
    assertThat(the(descriptorXml), hasXPath("/ha/service[@enabled='false' and @failoverSleep='1000' and @maxFailoverAttempts='42' and @maxRetryAttempts='3' and @name='foo' and @retrySleep='3000' and @zookeeperEnsemble='foo:2181,bar:2181' and @zookeeperNamespace='hiveserver2']"));
    assertThat(the(descriptorXml), hasXPath("/ha/service[@enabled='true' and @failoverSleep='5000' and @maxFailoverAttempts='3' and @maxRetryAttempts='5' and @name='bar' and @retrySleep='8000']"));
}
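The eight positional arguments to createServiceConfig read more easily with names. The mapping below is inferred from the attributes asserted in the serialized XML above, so treat the local variable names as descriptive rather than the factory's documented parameter names.

// Readability sketch: the "foo" config again, with each positional argument named.
// Order (name, enabled, maxFailoverAttempts, failoverSleep, maxRetryAttempts,
// retrySleep, zookeeperEnsemble, zookeeperNamespace) is inferred from the XML attributes.
String name = "foo";
String enabled = "false";
String maxFailoverAttempts = "42";
String failoverSleep = "1000";
String maxRetryAttempts = "3";
String retrySleep = "3000";
String zookeeperEnsemble = "foo:2181,bar:2181";
String zookeeperNamespace = "hiveserver2";
descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig(
        name, enabled, maxFailoverAttempts, failoverSleep,
        maxRetryAttempts, retrySleep, zookeeperEnsemble, zookeeperNamespace));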
Use of org.apache.knox.gateway.ha.provider.HaDescriptor in project knox by apache: the class HaDescriptorManagerTest, method testDescriptorLoad.
@Test
public void testDescriptorLoad() throws IOException {
    String xml = "<ha><service name='foo' maxFailoverAttempts='42' failoverSleep='4000' maxRetryAttempts='2' retrySleep='2213' enabled='false'/>" + "<service name='bar' failoverLimit='3' enabled='true'/></ha>";
    ByteArrayInputStream inputStream = new ByteArrayInputStream(xml.getBytes());
    HaDescriptor descriptor = HaDescriptorManager.load(inputStream);
    assertNotNull(descriptor);
    assertEquals(1, descriptor.getEnabledServiceNames().size());
    HaServiceConfig config = descriptor.getServiceConfig("foo");
    assertNotNull(config);
    assertEquals("foo", config.getServiceName());
    assertEquals(42, config.getMaxFailoverAttempts());
    assertEquals(4000, config.getFailoverSleep());
    assertEquals(2, config.getMaxRetryAttempts());
    assertEquals(2213, config.getRetrySleep());
    assertFalse(config.isEnabled());
    config = descriptor.getServiceConfig("bar");
    assertTrue(config.isEnabled());
}
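Because load(...) takes an InputStream, the same call works against the descriptor file that the deployment contributor below writes into the web archive. A short sketch follows; the file location is hypothetical, and only accessor methods already seen in the tests above are used.

// Hedged sketch: load a descriptor from a file rather than an in-memory string.
// The path is hypothetical; DESCRIPTOR_DEFAULT_FILE_NAME is the resource name that
// HaProviderDeploymentContributor uses when adding the descriptor to WEB-INF.
// (Run inside a method that declares throws IOException; requires java.nio.file.*.)
Path descriptorFile = Paths.get("WEB-INF", HaServletContextListener.DESCRIPTOR_DEFAULT_FILE_NAME);
try (InputStream in = Files.newInputStream(descriptorFile)) {
    HaDescriptor loaded = HaDescriptorManager.load(in);
    for (String serviceName : loaded.getEnabledServiceNames()) {
        HaServiceConfig serviceConfig = loaded.getServiceConfig(serviceName);
        System.out.println(serviceName + " maxFailoverAttempts=" + serviceConfig.getMaxFailoverAttempts());
    }
}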
Use of org.apache.knox.gateway.ha.provider.HaDescriptor in project knox by apache: the class HaProviderDeploymentContributor, method contributeProvider.
@Override
public void contributeProvider(DeploymentContext context, Provider provider) {
    Topology topology = context.getTopology();
    Map<String, String> params = provider.getParams();
    HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
    for (Entry<String, String> entry : params.entrySet()) {
        String role = entry.getKey();
        String roleParams = entry.getValue();
        // Create the config based on whatever is specified at the provider level
        HaServiceConfig config = HaDescriptorFactory.createServiceConfig(role, roleParams);
        // Check for service-level param overrides
        Map<String, String> serviceLevelParams = null;
        for (Service s : topology.getServices()) {
            if (s.getRole().equals(role)) {
                serviceLevelParams = s.getParams();
                break;
            }
        }
        // Apply any service-level param overrides
        applyParamOverrides(config, serviceLevelParams);
        // Add the reconciled HA service config to the descriptor
        descriptor.addServiceConfig(config);
    }
    StringWriter writer = new StringWriter();
    try {
        HaDescriptorManager.store(descriptor, writer);
    } catch (IOException e) {
        LOG.failedToWriteHaDescriptor(e);
    }
    String asset = writer.toString();
    context.getWebArchive().addAsWebInfResource(new StringAsset(asset), HaServletContextListener.DESCRIPTOR_DEFAULT_FILE_NAME);
    context.addDescriptor(HA_DESCRIPTOR_NAME, descriptor);
}
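Each provider param that feeds this loop is a service role mapped to a single settings string, which the two-argument createServiceConfig overload parses. A hedged sketch of that call is below; the semicolon-separated key=value syntax and the key names are assumptions modeled on the attributes seen in the HA descriptor XML above, not confirmed by this snippet.

// Hedged sketch: building one service config the way contributeProvider() does for
// each entry of provider.getParams(). The param-string syntax is an assumption.
HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
String role = "WEBHDFS";
String roleParams = "enabled=true;maxFailoverAttempts=3;failoverSleep=1000;maxRetryAttempts=5;retrySleep=2000";
HaServiceConfig webhdfsConfig = HaDescriptorFactory.createServiceConfig(role, roleParams);
descriptor.addServiceConfig(webhdfsConfig);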
Use of org.apache.knox.gateway.ha.provider.HaDescriptor in project knox by apache: the class WebHdfsHaDispatchTest, method testConnectivityFailover.
@Test
public void testConnectivityFailover() throws Exception {
    String serviceName = "WEBHDFS";
    HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
    descriptor.addServiceConfig(HaDescriptorFactory.createServiceConfig(serviceName, "true", "1", "1000", "2", "1000", null, null));
    HaProvider provider = new DefaultHaProvider(descriptor);
    URI uri1 = new URI("http://unreachable-host");
    URI uri2 = new URI("http://reachable-host");
    ArrayList<String> urlList = new ArrayList<String>();
    urlList.add(uri1.toString());
    urlList.add(uri2.toString());
    provider.addHaService(serviceName, urlList);
    FilterConfig filterConfig = EasyMock.createNiceMock(FilterConfig.class);
    ServletContext servletContext = EasyMock.createNiceMock(ServletContext.class);
    EasyMock.expect(filterConfig.getServletContext()).andReturn(servletContext).anyTimes();
    EasyMock.expect(servletContext.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME)).andReturn(provider).anyTimes();
    BasicHttpParams params = new BasicHttpParams();
    HttpUriRequest outboundRequest = EasyMock.createNiceMock(HttpRequestBase.class);
    EasyMock.expect(outboundRequest.getMethod()).andReturn("GET").anyTimes();
    EasyMock.expect(outboundRequest.getURI()).andReturn(uri1).anyTimes();
    EasyMock.expect(outboundRequest.getParams()).andReturn(params).anyTimes();
    HttpServletRequest inboundRequest = EasyMock.createNiceMock(HttpServletRequest.class);
    EasyMock.expect(inboundRequest.getRequestURL()).andReturn(new StringBuffer(uri2.toString())).once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter")).andReturn(new AtomicInteger(0)).once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter")).andReturn(new AtomicInteger(1)).once();
    HttpServletResponse outboundResponse = EasyMock.createNiceMock(HttpServletResponse.class);
    EasyMock.expect(outboundResponse.getOutputStream()).andAnswer(new IAnswer<SynchronousServletOutputStreamAdapter>() {

        @Override
        public SynchronousServletOutputStreamAdapter answer() throws Throwable {
            return new SynchronousServletOutputStreamAdapter() {

                @Override
                public void write(int b) throws IOException {
                    throw new IOException("unreachable-host");
                }
            };
        }
    }).once();
    EasyMock.replay(filterConfig, servletContext, outboundRequest, inboundRequest, outboundResponse);
    Assert.assertEquals(uri1.toString(), provider.getActiveURL(serviceName));
    WebHdfsHaDispatch dispatch = new WebHdfsHaDispatch();
    HttpClientBuilder builder = HttpClientBuilder.create();
    CloseableHttpClient client = builder.build();
    dispatch.setHttpClient(client);
    dispatch.setHaProvider(provider);
    dispatch.init();
    long startTime = System.currentTimeMillis();
    try {
        dispatch.executeRequest(outboundRequest, inboundRequest, outboundResponse);
    } catch (IOException e) {
        // this is expected after the failover limit is reached
    }
    long elapsedTime = System.currentTimeMillis() - startTime;
    Assert.assertEquals(uri2.toString(), provider.getActiveURL(serviceName));
    // test to make sure the sleep took place
    Assert.assertTrue(elapsedTime > 1000);
}
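Read together, the assertions trace the failover path: the mocked output stream throws on the attempt against http://unreachable-host, the dispatch consults the mocked "dispatch.ha.failover.counter" request attribute before retrying, and afterwards the provider reports http://reachable-host as the active URL. The closing elapsed-time check (greater than 1000 ms) corresponds to the 1000 ms sleep values configured in the service config at the top of the test.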