Usage of org.apache.whirr.service.hadoop.HadoopProxy in the Apache Whirr project.
Class HadoopServiceTest, method setUp:
@Before
public void setUp() throws ConfigurationException, IOException {
  // Layer an optional user-supplied properties file (via -Dconfig=...) on top
  // of the bundled test defaults, then launch the Hadoop cluster under test.
  CompositeConfiguration config = new CompositeConfiguration();
  String userConfigPath = System.getProperty("config");
  if (userConfigPath != null) {
    config.addConfiguration(new PropertiesConfiguration(userConfigPath));
  }
  config.addConfiguration(new PropertiesConfiguration("whirr-hadoop-test.properties"));
  clusterSpec = ClusterSpec.fromConfiguration(config);

  // The factory must resolve the configured service name to a HadoopService.
  Service candidate = new ServiceFactory().create(clusterSpec.getServiceName());
  assertThat(candidate, instanceOf(HadoopService.class));
  service = (HadoopService) candidate;

  // Launch the cluster and open a local SOCKS proxy to reach it.
  cluster = service.launchCluster(clusterSpec);
  proxy = new HadoopProxy(clusterSpec, cluster);
  proxy.start();
}
Usage of org.apache.whirr.service.hadoop.HadoopProxy in the Apache Whirr project.
Class HBaseOldServiceController, method startup:
public synchronized void startup() throws Exception {
  LOG.info("Starting up cluster...");

  // Compose the cluster configuration: an optional -Dconfig=... override file
  // takes precedence over this controller's bundled resource.
  CompositeConfiguration config = new CompositeConfiguration();
  String overridePath = System.getProperty("config");
  if (overridePath != null) {
    config.addConfiguration(new PropertiesConfiguration(overridePath));
  }
  config.addConfiguration(new PropertiesConfiguration(this.configResource));

  // Build a spec with throwaway SSH keys, launch, and proxy into the cluster.
  clusterSpec = ClusterSpec.withTemporaryKeys(config);
  controller = new ClusterController();
  cluster = controller.launchCluster(clusterSpec);
  proxy = new HadoopProxy(clusterSpec, cluster);
  proxy.start();

  // Block until the HBase master is reachable before declaring success.
  waitForMaster();
  running = true;
}
Usage of org.apache.whirr.service.hadoop.HadoopProxy in the Apache Whirr project.
Class HBaseMasterClusterActionHandler, method createProxyScript:
/**
 * Writes an executable shell script ({@code hbase-proxy.sh}) into the cluster's
 * config directory that starts a local SOCKS proxy to the HBase cluster.
 *
 * Failures are logged rather than thrown: the script is a convenience, and its
 * absence should not abort cluster provisioning.
 */
private void createProxyScript(ClusterSpec clusterSpec, Cluster cluster) {
  File configDir = getConfigDir(clusterSpec);
  File hbaseProxyFile = new File(configDir, "hbase-proxy.sh");
  try {
    HadoopProxy proxy = new HadoopProxy(clusterSpec, cluster);
    InetAddress master = HBaseCluster.getMasterPublicAddress(cluster);
    String script = String.format("echo 'Running proxy to HBase cluster at %s. " + "Use Ctrl-c to quit.'\n", master.getHostName()) + Joiner.on(" ").join(proxy.getProxyCommand());
    Files.write(script, hbaseProxyFile, Charsets.UTF_8);
    // setExecutable returns false when the permission change fails (e.g. on a
    // filesystem that does not support it); warn instead of ignoring silently.
    if (!hbaseProxyFile.setExecutable(true)) {
      LOG.warn("Unable to make HBase proxy script {} executable", hbaseProxyFile);
    }
    LOG.info("Wrote HBase proxy script {}", hbaseProxyFile);
  } catch (IOException e) {
    LOG.error("Problem writing HBase proxy script {}", hbaseProxyFile, e);
  }
}
Usage of org.apache.whirr.service.hadoop.HadoopProxy in the Apache Whirr project.
Class HBaseServiceController, method startup:
public synchronized void startup() throws Exception {
  LOG.info("Starting up cluster...");

  // Assemble configuration, giving an optional -Dconfig=... file priority
  // over the controller's default resource.
  CompositeConfiguration config = new CompositeConfiguration();
  String externalConfig = System.getProperty("config");
  if (externalConfig != null) {
    config.addConfiguration(new PropertiesConfiguration(externalConfig));
  }
  config.addConfiguration(new PropertiesConfiguration(this.configResource));

  // Launch the cluster with temporary credentials and start the SOCKS proxy.
  clusterSpec = ClusterSpec.withTemporaryKeys(config);
  controller = new ClusterController();
  cluster = controller.launchCluster(clusterSpec);
  proxy = new HadoopProxy(clusterSpec, cluster);
  proxy.start();

  // Only flag the controller as running once the master answers.
  waitForMaster();
  running = true;
}
Usage of org.apache.whirr.service.hadoop.HadoopProxy in the Apache Whirr project.
Class HadoopClusterExample, method main:
@Override
public int main(String[] args) throws Exception {
  // Both AWS credentials must be present before attempting a launch.
  if (!System.getenv().containsKey("AWS_ACCESS_KEY_ID")) {
    LOG.error("AWS_ACCESS_KEY_ID is undefined in the current environment");
    return -1;
  }
  if (!System.getenv().containsKey("AWS_SECRET_ACCESS_KEY")) {
    LOG.error("AWS_SECRET_ACCESS_KEY is undefined in the current environment");
    return -2;
  }
  /**
   * Start by loading cluster configuration file and creating a ClusterSpec object
   *
   * You can find the file in the resources folder.
   */
  ClusterSpec spec = new ClusterSpec(new PropertiesConfiguration("whirr-hadoop-example.properties"));
  /**
   * Create an instance of the generic cluster controller
   */
  ClusterControllerFactory factory = new ClusterControllerFactory();
  ClusterController controller = factory.create(spec.getServiceName());
  /**
   * Start the cluster as defined in the configuration file
   */
  HadoopProxy proxy = null;
  try {
    LOG.info("Starting cluster {}", spec.getClusterName());
    Cluster cluster = controller.launchCluster(spec);
    LOG.info("Starting local SOCKS proxy");
    proxy = new HadoopProxy(spec, cluster);
    proxy.start();
    /**
     * Obtain a Hadoop configuration object and wait for services to start
     */
    Configuration config = getHadoopConfiguration(cluster);
    JobConf job = new JobConf(config, HadoopClusterExample.class);
    JobClient client = new JobClient(job);
    waitToExitSafeMode(client);
    waitForTaskTrackers(client);
    /**
     * Run a simple job to show that the cluster is available for work.
     */
    runWordCountingJob(config);
  } finally {
    /**
     * Stop the proxy and terminate all the cluster instances.
     *
     * NOTE: the success return must NOT live inside this finally block — a
     * `return` in finally silently discards any exception thrown from the try
     * body (JLS 14.20.2), turning every failure into a bogus exit code 0.
     */
    if (proxy != null) {
      proxy.stop();
    }
    controller.destroyCluster(spec);
  }
  // Reached only when the whole run succeeded; failures propagate as declared.
  return 0;
}
Aggregations of HadoopProxy usages end here.