Example usage of co.cask.cdap.proto.id.NamespaceId in project cdap by caskdata, from class DefaultStoreTest, method testRemoveApplication:
@Test
public void testRemoveApplication() throws Exception {
  // Register an application, check it is retrievable, then delete it and
  // verify the lookup no longer finds it.
  ApplicationSpecification appSpec = Specifications.from(new WordCountApp());
  ApplicationId applicationId = new NamespaceId("account1").app(appSpec.getName());

  store.addApplication(applicationId, appSpec);
  Assert.assertNotNull(store.getApplication(applicationId));

  // removing application
  store.removeApplication(applicationId);
  Assert.assertNull(store.getApplication(applicationId));
}
Example usage of co.cask.cdap.proto.id.NamespaceId in project cdap by caskdata, from class DefaultStoreTest, method testDeleteSuspendedWorkflow:
@Test
public void testDeleteSuspendedWorkflow() {
  // Suspended workflow runs must disappear both when their application is
  // removed and when their whole namespace is removed.
  NamespaceId ns = new NamespaceId("namespace1");

  // Test delete application
  ApplicationId app1 = ns.app("app1");
  ProgramId workflow1 = app1.workflow("pgm1");
  RunId firstRun = RunIds.generate();
  store.setStart(workflow1, firstRun.getId(), runIdToSecs(firstRun));
  store.setSuspend(workflow1, firstRun.getId());
  store.removeApplication(app1);
  Assert.assertTrue(store.getRuns(workflow1, ProgramRunStatus.ALL, 0, Long.MAX_VALUE, Integer.MAX_VALUE).isEmpty());

  // Test delete namespace
  ProgramId workflow2 = ns.app("app2").workflow("pgm2");
  RunId secondRun = RunIds.generate();
  store.setStart(workflow2, secondRun.getId(), runIdToSecs(secondRun));
  store.setSuspend(workflow2, secondRun.getId());
  store.removeAll(ns);
  nsStore.delete(ns);
  Assert.assertTrue(store.getRuns(workflow2, ProgramRunStatus.ALL, 0, Long.MAX_VALUE, Integer.MAX_VALUE).isEmpty());
}
Example usage of co.cask.cdap.proto.id.NamespaceId in project cdap by caskdata, from class UpgradeTool, method main:
public static void main(String[] args) throws Exception {
Options options = new Options().addOption(new Option("h", "help", false, "Print this usage message.")).addOption(new Option("u", "uri", true, "CDAP instance URI to interact with in the format " + "[http[s]://]<hostname>:<port>. Defaults to localhost:11015.")).addOption(new Option("a", "accesstoken", true, "File containing the access token to use when interacting " + "with a secure CDAP instance.")).addOption(new Option("t", "timeout", true, "Timeout in milliseconds to use when interacting with the " + "CDAP RESTful APIs. Defaults to " + DEFAULT_READ_TIMEOUT_MILLIS + ".")).addOption(new Option("n", "namespace", true, "Namespace to perform the upgrade in. If none is given, " + "pipelines in all namespaces will be upgraded.")).addOption(new Option("p", "pipeline", true, "Name of the pipeline to upgrade. If specified, a namespace " + "must also be given.")).addOption(new Option("f", "configfile", true, "File containing old application details to update. " + "The file contents are expected to be in the same format as the request body for creating an " + "ETL application from one of the etl artifacts. " + "It is expected to be a JSON Object containing 'artifact' and 'config' fields." + "The value for 'artifact' must be a JSON Object that specifies the artifact scope, name, and version. " + "The value for 'config' must be a JSON Object specifies the source, transforms, and sinks of the pipeline, " + "as expected by older versions of the etl artifacts.")).addOption(new Option("o", "outputfile", true, "File to write the converted application details provided in " + "the configfile option. If none is given, results will be written to the input file + '.converted'. " + "The contents of this file can be sent directly to CDAP to update or create an application.")).addOption(new Option("e", "errorDir", true, "Optional directory to write any upgraded pipeline configs that " + "failed to upgrade. The problematic configs can then be manually edited and upgraded separately. 
" + "Upgrade errors may happen for pipelines that use plugins that are not backwards compatible. " + "This directory must be writable by the user that is running this tool."));
CommandLineParser parser = new BasicParser();
CommandLine commandLine = parser.parse(options, args);
String[] commandArgs = commandLine.getArgs();
// if help is an option, or if there isn't a single 'upgrade' command, print usage and exit.
if (commandLine.hasOption("h") || commandArgs.length != 1 || !"upgrade".equalsIgnoreCase(commandArgs[0])) {
HelpFormatter helpFormatter = new HelpFormatter();
helpFormatter.printHelp(UpgradeTool.class.getName() + " upgrade", "Upgrades old pipelines to the current version. If the plugins used are not backward-compatible, " + "the attempted upgrade config will be written to the error directory for a manual upgrade.", options, "");
System.exit(0);
}
ClientConfig clientConfig = getClientConfig(commandLine);
if (commandLine.hasOption("f")) {
String inputFilePath = commandLine.getOptionValue("f");
String outputFilePath = commandLine.hasOption("o") ? commandLine.getOptionValue("o") : inputFilePath + ".new";
convertFile(inputFilePath, outputFilePath, new Upgrader(new ArtifactClient(clientConfig)));
System.exit(0);
}
File errorDir = commandLine.hasOption("e") ? new File(commandLine.getOptionValue("e")) : null;
if (errorDir != null) {
if (!errorDir.exists()) {
if (!errorDir.mkdirs()) {
LOG.error("Unable to create error directory {}.", errorDir.getAbsolutePath());
System.exit(1);
}
} else if (!errorDir.isDirectory()) {
LOG.error("{} is not a directory.", errorDir.getAbsolutePath());
System.exit(1);
} else if (!errorDir.canWrite()) {
LOG.error("Unable to write to error directory {}.", errorDir.getAbsolutePath());
System.exit(1);
}
}
UpgradeTool upgradeTool = new UpgradeTool(clientConfig, errorDir);
String namespace = commandLine.getOptionValue("n");
String pipelineName = commandLine.getOptionValue("p");
if (pipelineName != null) {
if (namespace == null) {
throw new IllegalArgumentException("Must specify a namespace when specifying a pipeline.");
}
ApplicationId appId = new ApplicationId(namespace, pipelineName);
if (upgradeTool.upgrade(appId)) {
LOG.info("Successfully upgraded {}.", appId);
} else {
LOG.info("{} did not need to be upgraded.", appId);
}
System.exit(0);
}
if (namespace != null) {
printUpgraded(upgradeTool.upgrade(new NamespaceId(namespace)));
System.exit(0);
}
printUpgraded(upgradeTool.upgrade());
}
Example usage of co.cask.cdap.proto.id.NamespaceId in project cdap by caskdata, from class RestartProgramsCommand, method runBatchCommand:
@Override
protected void runBatchCommand(PrintStream printStream, Args<BatchProgram> args) throws Exception {
  // Restart semantics: stop every requested program first, then start them all again.
  NamespaceId ns = args.appId.getParent();

  printStream.print("Stopping programs...\n");
  programClient.stop(ns, args.programs);

  printStream.print("Starting programs...\n");
  List<BatchProgramStart> starts = new ArrayList<>(args.programs.size());
  for (BatchProgram batchProgram : args.programs) {
    starts.add(new BatchProgramStart(batchProgram));
  }
  programClient.start(ns, starts);
}
Example usage of co.cask.cdap.proto.id.NamespaceId in project cdap by caskdata, from class StreamFileJanitor, method cleanAll:
/**
 * Performs file cleanup for all streams.
 * <p>
 * For every namespace: resolves the stream base directory (impersonating the
 * namespace owner), purges the "deleted" area, then cleans each stream
 * location using the stream's configured TTL (0 if the stream no longer exists).
 *
 * @throws Exception if listing namespaces or touching stream locations fails
 */
public void cleanAll() throws Exception {
  for (final NamespaceMeta meta : namespaceQueryAdmin.list()) {
    final NamespaceId nsId = meta.getNamespaceId();

    // Location lookups must run as the namespace's effective user.
    final Location streamBaseLocation = impersonator.doAs(nsId, new Callable<Location>() {
      @Override
      public Location call() throws Exception {
        return namespacedLocationFactory.get(nsId).append(streamBaseDirPath);
      }
    });

    // Nothing to clean if this namespace has no stream directory at all.
    if (!streamBaseLocation.exists()) {
      continue;
    }

    // Remove everything under the deleted directory
    Location deletedLocation = StreamUtils.getDeletedLocation(streamBaseLocation);
    if (deletedLocation.exists()) {
      Locations.deleteContent(deletedLocation);
    }

    for (final Location streamLocation : StreamUtils.listAllStreams(streamBaseLocation)) {
      final StreamId streamId = nsId.stream(StreamUtils.getStreamNameFromLocation(streamLocation));
      // TTL of 0 for streams that no longer exist in the admin's view.
      long ttl = isStreamExists(streamId) ? streamAdmin.getConfig(streamId).getTTL() : 0L;
      clean(streamLocation, ttl, System.currentTimeMillis());
    }
  }
}
Aggregations