Example use of io.cdap.cdap.client.NamespaceClient in the project cdap by caskdata:
the class UpgradeTool, method main.
public static void main(String[] args) throws Exception {
Options options = new Options().addOption(new Option("h", "help", false, "Print this usage message.")).addOption(new Option("u", "uri", true, "CDAP instance URI to interact with in the format " + "[http[s]://]<hostname>:<port>. Defaults to localhost:11015.")).addOption(new Option("a", "accesstoken", true, "File containing the access token to use when interacting " + "with a secure CDAP instance.")).addOption(new Option("t", "timeout", true, "Timeout in milliseconds to use when interacting with the " + "CDAP RESTful APIs. Defaults to " + DEFAULT_READ_TIMEOUT_MILLIS + ".")).addOption(new Option("n", "namespace", true, "Namespace to perform the upgrade in. If none is given, " + "pipelines in all namespaces will be upgraded.")).addOption(new Option("p", "pipeline", true, "Name of the pipeline to upgrade. If specified, a namespace " + "must also be given.")).addOption(new Option("v", "version", true, "Pipeline version to upgrade to. This should only be specified if " + "you want to upgrade to a version that is not the same as the version of this tool.")).addOption(new Option("r", "rerun", false, "Whether to re-run upgrade of pipelines. " + "This will re-run the upgrade for any pipelines that are using the current pipeline version in addition " + "to running upgrade for any old pipelines.")).addOption(new Option("f", "configfile", true, "File containing old application details to update. " + "The file contents are expected to be in the same format as the request body for creating an " + "ETL application from one of the etl artifacts. " + "It is expected to be a JSON Object containing 'artifact' and 'config' fields." + "The value for 'artifact' must be a JSON Object that specifies the artifact scope, name, and version. 
" + "The value for 'config' must be a JSON Object specifies the source, transforms, and sinks of the pipeline, " + "as expected by older versions of the etl artifacts.")).addOption(new Option("o", "outputfile", true, "File to write the converted application details provided in " + "the configfile option. If none is given, results will be written to the input file + '.converted'. " + "The contents of this file can be sent directly to CDAP to update or create an application.")).addOption(new Option("od", "outputdir", true, "Directory to write the application request that would be used " + "to upgrade the pipeline(s). This should only be used with the 'dryrun' command, not the 'upgrade' command. " + "The contents of the app request files can be sent directly to CDAP to update or create an application.")).addOption(new Option("e", "errorDir", true, "Optional directory to write any upgraded pipeline configs that " + "failed to upgrade. The problematic configs can then be manually edited and upgraded separately. " + "Upgrade errors may happen for pipelines that use plugins that are not backwards compatible. " + "This directory must be writable by the user that is running this tool."));
CommandLineParser parser = new BasicParser();
CommandLine commandLine = parser.parse(options, args);
String[] commandArgs = commandLine.getArgs();
String command = commandArgs.length > 0 ? commandArgs[0] : null;
// if help is an option, or if there isn't a single 'upgrade' command, print usage and exit.
if (commandLine.hasOption("h") || commandArgs.length != 1 || (!"downgrade".equalsIgnoreCase(command) && !"upgrade".equalsIgnoreCase(command) && !"dryrun".equalsIgnoreCase(command))) {
HelpFormatter helpFormatter = new HelpFormatter();
helpFormatter.printHelp(UpgradeTool.class.getName() + " upgrade|downgrade|dryrun", "Upgrades old pipelines to the current version. If the plugins used are not backward-compatible, " + "the attempted upgrade config will be written to the error directory for a manual upgrade. " + "If 'dryrun' is used as the command instead of 'upgrade', pipelines will not be upgraded, but the " + "application update requests will instead be written as files to the specified outputdir.", options, "");
System.exit(0);
}
ClientConfig clientConfig = getClientConfig(commandLine);
boolean downgrade = "downgrade".equalsIgnoreCase(command);
String newVersion = commandLine.hasOption("v") ? commandLine.getOptionValue("v") : ETLVersion.getVersion();
boolean includeCurrentVersion = commandLine.hasOption("r");
if (commandLine.hasOption("f")) {
String inputFilePath = commandLine.getOptionValue("f");
String outputFilePath = commandLine.hasOption("o") ? commandLine.getOptionValue("o") : inputFilePath + ".new";
convertFile(inputFilePath, outputFilePath, new Upgrader(new NamespaceClient(clientConfig), new ArtifactClient(clientConfig), newVersion, includeCurrentVersion, downgrade));
System.exit(0);
}
File errorDir = commandLine.hasOption("e") ? new File(commandLine.getOptionValue("e")) : null;
if (errorDir != null) {
ensureDirExists(errorDir);
}
boolean dryrun = "dryrun".equalsIgnoreCase(command);
File outputDir = null;
if (dryrun) {
if (!commandLine.hasOption("od")) {
LOG.error("When performing a dryrun, an outputdir must be specified using the -od option.");
System.exit(1);
}
outputDir = new File(commandLine.getOptionValue("od"));
ensureDirExists(outputDir);
}
UpgradeTool upgradeTool = new UpgradeTool(clientConfig, errorDir, outputDir, newVersion, includeCurrentVersion, downgrade, dryrun);
String namespace = commandLine.getOptionValue("n");
String pipelineName = commandLine.getOptionValue("p");
if (pipelineName != null) {
if (namespace == null) {
throw new IllegalArgumentException("Must specify a namespace when specifying a pipeline.");
}
ApplicationId appId = new ApplicationId(namespace, pipelineName);
if (upgradeTool.upgrade(appId)) {
LOG.info("Successfully {}d {}.", command, appId);
} else {
LOG.info("{} did not need to be {}d.", appId, command);
}
System.exit(0);
}
if (namespace != null) {
printUpgraded(upgradeTool.upgrade(new NamespaceId(namespace)));
System.exit(0);
}
printUpgraded(upgradeTool.upgrade());
}
Example use of io.cdap.cdap.client.NamespaceClient in the project cdap by caskdata:
the class PostUpgradeJobMain, method restartPipelinesAndSchedules.
/**
 * Restarts the programs that were stopped for the upgrade and re-enables the schedules that were
 * suspended, in every user namespace (and, optionally, the system namespace).
 *
 * <p>Within each namespace, all programs are restarted before its suspended schedules are
 * re-enabled.
 *
 * @param clientConfig config used to construct the CDAP clients
 * @param startTimeMillis start of the restart window, in milliseconds since the epoch
 * @param restartSystemApps whether applications in the system namespace are also restarted
 * @throws Exception if listing namespaces/applications or any restart/re-enable call fails
 */
private static void restartPipelinesAndSchedules(ClientConfig clientConfig, long startTimeMillis, boolean restartSystemApps) throws Exception {
  // The window closes at the moment this method is invoked.
  long endTimeMillis = System.currentTimeMillis();

  ApplicationClient appClient = new ApplicationClient(clientConfig);
  ScheduleClient schedulesClient = new ScheduleClient(clientConfig);
  ProgramClient programsClient = new ProgramClient(clientConfig);
  NamespaceClient namespacesClient = new NamespaceClient(clientConfig);

  List<NamespaceId> namespaces = namespacesClient.list().stream()
    .map(meta -> meta.getNamespaceId())
    .collect(Collectors.toList());
  if (restartSystemApps) {
    namespaces.add(NamespaceId.SYSTEM);
  }

  // restart() takes seconds; convert the window bounds once, outside the loops.
  long startSeconds = TimeUnit.MILLISECONDS.toSeconds(startTimeMillis);
  long endSeconds = TimeUnit.MILLISECONDS.toSeconds(endTimeMillis);
  for (NamespaceId namespace : namespaces) {
    for (ApplicationRecord app : appClient.list(namespace)) {
      ApplicationId appId =
        new ApplicationId(namespace.getNamespace(), app.getName(), app.getAppVersion());
      programsClient.restart(appId, startSeconds, endSeconds);
    }
    // Re-enable schedules in a namespace AFTER its programs have been restarted.
    schedulesClient.reEnableSuspendedSchedules(namespace, startTimeMillis, endTimeMillis);
  }
}
Example use of io.cdap.cdap.client.NamespaceClient in the project cdap by caskdata:
the class UpgradeJobMain, method suspendSchedulesAndStopPipelines.
/**
 * Suspends all active workflow schedules and stops all running programs in every namespace
 * (including the system namespace) so the upgrade can run with nothing executing.
 *
 * <p>For each namespace, schedules are suspended and workflows stopped first; if every workflow
 * in the namespace is confirmed stopped, {@code stopAll} is then issued for the namespace.
 * If any workflow cannot be confirmed stopped, a {@link RetryableException} is thrown so the
 * caller can retry the whole pass — the retry re-checks and re-stops as needed.
 *
 * @param clientConfig config used to construct the CDAP clients
 * @throws RetryableException if at least one workflow is still running after a stop attempt
 * @throws Exception if listing or client stop/suspend calls fail
 */
private static void suspendSchedulesAndStopPipelines(ClientConfig clientConfig) throws Exception {
  ApplicationClient applicationClient = new ApplicationClient(clientConfig);
  ScheduleClient scheduleClient = new ScheduleClient(clientConfig);
  ProgramClient programClient = new ProgramClient(clientConfig);
  NamespaceClient namespaceClient = new NamespaceClient(clientConfig);
  // Set when a workflow could not be confirmed stopped; triggers the RetryableException below.
  boolean shouldRetry = false;
  List<NamespaceId> namespaceIdList = namespaceClient.list().stream().map(NamespaceMeta::getNamespaceId).collect(Collectors.toList());
  namespaceIdList.add(NamespaceId.SYSTEM);
  for (NamespaceId namespaceId : namespaceIdList) {
    for (ApplicationRecord record : applicationClient.list(namespaceId)) {
      ApplicationId applicationId = new ApplicationId(namespaceId.getNamespace(), record.getName(), record.getAppVersion());
      LOG.debug("Trying to stop schedule and workflows for application " + applicationId);
      // Only WORKFLOW programs have schedules; collect them for this application.
      List<WorkflowId> workflowIds = applicationClient.get(applicationId).getPrograms().stream().filter(programRecord -> programRecord.getType().equals(ProgramType.WORKFLOW)).map(programRecord -> new WorkflowId(applicationId, programRecord.getName())).collect(Collectors.toList());
      for (WorkflowId workflowId : workflowIds) {
        List<ScheduleId> scheduleIds = scheduleClient.listSchedules(workflowId).stream().map(scheduleDetail -> new ScheduleId(namespaceId.getNamespace(), record.getName(), record.getAppVersion(), scheduleDetail.getName())).collect(Collectors.toList());
        for (ScheduleId scheduleId : scheduleIds) {
          // Only schedules currently in the SCHEDULED state need to be suspended.
          if (scheduleClient.getStatus(scheduleId).equals(SCHEDULED)) {
            scheduleClient.suspend(scheduleId);
          }
        }
        // Need to stop workflows first or else the program will fail to stop below.
        if (!programClient.getStatus(workflowId).equals(ProgramStatus.STOPPED.toString())) {
          try {
            programClient.stop(workflowId);
          } catch (BadRequestException e) {
            // BadRequestException can mean the workflow transitioned to STOPPED between the
            // status check above and the stop call. Re-check to distinguish that benign race
            // from a workflow that is genuinely still running.
            if (!programClient.getStatus(workflowId).equals(ProgramStatus.STOPPED.toString())) {
              // Pipeline still in running state. Continue stopping the rest of the pipelines in
              // this namespace; the next retry will try to stop/verify status for this pipeline.
              shouldRetry = true;
            }
          }
        }
      }
    }
    // At least one pipeline is still in a running state, so retry to verify pipeline status.
    if (shouldRetry) {
      throw new RetryableException("At least one pipeline in namespace " + namespaceId + " is still running.");
    }
    // All schedules are suspended and workflows confirmed stopped; now stop all programs.
    programClient.stopAll(namespaceId);
  }
}
Example use of io.cdap.cdap.client.NamespaceClient in the project cdap by caskdata:
the class CLITestBase, method testDataset.
/**
 * Tests dataset CLI commands: creation with properties and with an owner principal, listing,
 * describing, namespace isolation of dataset types, truncation, and deletion.
 */
@Test
public void testDataset() throws Exception {
  String datasetName = PREFIX + "sdf123lkj";
  String ownedDatasetName = PREFIX + "owned";
  CLIConfig cliConfig = getCliConfig();
  DatasetTypeClient datasetTypeClient = new DatasetTypeClient(cliConfig.getClientConfig());
  DatasetTypeMeta datasetType = datasetTypeClient.list(NamespaceId.DEFAULT).get(0);
  testCommandOutputContains("create dataset instance " + datasetType.getName() + " " + datasetName + " \"a=1\"", "Successfully created dataset");
  testCommandOutputContains("list dataset instances", FakeDataset.class.getSimpleName());
  testCommandOutputContains("get dataset instance properties " + datasetName, "a,1");
  // test dataset creation with owner
  String commandOutput = getCommandOutput("create dataset instance " + datasetType.getName() + " " + ownedDatasetName + " \"a=1\"" + " " + "someDescription " + ArgumentName.PRINCIPAL + " alice/somehost.net@somekdc.net");
  Assert.assertTrue(commandOutput.contains("Successfully created dataset"));
  Assert.assertTrue(commandOutput.contains("alice/somehost.net@somekdc.net"));
  // test describing the table returns the given owner information
  testCommandOutputContains("describe dataset instance " + ownedDatasetName, "alice/somehost.net@somekdc.net");
  // switch to a freshly created 'bar' namespace to verify namespace isolation
  NamespaceClient namespaceClient = new NamespaceClient(cliConfig.getClientConfig());
  NamespaceId barspace = new NamespaceId("bar");
  namespaceClient.create(new NamespaceMeta.Builder().setName(barspace).build());
  cliConfig.setNamespace(barspace);
  // the list of dataset instances is different in the 'bar' namespace
  testCommandOutputNotContains("list dataset instances", FakeDataset.class.getSimpleName());
  // also can not create dataset instances if the type it depends on exists only in a different namespace.
  DatasetTypeId datasetType1 = barspace.datasetType(datasetType.getName());
  testCommandOutputContains("create dataset instance " + datasetType.getName() + " " + datasetName, new DatasetTypeNotFoundException(datasetType1).getMessage());
  testCommandOutputContains("use namespace default", "Now using namespace 'default'");
  // truncate, then make sure the dataset is deleted even if truncation fails
  try {
    testCommandOutputContains("truncate dataset instance " + datasetName, "Successfully truncated");
  } finally {
    testCommandOutputContains("delete dataset instance " + datasetName, "Successfully deleted");
  }
  // verify a dataset description is shown when listing instances, then clean everything up
  String datasetName2 = PREFIX + "asoijm39485";
  String description = "test-description-for-" + datasetName2;
  testCommandOutputContains("create dataset instance " + datasetType.getName() + " " + datasetName2 + " \"a=1\"" + " " + description, "Successfully created dataset");
  testCommandOutputContains("list dataset instances", description);
  testCommandOutputContains("delete dataset instance " + datasetName2, "Successfully deleted");
  testCommandOutputContains("delete dataset instance " + ownedDatasetName, "Successfully deleted");
}
Aggregations