Use of io.fabric8.openshift.client.OpenShiftClient in project fabric8-maven-plugin by fabric8io.
The class ImportMojo, method executeInternal:
@Override
public void executeInternal() throws MojoExecutionException, MojoFailureException {
    if (!basedir.isDirectory() || !basedir.exists()) {
        throw new MojoExecutionException("No directory for base directory: " + basedir);
    }
    // lets check for a git repo
    String gitRemoteURL = null;
    Repository repository = null;
    try {
        repository = GitUtils.findRepository(basedir);
    } catch (IOException e) {
        throw new MojoExecutionException("Failed to find local git repository in current directory: " + e, e);
    }
    try {
        gitRemoteURL = GitUtils.getRemoteAsHttpsURL(repository);
    } catch (Exception e) {
        throw new MojoExecutionException("Failed to get the current git branch: " + e, e);
    }
    try {
        clusterAccess = new ClusterAccess(this.namespace);
        if (Strings.isNullOrBlank(projectName)) {
            projectName = basedir.getName();
        }
        KubernetesClient kubernetes = clusterAccess.createDefaultClient(log);
        KubernetesResourceUtil.validateKubernetesMasterUrl(kubernetes.getMasterUrl());
        String namespace = clusterAccess.getNamespace();
        OpenShiftClient openShiftClient = getOpenShiftClientOrJenkinsShift(kubernetes, namespace);
        if (gitRemoteURL != null) {
            // lets check we don't already have this project imported
            String branch = repository.getBranch();
            BuildConfig buildConfig = findBuildConfigForGitRepo(openShiftClient, namespace, gitRemoteURL, branch);
            if (buildConfig != null) {
                logBuildConfigLink(kubernetes, namespace, buildConfig, log);
                throw new MojoExecutionException("Project already imported into build " + getName(buildConfig) + " for URI: " + gitRemoteURL + " and branch: " + branch);
            } else {
                Map<String, String> annotations = new HashMap<>();
                annotations.put(Annotations.Builds.GIT_CLONE_URL, gitRemoteURL);
                buildConfig = createBuildConfig(kubernetes, namespace, projectName, gitRemoteURL, annotations);
                openShiftClient.buildConfigs().inNamespace(namespace).create(buildConfig);
                ensureExternalGitSecretsAreSetupFor(kubernetes, namespace, gitRemoteURL);
                logBuildConfigLink(kubernetes, namespace, buildConfig, log);
            }
        } else {
            // lets create and import a new project
            UserDetails userDetails = createGogsUserDetails(kubernetes, namespace);
            userDetails.setBranch(branchName);
            BuildConfigHelper.CreateGitProjectResults createGitProjectResults;
            try {
                createGitProjectResults = BuildConfigHelper.importNewGitProject(kubernetes, userDetails, basedir, namespace, projectName, remoteName, "Importing project from mvn fabric8:import", false);
            } catch (WebApplicationException e) {
                Response response = e.getResponse();
                if (response.getStatus() > 400) {
                    String message = getEntityMessage(response);
                    log.warn("Could not create the git repository: %s %s", e, message);
                    log.warn("Are your username and password correct in the Secret %s/%s?", secretNamespace, gogsSecretName);
                    log.warn("To re-enter your password rerun this command with -Dfabric8.passsword.retry=true");
                    throw new MojoExecutionException("Could not create the git repository. " + "Are your username and password correct in the Secret " + secretNamespace + "/" + gogsSecretName + "?" + e + message, e);
                } else {
                    throw e;
                }
            }
            BuildConfig buildConfig = createGitProjectResults.getBuildConfig();
            openShiftClient.buildConfigs().inNamespace(namespace).create(buildConfig);
            logBuildConfigLink(kubernetes, namespace, buildConfig, log);
        }
    } catch (KubernetesClientException e) {
        KubernetesResourceUtil.handleKubernetesClientException(e, this.log);
    } catch (Exception e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }
}
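The helper createBuildConfig(...) used above is not shown in this snippet. A minimal sketch of what such a BuildConfig might look like when assembled with the fabric8 builders and created through the same buildConfigs().inNamespace(...).create(...) call; the resource name, namespace, and annotation key used here are illustrative assumptions, not the plugin's actual implementation:

// Hypothetical sketch, not the plugin's createBuildConfig implementation:
// build a BuildConfig pointing at a git repository and create it via OpenShiftClient.
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildConfigBuilder;
import io.fabric8.openshift.client.DefaultOpenShiftClient;
import io.fabric8.openshift.client.OpenShiftClient;
import java.util.Collections;
import java.util.Map;

public class BuildConfigSketch {
    public static void main(String[] args) {
        String namespace = "myproject";                       // assumed namespace
        String gitRemoteURL = "https://example.com/demo.git"; // assumed clone URL
        Map<String, String> annotations =
                Collections.singletonMap("fabric8.io/git-clone-url", gitRemoteURL); // illustrative key

        BuildConfig buildConfig = new BuildConfigBuilder()
                .withNewMetadata()
                    .withName("demo") // illustrative name
                    .withAnnotations(annotations)
                .endMetadata()
                .withNewSpec()
                    .withNewSource()
                        .withType("Git")
                        .withNewGit().withUri(gitRemoteURL).endGit()
                    .endSource()
                .endSpec()
                .build();

        try (OpenShiftClient client = new DefaultOpenShiftClient()) {
            client.buildConfigs().inNamespace(namespace).create(buildConfig);
        }
    }
}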
Use of io.fabric8.openshift.client.OpenShiftClient in project fabric8-maven-plugin by fabric8io.
The class BaseBoosterIT, method setupSampleTestRepository:
protected Repository setupSampleTestRepository(String repositoryUrl, String relativePomPath) throws IOException, GitAPIException, XmlPullParserException {
    openShiftClient = new DefaultOpenShiftClient(new ConfigBuilder().build());
    testsuiteNamespace = openShiftClient.getNamespace();
    Repository repository = cloneRepositoryUsingHttp(repositoryUrl);
    modifyPomFileToProjectVersion(repository, relativePomPath);
    return repository;
}
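The helpers cloneRepositoryUsingHttp and modifyPomFileToProjectVersion are defined elsewhere in BaseBoosterIT. As a rough idea of the cloning step only, a minimal JGit-based sketch under the assumption that JGit is on the classpath; the helper name and the temporary-directory handling are assumptions, not the project's code:

// Hypothetical sketch of a cloneRepositoryUsingHttp-style helper using JGit.
import java.io.File;
import java.nio.file.Files;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.lib.Repository;

public class GitCloneSketch {
    public static Repository cloneRepositoryUsingHttp(String repositoryUrl) throws Exception {
        // clone into a throwaway working directory (assumption; the test may use a fixed location)
        File workDir = Files.createTempDirectory("booster-it").toFile();
        Git git = Git.cloneRepository()
                .setURI(repositoryUrl)
                .setDirectory(workDir)
                .call();
        return git.getRepository();
    }
}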
Use of io.fabric8.openshift.client.OpenShiftClient in project syndesis by syndesisio.
The class PrometheusMetricsProviderImplTest, method setUp:
@Before
public void setUp() {
    final PrometheusConfigurationProperties config = new PrometheusConfigurationProperties();
    config.setService("syndesis-prometheus-syndesis.192.168.64.22.nip.io");
    NamespacedOpenShiftClient openshiftClient = mock(NamespacedOpenShiftClient.class);
    when(openshiftClient.pods().withName("syndesis-server").get().getStatus().getStartTime()).thenReturn("2018-03-14T23:34:09Z");
    metricsProvider = new PrometheusMetricsProviderImpl(config, openshiftClient);
}
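Stubbing the whole chain openshiftClient.pods().withName(...).get().getStatus().getStartTime() in one when(...) call only works if the intermediate calls return mocks as well; with a plain mock(...) they return null. A hedged variant that makes this explicit using Mockito deep stubs (whether the original test relies on deep stubs or stubs each level manually is an assumption here):

// Variant using Mockito deep stubs so the chained call can be stubbed in one line.
import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import io.fabric8.openshift.client.NamespacedOpenShiftClient;

public class DeepStubSketch {
    public static NamespacedOpenShiftClient stubbedClient() {
        NamespacedOpenShiftClient openshiftClient =
                mock(NamespacedOpenShiftClient.class, RETURNS_DEEP_STUBS);
        // each intermediate call (pods(), withName(), get(), getStatus()) now returns a deep stub,
        // so only the final getStartTime() needs an explicit return value
        when(openshiftClient.pods().withName("syndesis-server").get().getStatus().getStartTime())
                .thenReturn("2018-03-14T23:34:09Z");
        return openshiftClient;
    }
}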
Use of io.fabric8.openshift.client.OpenShiftClient in project fabric8 by fabric8io.
The class WatchBuildsExample, method main:
public static void main(String... args) throws Exception {
    OpenShiftClient client = new DefaultOpenShiftClient();
    client.builds().watch(new Watcher<Build>() {
        @Override
        public void eventReceived(Action action, Build build) {
            System.out.println(action + ": " + build);
        }

        @Override
        public void onClose(KubernetesClientException e) {
            System.out.println("Closed: " + e);
        }
    });
    client.close();
}
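Note that closing the client right after registering the watch also ends the watch, so this example only illustrates the API shape. A sketch of a longer-lived watcher that blocks on a CountDownLatch before closing the watch and the client; the five-minute timeout is an arbitrary assumption, not part of the original example:

// Sketch: keep the build watch open until onClose fires or a timeout elapses.
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import io.fabric8.kubernetes.client.KubernetesClientException;
import io.fabric8.kubernetes.client.Watch;
import io.fabric8.kubernetes.client.Watcher;
import io.fabric8.openshift.api.model.Build;
import io.fabric8.openshift.client.DefaultOpenShiftClient;
import io.fabric8.openshift.client.OpenShiftClient;

public class WatchBuildsSketch {
    public static void main(String... args) throws Exception {
        final CountDownLatch done = new CountDownLatch(1);
        try (OpenShiftClient client = new DefaultOpenShiftClient();
             Watch watch = client.builds().watch(new Watcher<Build>() {
                 @Override
                 public void eventReceived(Action action, Build build) {
                     System.out.println(action + ": " + build);
                 }

                 @Override
                 public void onClose(KubernetesClientException e) {
                     done.countDown(); // release the main thread when the watch closes
                 }
             })) {
            // wait up to five minutes for build events before closing watch and client
            done.await(5, TimeUnit.MINUTES);
        }
    }
}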
Use of io.fabric8.openshift.client.OpenShiftClient in project fabric8 by fabric8io.
The class Controller, method applyEntity:
/**
 * Applies the given DTOs onto the Kubernetes master
 */
public void applyEntity(Object dto, String sourceName) throws Exception {
    if (dto instanceof Pod) {
        applyPod((Pod) dto, sourceName);
    } else if (dto instanceof ReplicationController) {
        applyReplicationController((ReplicationController) dto, sourceName);
    } else if (dto instanceof Service) {
        applyService((Service) dto, sourceName);
    } else if (dto instanceof Namespace) {
        applyNamespace((Namespace) dto);
    } else if (dto instanceof Route) {
        applyRoute((Route) dto, sourceName);
    } else if (dto instanceof BuildConfig) {
        applyBuildConfig((BuildConfig) dto, sourceName);
    } else if (dto instanceof DeploymentConfig) {
        DeploymentConfig resource = (DeploymentConfig) dto;
        OpenShiftClient openShiftClient = getOpenShiftClientOrNull();
        if (openShiftClient != null && openShiftClient.supportsOpenShiftAPIGroup(OpenShiftAPIGroups.APPS)) {
            applyResource(resource, sourceName, openShiftClient.deploymentConfigs());
        } else {
            LOG.warn("Not connected to OpenShift cluster so cannot apply entity " + dto);
        }
    } else if (dto instanceof PolicyBinding) {
        applyPolicyBinding((PolicyBinding) dto, sourceName);
    } else if (dto instanceof RoleBinding) {
        applyRoleBinding((RoleBinding) dto, sourceName);
    } else if (dto instanceof Role) {
        Role resource = (Role) dto;
        OpenShiftClient openShiftClient = getOpenShiftClientOrNull();
        if (openShiftClient != null && openShiftClient.supportsOpenShiftAPIGroup(OpenShiftAPIGroups.AUTHORIZATION)) {
            applyResource(resource, sourceName, openShiftClient.roles());
        } else {
            LOG.warn("Not connected to OpenShift cluster so cannot apply entity " + dto);
        }
    } else if (dto instanceof ImageStream) {
        applyImageStream((ImageStream) dto, sourceName);
    } else if (dto instanceof OAuthClient) {
        applyOAuthClient((OAuthClient) dto, sourceName);
    } else if (dto instanceof Template) {
        applyTemplate((Template) dto, sourceName);
    } else if (dto instanceof ServiceAccount) {
        applyServiceAccount((ServiceAccount) dto, sourceName);
    } else if (dto instanceof Secret) {
        applySecret((Secret) dto, sourceName);
    } else if (dto instanceof ConfigMap) {
        applyResource((ConfigMap) dto, sourceName, kubernetesClient.configMaps());
    } else if (dto instanceof DaemonSet) {
        applyResource((DaemonSet) dto, sourceName, kubernetesClient.extensions().daemonSets());
    } else if (dto instanceof Deployment) {
        applyResource((Deployment) dto, sourceName, kubernetesClient.extensions().deployments());
    } else if (dto instanceof ReplicaSet) {
        applyResource((ReplicaSet) dto, sourceName, kubernetesClient.extensions().replicaSets());
    } else if (dto instanceof StatefulSet) {
        applyResource((StatefulSet) dto, sourceName, kubernetesClient.apps().statefulSets());
    } else if (dto instanceof Ingress) {
        applyResource((Ingress) dto, sourceName, kubernetesClient.extensions().ingresses());
    } else if (dto instanceof PersistentVolumeClaim) {
        applyPersistentVolumeClaim((PersistentVolumeClaim) dto, sourceName);
    } else if (dto instanceof HasMetadata) {
        HasMetadata entity = (HasMetadata) dto;
        try {
            String namespace = getNamespace();
            String resourceNamespace = getNamespace(entity);
            if (Strings.isNotBlank(namespace) && Strings.isNullOrBlank(resourceNamespace)) {
                getOrCreateMetadata(entity).setNamespace(namespace);
            }
            LOG.info("Applying " + getKind(entity) + " " + getName(entity) + " from " + sourceName);
            kubernetesClient.resource(entity).inNamespace(namespace).createOrReplace();
        } catch (Exception e) {
            onApplyError("Failed to create " + getKind(entity) + " from " + sourceName + ". " + e, e);
        }
    } else {
        throw new IllegalArgumentException("Unknown entity type " + dto);
    }
}
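A typical way to drive applyEntity is to load a manifest, deserialize it into HasMetadata items, and dispatch each one through the Controller. A minimal sketch, assuming Controller can be constructed from a KubernetesClient as in the fabric8 kubernetes-api module and that applyEntity has the signature shown above; the manifest file name is illustrative:

// Sketch: load a manifest with the client and apply each entity via Controller.applyEntity.
import java.io.FileInputStream;
import java.util.List;

import io.fabric8.kubernetes.api.Controller;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.client.DefaultKubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClient;

public class ApplyManifestSketch {
    public static void main(String[] args) throws Exception {
        String sourceName = "kubernetes.yml"; // illustrative manifest file
        try (KubernetesClient client = new DefaultKubernetesClient();
             FileInputStream manifest = new FileInputStream(sourceName)) {
            List<HasMetadata> entities = client.load(manifest).get();
            Controller controller = new Controller(client);
            for (HasMetadata entity : entities) {
                // dispatches to applyPod, applyService, applyBuildConfig, ... as shown above
                controller.applyEntity(entity, sourceName);
            }
        }
    }
}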