diff --git a/cli/pom.xml b/cli/pom.xml
index 4d9e473..1994999 100644
--- a/cli/pom.xml
+++ b/cli/pom.xml
@@ -29,10 +29,30 @@
${project.groupId}
wildfly-glow-core
+
+ ${project.groupId}
+ wildfly-glow-openshift-deployment-api
+
+
+ ${project.groupId}
+ wildfly-glow-openshift-deployment-postgresql
+
+
+ ${project.groupId}
+ wildfly-glow-openshift-deployment-artemis
+
+
+ ${project.groupId}
+ wildfly-glow-openshift-deployment-keycloak
+
info.picocli
picocli
+
+ io.fabric8
+ openshift-client
+
org.jboss.slf4j
diff --git a/cli/src/main/java/org/wildfly/glow/cli/commands/Constants.java b/cli/src/main/java/org/wildfly/glow/cli/commands/Constants.java
index c9ec579..491ec3a 100644
--- a/cli/src/main/java/org/wildfly/glow/cli/commands/Constants.java
+++ b/cli/src/main/java/org/wildfly/glow/cli/commands/Constants.java
@@ -41,10 +41,13 @@ public interface Constants {
String CLOUD_OPTION = "--cloud";
String CLOUD_OPTION_SHORT = "-c";
+ String DISABLE_DEPLOYERS = "--disable-deployers";
String DOCKER_IMAGE_NAME_OPTION = "--docker-image-name";
String DOCKER_IMAGE_NAME_OPTION_LABEL = "";
String DOCKER_IMAGE_NAME_OPTION_SHORT = "-di";
-
+ String ENV_FILE_OPTION = "--env-file";
+ String ENV_FILE_OPTION_SHORT = "-ef";
+ String ENV_FILE_OPTION_LABEL = "";
String EXCLUDE_ARCHIVES_FROM_SCAN_OPTION = "--exclude-archives-from-scan";
String EXCLUDE_ARCHIVES_FROM_SCAN_OPTION_LABEL = "";
String EXCLUDE_ARCHIVES_FROM_SCAN_OPTION_SHORT = "-ea";
diff --git a/cli/src/main/java/org/wildfly/glow/cli/commands/OpenShiftSupport.java b/cli/src/main/java/org/wildfly/glow/cli/commands/OpenShiftSupport.java
new file mode 100644
index 0000000..cc4f0df
--- /dev/null
+++ b/cli/src/main/java/org/wildfly/glow/cli/commands/OpenShiftSupport.java
@@ -0,0 +1,276 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2024 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.wildfly.glow.cli.commands;
+
+import io.fabric8.kubernetes.api.model.Container;
+import io.fabric8.kubernetes.api.model.ContainerPort;
+import io.fabric8.kubernetes.api.model.EnvVar;
+import io.fabric8.kubernetes.api.model.HTTPGetAction;
+import io.fabric8.kubernetes.api.model.IntOrString;
+import io.fabric8.kubernetes.api.model.ObjectReference;
+import io.fabric8.kubernetes.api.model.Probe;
+import io.fabric8.kubernetes.api.model.Service;
+import io.fabric8.kubernetes.api.model.ServiceBuilder;
+import io.fabric8.kubernetes.api.model.ServicePort;
+import io.fabric8.kubernetes.api.model.apps.Deployment;
+import io.fabric8.kubernetes.api.model.apps.DeploymentBuilder;
+import io.fabric8.kubernetes.client.KubernetesClientBuilder;
+import io.fabric8.kubernetes.client.Watch;
+import io.fabric8.kubernetes.client.Watcher;
+import io.fabric8.kubernetes.client.WatcherException;
+import io.fabric8.kubernetes.client.dsl.NonDeletingOperation;
+import io.fabric8.kubernetes.client.utils.Serialization;
+import io.fabric8.openshift.api.model.Build;
+import io.fabric8.openshift.api.model.BuildConfig;
+import io.fabric8.openshift.api.model.BuildConfigBuilder;
+import io.fabric8.openshift.api.model.ImageLookupPolicy;
+import io.fabric8.openshift.api.model.ImageStream;
+import io.fabric8.openshift.api.model.ImageStreamBuilder;
+import io.fabric8.openshift.api.model.Route;
+import io.fabric8.openshift.api.model.RouteBuilder;
+import io.fabric8.openshift.api.model.RouteTargetReference;
+import io.fabric8.openshift.api.model.TLSConfig;
+import io.fabric8.openshift.client.OpenShiftClient;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.ServiceLoader;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import org.jboss.galleon.util.ZipUtils;
+import org.wildfly.glow.AddOn;
+import org.wildfly.glow.GlowMessageWriter;
+import org.wildfly.glow.Layer;
+import org.wildfly.glow.deployment.openshift.api.Deployer;
+
+/**
+ *
+ * @author jdenise
+ */
+class OpenShiftSupport {
+
+ private static void createAppDeployment(GlowMessageWriter writer, Path target, OpenShiftClient osClient, String name, Map env, boolean ha) throws Exception {
+ writer.info("Deploying application image on OpenShift");
+ Map labels = new HashMap<>();
+ labels.put(Deployer.LABEL, name);
+ ContainerPort port = new ContainerPort();
+ port.setContainerPort(8080);
+ port.setName("http");
+ port.setProtocol("TCP");
+
+ ContainerPort portAdmin = new ContainerPort();
+ portAdmin.setContainerPort(9990);
+ portAdmin.setName("admin");
+ portAdmin.setProtocol("TCP");
+
+ List ports = new ArrayList<>();
+ ports.add(port);
+ ports.add(portAdmin);
+ List vars = new ArrayList<>();
+ for (Entry entry : env.entrySet()) {
+ vars.add(new EnvVar().toBuilder().withName(entry.getKey()).withValue(entry.getValue()).build());
+ }
+ Container container = new Container();
+ container.setName(name);
+ container.setImage(name + ":latest");
+ container.setPorts(ports);
+ container.setEnv(vars);
+ container.setImagePullPolicy("IfNotPresent");
+ Probe readinessProbe = new Probe();
+ HTTPGetAction getAction = new HTTPGetAction();
+ getAction.setPath("/health/ready");
+ IntOrString pp = new IntOrString("admin");
+ getAction.setPort(pp);
+ getAction.setScheme("HTTP");
+ readinessProbe.setHttpGet(getAction);
+ readinessProbe.setTimeoutSeconds(1);
+ readinessProbe.setPeriodSeconds(10);
+ readinessProbe.setSuccessThreshold(1);
+ readinessProbe.setFailureThreshold(3);
+
+ container.setReadinessProbe(readinessProbe);
+ container.setTerminationMessagePath("/dev/termination-log");
+
+ Probe livenessProbe = new Probe();
+ HTTPGetAction getAction2 = new HTTPGetAction();
+ getAction2.setPath("/health/live");
+ IntOrString pp2 = new IntOrString("admin");
+ getAction2.setPort(pp2);
+ getAction2.setScheme("HTTP");
+ livenessProbe.setHttpGet(getAction2);
+ livenessProbe.setTimeoutSeconds(1);
+ livenessProbe.setPeriodSeconds(10);
+ livenessProbe.setSuccessThreshold(1);
+ livenessProbe.setFailureThreshold(3);
+ container.setLivenessProbe(livenessProbe);
+
+ Deployment deployment = new DeploymentBuilder().withNewMetadata().withName(name).endMetadata().
+ withNewSpec().withReplicas(ha ? 2 : 1).
+ withNewSelector().withMatchLabels(labels).endSelector().
+ withNewTemplate().withNewMetadata().withLabels(labels).endMetadata().withNewSpec().
+ withContainers(container).withRestartPolicy("Always").
+ endSpec().endTemplate().withNewStrategy().withType("RollingUpdate").endStrategy().endSpec().build();
+ osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update);
+ Files.write(target.resolve(name+"-deployment.yaml"), Serialization.asYaml(deployment).getBytes());
+ IntOrString v = new IntOrString();
+ v.setValue(8080);
+ Service service = new ServiceBuilder().withNewMetadata().withName(name).endMetadata().
+ withNewSpec().withPorts(new ServicePort().toBuilder().withProtocol("TCP").
+ withPort(8080).
+ withTargetPort(v).build()).withType("ClusterIP").withSessionAffinity("None").withSelector(labels).endSpec().build();
+ osClient.services().resource(service).createOr(NonDeletingOperation::update);
+ Files.write(target.resolve(name+"-service.yaml"), Serialization.asYaml(service).getBytes());
+
+ writer.info("Waiting until the application is ready ...");
+ osClient.resources(Deployment.class).resource(deployment).waitUntilReady(5, TimeUnit.MINUTES);
+ }
+
+ static void deploy(GlowMessageWriter writer, Path target, String appName, Map env, Set layers, Set addOns, boolean ha,
+ Map extraEnv, boolean disableDeployers) throws Exception {
+ Map actualEnv = new HashMap<>();
+ actualEnv.putAll(env);
+ OpenShiftClient osClient = new KubernetesClientBuilder().build().adapt(OpenShiftClient.class);
+ writer.info("\nConnected to OpenShift cluster");
+ // First create the future route to the application, can be needed by deployers
+ Route route = new RouteBuilder().withNewMetadata().withName(appName).
+ endMetadata().withNewSpec().
+ withTo(new RouteTargetReference("Service", appName, 100)).
+ withTls(new TLSConfig().toBuilder().withTermination("edge").
+ withInsecureEdgeTerminationPolicy("Redirect").build()).endSpec().build();
+ osClient.routes().resource(route).createOr(NonDeletingOperation::update);
+ Files.write(target.resolve(appName+"-route.yaml"), Serialization.asYaml(route).getBytes());
+ String host = osClient.routes().resource(route).get().getSpec().getHost();
+ // Done route creation
+ for (Deployer d : ServiceLoader.load(Deployer.class)) {
+ boolean deployed = false;
+ for (Layer l : layers) {
+ if (d.getSupportedLayers().contains(l.getName())) {
+ writer.info("Found deployer " + d.getName() + " for " + l.getName());
+ deployed = true;
+ actualEnv.putAll(disableDeployers ? d.disabledDeploy(host, appName, l.getName()) : d.deploy(writer, target, osClient, env, host, appName, l.getName()));
+ break;
+ }
+ }
+ if (!deployed) {
+ for (AddOn ao : addOns) {
+ if (ao.getFamily().equals(d.getSupportedAddOnFamily())
+ && d.getSupportedAddOns().contains(ao.getName())) {
+ writer.info("Found deployer " + d.getName() + " for " + ao.getName());
+ actualEnv.putAll(disableDeployers ? d.disabledDeploy(host, appName, ao.getName()) : d.deploy(writer, target, osClient, env, host, appName, ao.getName()));
+ break;
+ }
+ }
+ }
+ }
+ if (disableDeployers) {
+ writer.info("\nOpenShift deployers are disabled.");
+ }
+ createBuild(target, osClient, appName);
+ Map filteredMap = new HashMap<>();
+ for (String k : actualEnv.keySet()) {
+ if (!k.startsWith("{")) {
+ filteredMap.put(k, actualEnv.get(k));
+ } else {
+ if(disableDeployers) {
+ String cleanK = k.replaceAll("\\{", "_");
+ filteredMap.put(cleanK, actualEnv.get(k));
+ }
+ }
+ }
+
+ filteredMap.putAll(extraEnv);
+ if (!filteredMap.isEmpty()) {
+ if(disableDeployers) {
+ writer.warn("\nThe following environment variables have been set in the " + appName + " deployment. WARN: Some of them need possibly to be updated in the deployment:");
+ } else {
+ writer.warn("\nThe following environment variables have been set in the " + appName + " deployment:");
+ }
+ for (Entry entry : filteredMap.entrySet()) {
+ writer.warn(entry.getKey() + "=" + entry.getValue());
+ }
+ }
+ createAppDeployment(writer, target, osClient, appName, filteredMap, ha);
+ writer.info("\nApplication route: https://" + host+ ("ROOT.war".equals(appName) ? "" : "/" + appName));
+ }
+
+ static void createBuild(Path target, OpenShiftClient osClient, String name) throws Exception {
+ // zip deployment and provisioning.xml to be pushed to OpenShift
+ Path file = Paths.get("openshiftApp.zip");
+ if (Files.exists(file)) {
+ Files.delete(file);
+ }
+ file.toFile().deleteOnExit();
+ ZipUtils.zip(target, file);
+ System.out.println("\nCreating and starting application image build on OpenShift (this can take up to a few minutes)...");
+ ImageStream stream = new ImageStreamBuilder().withNewMetadata().withName(name).
+ endMetadata().withNewSpec().withLookupPolicy(new ImageLookupPolicy(Boolean.TRUE)).endSpec().build();
+ osClient.imageStreams().resource(stream).createOr(NonDeletingOperation::update);
+ Files.write(target.resolve(name+"-image-stream.yaml"), Serialization.asYaml(stream).getBytes());
+ BuildConfigBuilder builder = new BuildConfigBuilder();
+ ObjectReference ref = new ObjectReference();
+ ref.setKind("ImageStreamTag");
+ ref.setName(name + ":latest");
+ BuildConfig buildConfig = builder.
+ withNewMetadata().withName(name + "-build").endMetadata().withNewSpec().
+ withNewOutput().
+ withNewTo().
+ withKind("ImageStreamTag").
+ withName(name + ":latest").endTo().
+ endOutput().withNewStrategy().withNewSourceStrategy().withNewFrom().withKind("DockerImage").
+ withName("quay.io/wildfly/wildfly-s2i:latest").endFrom().
+ withIncremental(true).
+ withEnv(new EnvVar().toBuilder().withName("GALLEON_USE_LOCAL_FILE").withValue("true").build()).
+ endSourceStrategy().endStrategy().withNewSource().
+ withType("Binary").endSource().endSpec().build();
+ osClient.buildConfigs().resource(buildConfig).createOr(NonDeletingOperation::update);
+ Files.write(target.resolve(name+"-build-config.yaml"), Serialization.asYaml(buildConfig).getBytes());
+
+ Build build = osClient.buildConfigs().withName(name + "-build").instantiateBinary().fromFile(file.toFile());
+ CountDownLatch latch = new CountDownLatch(1);
+ try (Watch watcher = osClient.builds().withName(build.getMetadata().getName()).watch(getBuildWatcher(latch))) {
+ latch.await();
+ }
+ }
+
+ private static Watcher getBuildWatcher(final CountDownLatch latch) {
+ return new Watcher() {
+ @Override
+ public void eventReceived(Action action, Build build) {
+ //buildHolder.set(build);
+ String phase = build.getStatus().getPhase();
+ if("Running".equals(phase)) {
+ System.out.println("Build is running...");
+ }
+ if("Complete".equals(phase)) {
+ System.out.println("Build is complete.");
+ latch.countDown();
+ }
+ }
+
+ @Override
+ public void onClose(WatcherException cause) {
+ }
+ };
+ }
+}
diff --git a/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java b/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java
index 3cd66aa..5de0e5d 100644
--- a/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java
+++ b/cli/src/main/java/org/wildfly/glow/cli/commands/ScanCommand.java
@@ -16,6 +16,7 @@
*/
package org.wildfly.glow.cli.commands;
+import java.nio.file.Files;
import org.jboss.galleon.util.IoUtils;
import org.wildfly.glow.Arguments;
import org.wildfly.glow.FeaturePacks;
@@ -33,16 +34,20 @@
import java.nio.file.Path;
import java.nio.file.Paths;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
+import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static org.wildfly.glow.Arguments.CLOUD_EXECUTION_CONTEXT;
import static org.wildfly.glow.Arguments.COMPACT_PROPERTY;
+import org.wildfly.glow.Env;
import static org.wildfly.glow.OutputFormat.BOOTABLE_JAR;
import static org.wildfly.glow.OutputFormat.DOCKER_IMAGE;
+import static org.wildfly.glow.OutputFormat.OPENSHIFT;
@CommandLine.Command(
name = Constants.SCAN_COMMAND,
@@ -94,6 +99,11 @@ public class ScanCommand extends AbstractCommand {
split = ",", paramLabel = Constants.EXCLUDE_ARCHIVES_FROM_SCAN_OPTION_LABEL)
Set excludeArchivesFromScan = new HashSet<>();
+ @CommandLine.Option(names = {Constants.ENV_FILE_OPTION_SHORT, Constants.ENV_FILE_OPTION}, paramLabel = Constants.ENV_FILE_OPTION_LABEL)
+ Optional envFile;
+
+ @CommandLine.Option(names = Constants.DISABLE_DEPLOYERS)
+ Optional disableDeployers;
@Override
public Integer call() throws Exception {
HiddenPropertiesAccessor hiddenPropertiesAccessor = new HiddenPropertiesAccessor();
@@ -122,6 +132,29 @@ public Integer call() throws Exception {
if (wildflyServerVersion.isPresent()) {
builder.setVersion(wildflyServerVersion.get());
}
+ Map extraEnv = new HashMap<>();
+ if (envFile.isPresent()) {
+ if (provision.isPresent()) {
+ if (!OPENSHIFT.equals(provision.get())) {
+ throw new Exception("Env file is only usable when --provision=" + OPENSHIFT + " option is set.");
+ }
+ } else {
+ throw new Exception("Env file is only usable when --provision=" + OPENSHIFT + " option is set.");
+ }
+ Path p = envFile.get();
+ if (!Files.exists(p)) {
+ throw new Exception(p + " file doesn't exist");
+ }
+ for(String l : Files.readAllLines(p)) {
+ if (!l.startsWith("#")) {
+ int i = l.indexOf("=");
+ if (i < 0 || i == l.length() - 1) {
+ throw new Exception("Invalid environment variable " + l + " in " + p);
+ }
+ extraEnv.put(l.substring(0, i), l.substring(i+1));
+ }
+ }
+ }
builder.setVerbose(verbose);
if (!addOns.isEmpty()) {
builder.setUserEnabledAddOns(addOns);
@@ -139,6 +172,9 @@ public Integer call() throws Exception {
if (DOCKER_IMAGE.equals(provision.get()) && !cloud.orElse(false)) {
throw new Exception("Can't produce a Docker image if cloud is not enabled. Use the " + Constants.CLOUD_OPTION + " option.");
}
+ if (OPENSHIFT.equals(provision.get()) && !cloud.orElse(false)) {
+ throw new Exception("Can't build/deploy on OpenShift if cloud is not enabled. Use the " + Constants.CLOUD_OPTION + " option.");
+ }
builder.setOutput(provision.get());
}
builder.setExcludeArchivesFromScan(excludeArchivesFromScan);
@@ -204,6 +240,10 @@ public Integer call() throws Exception {
print("@|bold Generating docker image...|@");
break;
}
+ case OPENSHIFT: {
+ print("@|bold Openshift build and deploy...|@");
+ break;
+ }
}
OutputContent content = scanResults.outputConfig(target, dockerImageName.orElse(null));
Path base = Paths.get("").toAbsolutePath();
@@ -221,7 +261,9 @@ public Integer call() throws Exception {
break;
}
case ENV_FILE: {
- print("@|bold The file " + rel + " contains the list of environment variables that you must set prior to start the server.|@");
+ if (!OutputFormat.OPENSHIFT.equals(provision.get())) {
+ print("@|bold The file " + rel + " contains the list of environment variables that you must set prior to start the server.|@");
+ }
switch (provision.get()) {
case SERVER: {
print("@|bold Export the suggested env variables for the server to take them into account.|@");
@@ -239,9 +281,14 @@ public Integer call() throws Exception {
break;
}
case PROVISIONING_XML_FILE: {
- print("@|bold Generation DONE.|@");
- print("@|bold Galleon Provisioning configuration is located in " + rel + " file|@");
+ switch (provision.get()) {
+ case PROVISIONING_XML: {
+ print("@|bold Generation DONE.|@");
+ print("@|bold Galleon Provisioning configuration is located in " + rel + " file|@");
+ }
+ }
break;
+
}
case SERVER_DIR: {
print("@|bold Provisioning DONE.|@");
@@ -254,6 +301,23 @@ public Integer call() throws Exception {
}
}
}
+ if (OutputFormat.OPENSHIFT.equals(provision.get())) {
+ String name = null;
+ for (Path p : deployments) {
+ Files.copy(p, target.resolve(p.getFileName()));
+ int ext = p.getFileName().toString().indexOf(".");
+ name = p.getFileName().toString().substring(0, ext);
+ }
+ Map envMap = new HashMap<>();
+ for(Set envs : scanResults.getSuggestions().getStronglySuggestedConfigurations().values()) {
+ for(Env env : envs) {
+ envMap.put(env.getName(), env.getDescription());
+ }
+ }
+ OpenShiftSupport.deploy(GlowMessageWriter.DEFAULT, target, name == null ? "app-from-wildfly-glow" : name, envMap, scanResults.getDiscoveredLayers(),
+ scanResults.getEnabledAddOns(), haProfile.orElse(false), extraEnv, disableDeployers.orElse(false));
+ print("@|bold Openshift build and deploy DONE.|@");
+ }
if (content.getDockerImageName() != null) {
print("@|bold To run the image call: 'docker run " + content.getDockerImageName() + "'|@");
}
diff --git a/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java b/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java
index 43d57b6..7d4c55c 100644
--- a/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java
+++ b/cli/src/main/java/org/wildfly/glow/cli/commands/ShowConfigurationCommand.java
@@ -20,6 +20,7 @@
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Optional;
+import java.util.ServiceLoader;
import java.util.Set;
import java.util.TreeSet;
import org.jboss.galleon.api.config.GalleonFeaturePackConfig;
@@ -29,6 +30,7 @@
import org.wildfly.glow.FeaturePacks;
import org.wildfly.glow.Layer;
import org.wildfly.glow.LayerMapping;
+import org.wildfly.glow.deployment.openshift.api.Deployer;
import picocli.CommandLine;
@@ -53,6 +55,18 @@ public class ShowConfigurationCommand extends AbstractCommand {
@Override
public Integer call() throws Exception {
print("Wildfly Glow is retrieving known provisioning configuration...");
+ StringBuilder ocBuilder = new StringBuilder();
+ ocBuilder.append("\nDeployers enabled when provisioning to OpenShift:\n");
+ for (Deployer d : ServiceLoader.load(Deployer.class)) {
+ ocBuilder.append("* " + d.getName() + ". Enabled when one of the following ");
+ if (!d.getSupportedLayers().isEmpty()) {
+ ocBuilder.append("layer(s) " + d.getSupportedLayers() + " is/are discovered.\n");
+ } else {
+ ocBuilder.append("add-on(s) " + d.getSupportedAddOns() + " is/are enabled.\n");
+ }
+ }
+ print(ocBuilder.toString());
+
String context = Arguments.BARE_METAL_EXECUTION_CONTEXT;
if (cloud.orElse(false)) {
context = Arguments.CLOUD_EXECUTION_CONTEXT;
diff --git a/cli/src/main/resources/UsageMessages.properties b/cli/src/main/resources/UsageMessages.properties
index b7c1e20..b675df7 100644
--- a/cli/src/main/resources/UsageMessages.properties
+++ b/cli/src/main/resources/UsageMessages.properties
@@ -31,9 +31,12 @@ suggest = WildFly Glow will suggest additional add-ons and environment variables
add-ons = List of add-ons to enable. To get the list of possible add-ons, use the @|fg(yellow) show-add-ons|@ command.
deployments = List of path to war|jar|ear files to scan.
input-feature-packs-file = Galleon feature-packs used by wildfly-glow are retrieved from an online registry. To override the set of feature-packs you can specify a path to a Galleon provisioning XML file containing the set of Galleon feature-packs to be used by wildfly-glow.
-provision = The kind of provisioning to produce based on what has been discovered. Can be @|fg(yellow) SERVER|@: a provisioned WildFly server, @|fg(yellow) BOOTABLE_JAR|@: a WildFly Bootable JAR, @|fg(yellow) DOCKER_IMAGE|@: a Docker image or @|fg(yellow) PROVISIONING_XML|@: a Galleon provisioning.xml file.
+provision = The kind of provisioning to produce based on what has been discovered. Can be @|fg(yellow) SERVER|@: a provisioned WildFly server, @|fg(yellow) BOOTABLE_JAR|@: a WildFly Bootable JAR, @|fg(yellow) DOCKER_IMAGE|@: a Docker image, @|fg(yellow) OPENSHIFT|@: a server built and deployed on OpenShift (you must be logged in to a cluster), or @|fg(yellow) PROVISIONING_XML|@: a Galleon provisioning.xml file.
output-dir = If specifying to provision, the directory where the result will be output.
wildfly-preview = Use only WildFly preview feature-packs as input.
+env-file = The path to a file that contains environment variables (in the form env=value) to be passed to the OpenShift deployment. Can only be used with @|fg(yellow) OPENSHIFT|@ kind of provisioning.
+disable-deployers = Set this option in order to disable the deployment of additional deployments (e.g.: database, jms broker, ...). Can only be used with @|fg(yellow) OPENSHIFT|@ kind of provisioning.
+
usage.synopsisHeading = %nUsage:\u0020
# for the main command do not prepend with new line character:
wildfly-glow.usage.synopsisHeading = Usage:\u0020
diff --git a/core/src/main/java/org/wildfly/glow/GlowSession.java b/core/src/main/java/org/wildfly/glow/GlowSession.java
index b0f6a99..54fe702 100644
--- a/core/src/main/java/org/wildfly/glow/GlowSession.java
+++ b/core/src/main/java/org/wildfly/glow/GlowSession.java
@@ -505,6 +505,8 @@ OutputContent outputConfig(ScanResults scanResults, Path target, String dockerIm
if (scanResults.getErrorSession().hasErrors()) {
writer.warn("You are provisioning a server although some errors still exist. You should first fix them.");
}
+ }
+ if (!OutputFormat.PROVISIONING_XML.equals(arguments.getOutput()) && !OutputFormat.OPENSHIFT.equals(arguments.getOutput())) {
Path generatedArtifact = provisionServer(arguments.getBinaries(),
scanResults.getProvisioningConfig(), resolver, arguments.getOutput(),
arguments.isCloud(), target);
@@ -529,8 +531,18 @@ OutputContent outputConfig(ScanResults scanResults, Path target, String dockerIm
files.put(OutputContent.OutputFile.SERVER_DIR, generatedArtifact.toAbsolutePath());
break;
}
+ case OPENSHIFT: {
+ Files.createDirectories(target.resolve("galleon"));
+ Path prov = target.resolve("provisioning.xml");
+ provisioning.storeProvisioningConfig(scanResults.getProvisioningConfig(),prov);
+ files.put(OutputContent.OutputFile.PROVISIONING_XML_FILE, prov.toAbsolutePath());
+ break;
+ }
}
} else {
+ if (OutputFormat.OPENSHIFT.equals(arguments.getOutput())) {
+ target = target.resolve("galleon");
+ }
Files.createDirectories(target);
Path prov = target.resolve("provisioning.xml");
provisioning.storeProvisioningConfig(scanResults.getProvisioningConfig(),prov);
diff --git a/core/src/main/java/org/wildfly/glow/OutputFormat.java b/core/src/main/java/org/wildfly/glow/OutputFormat.java
index f2d9f8d..dfe7378 100644
--- a/core/src/main/java/org/wildfly/glow/OutputFormat.java
+++ b/core/src/main/java/org/wildfly/glow/OutputFormat.java
@@ -25,7 +25,8 @@ public enum OutputFormat {
PROVISIONING_XML("provisioning.xml", "Galleon provisioning file usable with Galleon tooling."),
SERVER("server", "Provision a WildFly server."),
BOOTABLE_JAR("bootable-jar", "Provision a WildFly bootable jar."),
- DOCKER_IMAGE("docker-image", "Produce a docker image.");
+ DOCKER_IMAGE("docker-image", "Produce a docker image."),
+ OPENSHIFT("openshift", "Build and deploy on OpenShift.");
public final String name;
public final String description;
diff --git a/docs/guide/intro/index.adoc b/docs/guide/intro/index.adoc
index 879541c..83b4e16 100644
--- a/docs/guide/intro/index.adoc
+++ b/docs/guide/intro/index.adoc
@@ -34,7 +34,44 @@ WildFly Glow does more than identifying Galleon Feature-packs and Layers.
#### Provisioning
-WildFly Glow CLI allows you to provision a WildFly server, a WildFly Bootable JAR or produce a Docker image.
+WildFly Glow CLI allows you to provision a WildFly server, a WildFly Bootable JAR, produce a Docker image and deploy your application to OpenShift.
+
+##### OpenShift provisioning
+
+A good way to start with OpenShift is by using the link:https://developers.redhat.com/developer-sandbox[OpenShift Sandbox].
+You can create a cluster in a few clicks.
+
+###### Pre-requisites
+
+Once you have a cluster up and running, there are a few steps needed in order for WildFly Glow to log onto the cluster:
+
+* Download the `oc` command from your cluster. Click on the `?` icon on the top right corner of the OpenShift admin console, then select `Command Line Tools`,
+then download `oc` for your platform.
+* Retrieve the command to log in to the cluster. Click on your user name on the top right corner,
+then select `Copy login command`. This will open a page, copy the login command.
+That should be something like: `oc login --token=sha256~ITC16QZxiVk5vm7NCdrRIx2yqvlB-L_6Wg-BrtIhnLE --server=https://api.sandbox-m3.1530.p1.openshiftapps.com:6443`
+
+* Paste the login command in your terminal.
+
+WildFly Glow can now interact with your cluster.
+
+###### Deploying to OpenShift
+
+That is done from the `wildfly-glow` command line tool. You can specify `--cloud --provision OPENSHIFT` to the `scan` command. For example:
+
+`wildfly-glow scan examples/kitchensink.war --cloud --provision OPENSHIFT`
+
+The kitchensink war file is analyzed, the Galleon configuration is generated, then both the war and the configuration are sent to OpenShift to start a server
+provisioning and create your application deployment.
+
+At the end of the build, the application is deployed and the route to your application inside the cluster is printed.
+Use it to interact with your application.
+
+###### Automatic deployment of PostgreSQL, Artemis JMS Broker and Keycloak
+
+If WildFly Glow detects the need for these technologies, it will automatically deploy the required servers and will bind the application to them.
+
+This feature is currently specified by this link:https://github.com/wildfly/wildfly-glow/issues/49[GitHub Issue].
#### WildFly additional features discovery
diff --git a/openshift-deployment/api/pom.xml b/openshift-deployment/api/pom.xml
new file mode 100644
index 0000000..c9d70be
--- /dev/null
+++ b/openshift-deployment/api/pom.xml
@@ -0,0 +1,23 @@
+
+
+ 4.0.0
+
+ org.wildfly.glow
+ wildfly-glow-openshift-deployment
+ 1.0.0.Beta9-SNAPSHOT
+
+
+ wildfly-glow-openshift-deployment-api
+ jar
+
+
+ ${project.groupId}
+ wildfly-glow-core
+
+
+ io.fabric8
+ openshift-client
+
+
+
\ No newline at end of file
diff --git a/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Deployer.java b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Deployer.java
new file mode 100644
index 0000000..7d79034
--- /dev/null
+++ b/openshift-deployment/api/src/main/java/org/wildfly/glow/deployment/openshift/api/Deployer.java
@@ -0,0 +1,53 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2024 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.wildfly.glow.deployment.openshift.api;
+
+import io.fabric8.openshift.client.OpenShiftClient;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+import org.wildfly.glow.GlowMessageWriter;
+
+/**
+ *
+ * @author jdenise
+ */
+public interface Deployer {
+
+ static final String LABEL = "deployment";
+
+ String getName();
+
+ Map deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient, Map env, String appHost, String appName, String matching) throws Exception;
+
+ default Map disabledDeploy(String appHost, String appName, String matching) {
+ return Collections.emptyMap();
+ }
+
+ default Set getSupportedLayers() {
+ return Collections.emptySet();
+ }
+
+ default String getSupportedAddOnFamily() {
+ return null;
+ }
+
+ default Set getSupportedAddOns() {
+ return Collections.emptySet();
+ }
+}
diff --git a/openshift-deployment/artemis-broker/pom.xml b/openshift-deployment/artemis-broker/pom.xml
new file mode 100644
index 0000000..1a780cf
--- /dev/null
+++ b/openshift-deployment/artemis-broker/pom.xml
@@ -0,0 +1,19 @@
+
+
+ 4.0.0
+
+ org.wildfly.glow
+ wildfly-glow-openshift-deployment
+ 1.0.0.Beta9-SNAPSHOT
+
+
+ wildfly-glow-openshift-deployment-artemis
+ jar
+
+
+ ${project.groupId}
+ wildfly-glow-openshift-deployment-api
+
+
+
\ No newline at end of file
diff --git a/openshift-deployment/artemis-broker/src/main/java/org/wildfly/glow/deployment/openshift/artemis/ArtemisDeployer.java b/openshift-deployment/artemis-broker/src/main/java/org/wildfly/glow/deployment/openshift/artemis/ArtemisDeployer.java
new file mode 100644
index 0000000..84e6353
--- /dev/null
+++ b/openshift-deployment/artemis-broker/src/main/java/org/wildfly/glow/deployment/openshift/artemis/ArtemisDeployer.java
@@ -0,0 +1,128 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2024 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.wildfly.glow.deployment.openshift.artemis;
+
+import io.fabric8.kubernetes.api.model.Container;
+import io.fabric8.kubernetes.api.model.ContainerPort;
+import io.fabric8.kubernetes.api.model.EnvVar;
+import io.fabric8.kubernetes.api.model.IntOrString;
+import io.fabric8.kubernetes.api.model.Service;
+import io.fabric8.kubernetes.api.model.ServiceBuilder;
+import io.fabric8.kubernetes.api.model.ServicePort;
+import io.fabric8.kubernetes.api.model.apps.Deployment;
+import io.fabric8.kubernetes.api.model.apps.DeploymentBuilder;
+import io.fabric8.kubernetes.client.dsl.NonDeletingOperation;
+import io.fabric8.kubernetes.client.utils.Serialization;
+import io.fabric8.openshift.client.OpenShiftClient;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.wildfly.glow.GlowMessageWriter;
+import org.wildfly.glow.deployment.openshift.api.Deployer;
+
+/**
+ *
+ * @author jdenise
+ */
+public class ArtemisDeployer implements Deployer {
+
+ private static final String REMOTE_BROKER_NAME = "artemis-broker";
+ private static final String REMOTE_BROKER_PASSWORD = "admin";
+ private static final String REMOTE_BROKER_USER = "admin";
+
+ private static final Map REMOTE_BROKER_CONNECTION_MAP = new HashMap<>();
+ private static final Map REMOTE_BROKER_APP_MAP = new HashMap<>();
+
+ static {
+
+ REMOTE_BROKER_CONNECTION_MAP.put("AMQ_USER", REMOTE_BROKER_USER);
+ REMOTE_BROKER_CONNECTION_MAP.put("AMQ_PASSWORD", REMOTE_BROKER_PASSWORD);
+ REMOTE_BROKER_CONNECTION_MAP.put("AMQ_DATA_DIR", "/home/jboss/data");
+
+ REMOTE_BROKER_APP_MAP.put("MQ_SERVICE_PREFIX_MAPPING", "broker-amq7=BROKER_AMQ");
+ REMOTE_BROKER_APP_MAP.put("MQ_SERVICE_PREFIX_MAPPING", "broker-amq7=BROKER_AMQ");
+ REMOTE_BROKER_APP_MAP.put("BROKER_AMQ_TCP_SERVICE_HOST", REMOTE_BROKER_NAME);
+ REMOTE_BROKER_APP_MAP.put("BROKER_AMQ_TCP_SERVICE_PORT", "61616");
+ REMOTE_BROKER_APP_MAP.put("BROKER_AMQ_PASSWORD", REMOTE_BROKER_PASSWORD);
+ REMOTE_BROKER_APP_MAP.put("BROKER_AMQ_USERNAME", REMOTE_BROKER_USER);
+ }
+
+ @Override
+ public Map deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient,
+ Map env, String appHost, String appName, String matching) throws Exception {
+ writer.info("\nDeploying Artemis Messaging Broker");
+ Map labels = new HashMap<>();
+ labels.put(LABEL, REMOTE_BROKER_NAME);
+ ContainerPort port = new ContainerPort();
+ port.setContainerPort(61616);
+ port.setProtocol("TCP");
+ List ports = new ArrayList<>();
+ ports.add(port);
+ List vars = new ArrayList<>();
+ for (Map.Entry entry : REMOTE_BROKER_CONNECTION_MAP.entrySet()) {
+ vars.add(new EnvVar().toBuilder().withName(entry.getKey()).withValue(entry.getValue()).build());
+ }
+ Container container = new Container();
+ container.setName(REMOTE_BROKER_NAME);
+ container.setImage("quay.io/artemiscloud/activemq-artemis-broker-kubernetes");
+ container.setPorts(ports);
+ container.setEnv(vars);
+ container.setImagePullPolicy("IfNotPresent");
+
+ Deployment deployment = new DeploymentBuilder().withNewMetadata().withName(REMOTE_BROKER_NAME).endMetadata().
+ withNewSpec().withReplicas(1).
+ withNewSelector().withMatchLabels(labels).endSelector().
+ withNewTemplate().withNewMetadata().withLabels(labels).endMetadata().withNewSpec().
+ withContainers(container).withRestartPolicy("Always").
+ endSpec().endTemplate().withNewStrategy().withType("RollingUpdate").endStrategy().endSpec().build();
+ osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update);
+ Files.write(target.resolve(REMOTE_BROKER_NAME + "-deployment.yaml"), Serialization.asYaml(deployment).getBytes());
+ IntOrString v = new IntOrString();
+ v.setValue(61616);
+ Service service = new ServiceBuilder().withNewMetadata().withName(REMOTE_BROKER_NAME).endMetadata().
+ withNewSpec().withPorts(new ServicePort().toBuilder().withName("61616-tcp").withProtocol("TCP").
+ withPort(61616).
+ withTargetPort(v).build()).withType("ClusterIP").withSessionAffinity("None").withSelector(labels).endSpec().build();
+ osClient.services().resource(service).createOr(NonDeletingOperation::update);
+ Files.write(target.resolve(REMOTE_BROKER_NAME + "-service.yaml"), Serialization.asYaml(service).getBytes());
+ return REMOTE_BROKER_APP_MAP;
+ }
+
+ @Override
+ public String getSupportedAddOnFamily() {
+ return "messaging";
+ }
+
+ @Override
+ public Set getSupportedAddOns() {
+ Set ret = new HashSet<>();
+ ret.add("cloud-remote-activemq");
+ return ret;
+ }
+
+ @Override
+ public String getName() {
+ return "Artemis Broker";
+ }
+
+}
diff --git a/openshift-deployment/artemis-broker/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer b/openshift-deployment/artemis-broker/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
new file mode 100644
index 0000000..6d47519
--- /dev/null
+++ b/openshift-deployment/artemis-broker/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
@@ -0,0 +1 @@
+org.wildfly.glow.deployment.openshift.artemis.ArtemisDeployer
diff --git a/openshift-deployment/keycloak/pom.xml b/openshift-deployment/keycloak/pom.xml
new file mode 100644
index 0000000..9c4174e
--- /dev/null
+++ b/openshift-deployment/keycloak/pom.xml
@@ -0,0 +1,19 @@
+
+
+ 4.0.0
+
+ org.wildfly.glow
+ wildfly-glow-openshift-deployment
+ 1.0.0.Beta9-SNAPSHOT
+
+
+ wildfly-glow-openshift-deployment-keycloak
+ jar
+
+
+ ${project.groupId}
+ wildfly-glow-openshift-deployment-api
+
+
+
\ No newline at end of file
diff --git a/openshift-deployment/keycloak/src/main/java/org/wildfly/glow/deployment/openshift/keycloak/KeycloakDeployer.java b/openshift-deployment/keycloak/src/main/java/org/wildfly/glow/deployment/openshift/keycloak/KeycloakDeployer.java
new file mode 100644
index 0000000..29d8095
--- /dev/null
+++ b/openshift-deployment/keycloak/src/main/java/org/wildfly/glow/deployment/openshift/keycloak/KeycloakDeployer.java
@@ -0,0 +1,123 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2024 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.wildfly.glow.deployment.openshift.keycloak;
+
+import io.fabric8.kubernetes.api.model.KubernetesList;
+import io.fabric8.kubernetes.client.dsl.NonDeletingOperation;
+import io.fabric8.kubernetes.client.utils.Serialization;
+import io.fabric8.openshift.api.model.Route;
+import io.fabric8.openshift.api.model.RouteBuilder;
+import io.fabric8.openshift.api.model.Template;
+import io.fabric8.openshift.client.OpenShiftClient;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import org.wildfly.glow.GlowMessageWriter;
+import org.wildfly.glow.deployment.openshift.api.Deployer;
+
+/**
+ *
+ * @author jdenise
+ */
+public class KeycloakDeployer implements Deployer {
+
+ private static final String KEYCLOAK_TEMPLATE_URL = "https://raw.githubusercontent.com/keycloak/keycloak-quickstarts/23.0.7/openshift/keycloak.yaml";
+ private static final String KEYCLOAK_NAME = "keycloak";
+ private static final String WILDFLY_REALM_PATH = "/realms/WildFly";
+ private static final String KEYCLOAK_ADMIN = "admin";
+ private static final String KEYCLOAK_ADMIN_PASSWORD = "admin";
+ private static final String KEYCLOAK_DEMO = "demo";
+ private static final String KEYCLOAK_DEMO_PASSWORD = "demo";
+ private static final String KEYCLOAK_ADMIN_ENV = "KEYCLOAK_ADMIN";
+ private static final String KEYCLOAK_ADMIN_PASSWORD_ENV = "KEYCLOAK_ADMIN_PASSWORD";
+ private static final String OIDC_PROVIDER_URL_ENV = "OIDC_PROVIDER_URL";
+ private static final String OIDC_PROVIDER_NAME_ENV = "OIDC_PROVIDER_NAME";
+ private static final String OIDC_SECURE_DEPLOYMENT_SECRET_ENV = "OIDC_SECURE_DEPLOYMENT_SECRET";
+ private static final String OIDC_USER_NAME_ENV = "OIDC_USER_NAME";
+ private static final String OIDC_USER_PASSWORD_ENV = "OIDC_USER_PASSWORD";
+ private static final String OIDC_HOSTNAME_HTTPS_ENV = "OIDC_HOSTNAME_HTTPS";
+ private static final String MYSECRET = "mysecret";
+ private static final String NAMESPACE_ENV = "NAMESPACE";
+
+ @Override
+ public Map disabledDeploy(String appHost, String appName, String matching) {
+ Map ret = new HashMap<>();
+ ret.put(OIDC_HOSTNAME_HTTPS_ENV, appHost);
+ return ret;
+ }
+ @Override
+ public Map deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient, Map env,
+ String appHost, String appName, String matching) throws Exception {
+ writer.info("\nDeploying Keycloak server");
+ Map parameters = new HashMap<>();
+ parameters.put(KEYCLOAK_ADMIN_ENV, KEYCLOAK_ADMIN);
+ parameters.put(KEYCLOAK_ADMIN_PASSWORD_ENV, KEYCLOAK_ADMIN_PASSWORD);
+ parameters.put(NAMESPACE_ENV, osClient.getNamespace());
+ Template t = osClient.templates().
+ load(new URL(KEYCLOAK_TEMPLATE_URL)).createOr(NonDeletingOperation::update);
+ final KubernetesList processedTemplateWithCustomParameters = osClient.templates().
+ withName(KEYCLOAK_NAME)
+ .process(parameters);
+ osClient.resourceList(processedTemplateWithCustomParameters).createOrReplace();
+ Files.write(target.resolve(KEYCLOAK_NAME + "-resources.yaml"), Serialization.asYaml(processedTemplateWithCustomParameters).getBytes());
+ Route route = new RouteBuilder().withNewMetadata().withName(KEYCLOAK_NAME).
+ endMetadata().build();
+ String host = osClient.routes().resource(route).get().getSpec().getHost();
+ String url = "https://" + host;
+ writer.info("\nKeycloak route: " + url);
+ Map retEnv = new HashMap<>();
+ String realmUrl = url + WILDFLY_REALM_PATH;
+ writer.warn("\nNOTE: Some actions must be taken from the keycloack console.");
+ writer.warn("1- Use admin/admin to log to the console " + url);
+ writer.warn("2- Create a realm named WildFly");
+ writer.warn("3- Create a user named demo, password demo");
+ writer.warn("4- Create a role needed by your application and assign it to the demo user");
+ if (env.containsKey(OIDC_PROVIDER_URL_ENV)) {
+ writer.warn("5- Assign the role 'realm-management create-client' to the demo user");
+ } else {
+ writer.warn("5 - Create an OIDC Client named the way your OIDC configuration expects it. "
+ + "Set its Root URL to 'https://" + appHost + ("ROOT.war".equals(appName) ? "" : "/" + appName) + "'");
+ }
+ retEnv.put(OIDC_PROVIDER_URL_ENV, realmUrl);
+ if (env.containsKey(OIDC_PROVIDER_URL_ENV)) {
+ retEnv.put(OIDC_PROVIDER_NAME_ENV, KEYCLOAK_NAME);
+ retEnv.put(OIDC_SECURE_DEPLOYMENT_SECRET_ENV, MYSECRET);
+ retEnv.put(OIDC_USER_NAME_ENV, KEYCLOAK_DEMO);
+ retEnv.put(OIDC_USER_PASSWORD_ENV, KEYCLOAK_DEMO_PASSWORD);
+ retEnv.put(OIDC_HOSTNAME_HTTPS_ENV, appHost);
+ }
+ return retEnv;
+ }
+
+ @Override
+ public Set getSupportedLayers() {
+ Set ret = new HashSet<>();
+ ret.add("elytron-oidc-client");
+ return ret;
+ }
+
+ @Override
+ public String getName() {
+ return KEYCLOAK_NAME;
+ }
+
+}
diff --git a/openshift-deployment/keycloak/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer b/openshift-deployment/keycloak/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
new file mode 100644
index 0000000..d5ea509
--- /dev/null
+++ b/openshift-deployment/keycloak/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
@@ -0,0 +1,2 @@
+org.wildfly.glow.deployment.openshift.keycloak.KeycloakDeployer
+
diff --git a/openshift-deployment/pom.xml b/openshift-deployment/pom.xml
new file mode 100644
index 0000000..b159180
--- /dev/null
+++ b/openshift-deployment/pom.xml
@@ -0,0 +1,25 @@
+
+
+ 4.0.0
+
+ org.wildfly.glow
+ wildfly-glow-parent
+ 1.0.0.Beta9-SNAPSHOT
+
+
+ wildfly-glow-openshift-deployment
+ pom
+
+ UTF-8
+ 11
+ 11
+ ../checkstyle/checkstyle.xml
+
+
+ api
+ keycloak
+ postgresql
+ artemis-broker
+
+
\ No newline at end of file
diff --git a/openshift-deployment/postgresql/pom.xml b/openshift-deployment/postgresql/pom.xml
new file mode 100644
index 0000000..671e07f
--- /dev/null
+++ b/openshift-deployment/postgresql/pom.xml
@@ -0,0 +1,19 @@
+
+
+ 4.0.0
+
+ org.wildfly.glow
+ wildfly-glow-openshift-deployment
+ 1.0.0.Beta9-SNAPSHOT
+
+
+ wildfly-glow-openshift-deployment-postgresql
+ jar
+
+
+ ${project.groupId}
+ wildfly-glow-openshift-deployment-api
+
+
+
\ No newline at end of file
diff --git a/openshift-deployment/postgresql/src/main/java/org/wildfly/glow/deployment/openshift/postgresql/PostgreSQLDeployer.java b/openshift-deployment/postgresql/src/main/java/org/wildfly/glow/deployment/openshift/postgresql/PostgreSQLDeployer.java
new file mode 100644
index 0000000..88b61df
--- /dev/null
+++ b/openshift-deployment/postgresql/src/main/java/org/wildfly/glow/deployment/openshift/postgresql/PostgreSQLDeployer.java
@@ -0,0 +1,132 @@
+/*
+ * JBoss, Home of Professional Open Source.
+ * Copyright 2024 Red Hat, Inc., and individual contributors
+ * as indicated by the @author tags.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.wildfly.glow.deployment.openshift.postgresql;
+
+import io.fabric8.kubernetes.api.model.Container;
+import io.fabric8.kubernetes.api.model.ContainerPort;
+import io.fabric8.kubernetes.api.model.EnvVar;
+import io.fabric8.kubernetes.api.model.IntOrString;
+import io.fabric8.kubernetes.api.model.Service;
+import io.fabric8.kubernetes.api.model.ServiceBuilder;
+import io.fabric8.kubernetes.api.model.ServicePort;
+import io.fabric8.kubernetes.api.model.apps.Deployment;
+import io.fabric8.kubernetes.api.model.apps.DeploymentBuilder;
+import io.fabric8.kubernetes.client.dsl.NonDeletingOperation;
+import io.fabric8.kubernetes.client.utils.Serialization;
+import io.fabric8.openshift.client.OpenShiftClient;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.wildfly.glow.GlowMessageWriter;
+import org.wildfly.glow.deployment.openshift.api.Deployer;
+
+/**
+ *
+ * @author jdenise
+ */
+public class PostgreSQLDeployer implements Deployer {
+
+ private static final String POSTGRESQL_NAME = "postgresql";
+ private static final String POSTGRESQL_SAMPLEDB = "sampledb";
+ private static final String POSTGRESQL_PASSWORD = "admin";
+ private static final String POSTGRESQL_USER = "admin";
+ private static final String POSTGRESQL_SERVICE_PORT = "5432";
+ private static final String POSTGRESQL_SERVICE_HOST = POSTGRESQL_NAME;
+
+ private static final String POSTGRESQL_SERVICE_PORT_ENV = "POSTGRESQL_SERVICE_PORT";
+ private static final String POSTGRESQL_SERVICE_HOST_ENV = "POSTGRESQL_SERVICE_HOST";
+ private static final Map POSTGRESQL_CONNECTION_MAP = new HashMap<>();
+ private static final Map POSTGRESQL_APP_MAP = new HashMap<>();
+
+ static {
+ POSTGRESQL_CONNECTION_MAP.put("POSTGRESQL_DATABASE", POSTGRESQL_SAMPLEDB);
+ POSTGRESQL_CONNECTION_MAP.put("POSTGRESQL_PASSWORD", POSTGRESQL_PASSWORD);
+ POSTGRESQL_CONNECTION_MAP.put("POSTGRESQL_USER", POSTGRESQL_USER);
+ POSTGRESQL_APP_MAP.putAll(POSTGRESQL_CONNECTION_MAP);
+ POSTGRESQL_APP_MAP.put(POSTGRESQL_SERVICE_PORT_ENV, POSTGRESQL_SERVICE_PORT);
+ POSTGRESQL_APP_MAP.put(POSTGRESQL_SERVICE_HOST_ENV, POSTGRESQL_SERVICE_HOST);
+ }
+
+ @Override
+ public Map disabledDeploy(String appHost, String appName, String matching) {
+ Map ret = new HashMap<>();
+ ret.put(POSTGRESQL_SERVICE_HOST_ENV, "PostgreSQL server host name.");
+ ret.put(POSTGRESQL_SERVICE_PORT_ENV, "PostgreSQL server port.");
+ return ret;
+ }
+
+ @Override
+ public Map deploy(GlowMessageWriter writer, Path target, OpenShiftClient osClient,
+ Map env, String appHost, String appName, String matching) throws Exception {
+ writer.info("\nDeploying PosgreSQL server");
+ Map labels = new HashMap<>();
+ labels.put(LABEL, POSTGRESQL_NAME);
+ ContainerPort port = new ContainerPort();
+ port.setContainerPort(5432);
+ port.setProtocol("TCP");
+ List ports = new ArrayList<>();
+ ports.add(port);
+ List vars = new ArrayList<>();
+ for (Map.Entry entry : POSTGRESQL_CONNECTION_MAP.entrySet()) {
+ vars.add(new EnvVar().toBuilder().withName(entry.getKey()).withValue(entry.getValue()).build());
+ }
+ Container container = new Container();
+ container.setName(POSTGRESQL_NAME);
+ container.setImage("registry.redhat.io/rhel8/postgresql-15");
+ container.setPorts(ports);
+ container.setEnv(vars);
+ container.setImagePullPolicy("IfNotPresent");
+
+ Deployment deployment = new DeploymentBuilder().withNewMetadata().withName(POSTGRESQL_NAME).endMetadata().
+ withNewSpec().withReplicas(1).
+ withNewSelector().withMatchLabels(labels).endSelector().
+ withNewTemplate().withNewMetadata().withLabels(labels).endMetadata().withNewSpec().
+ withContainers(container).withRestartPolicy("Always").
+ endSpec().endTemplate().withNewStrategy().withType("RollingUpdate").endStrategy().endSpec().build();
+ osClient.resources(Deployment.class).resource(deployment).createOr(NonDeletingOperation::update);
+ Files.write(target.resolve(POSTGRESQL_NAME + "-deployment.yaml"), Serialization.asYaml(deployment).getBytes());
+ IntOrString v = new IntOrString();
+ v.setValue(5432);
+ Service service = new ServiceBuilder().withNewMetadata().withName(POSTGRESQL_NAME).endMetadata().
+ withNewSpec().withPorts(new ServicePort().toBuilder().withName("5432-tcp").withProtocol("TCP").
+ withPort(5432).
+ withTargetPort(v).build()).withType("ClusterIP").withSessionAffinity("None").withSelector(labels).endSpec().build();
+ osClient.services().resource(service).createOr(NonDeletingOperation::update);
+ Files.write(target.resolve(POSTGRESQL_NAME + "-service.yaml"), Serialization.asYaml(service).getBytes());
+ return POSTGRESQL_APP_MAP;
+ }
+
+ @Override
+ public Set getSupportedLayers() {
+ Set ret = new HashSet<>();
+ ret.add("postgresql-datasource");
+ ret.add("postgresql-driver");
+ return ret;
+ }
+
+ @Override
+ public String getName() {
+ return "PostgreSQL database";
+ }
+
+}
diff --git a/openshift-deployment/postgresql/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer b/openshift-deployment/postgresql/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
new file mode 100644
index 0000000..7c04fbd
--- /dev/null
+++ b/openshift-deployment/postgresql/src/main/resources/META-INF/services/org.wildfly.glow.deployment.openshift.api.Deployer
@@ -0,0 +1,2 @@
+org.wildfly.glow.deployment.openshift.postgresql.PostgreSQLDeployer
+
diff --git a/pom.xml b/pom.xml
index 2351628..45b0dd5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -40,6 +40,7 @@
3.6.4
3.2.1
2.22.2
+ 6.10.0
1.7.0.Alpha13
2.0.0
@@ -217,6 +218,26 @@
wildfly-glow-arquillian-plugin-scanner
${project.version}
+
+ ${project.groupId}
+ wildfly-glow-openshift-deployment-api
+ ${project.version}
+
+
+ ${project.groupId}
+ wildfly-glow-openshift-deployment-postgresql
+ ${project.version}
+
+
+ ${project.groupId}
+ wildfly-glow-openshift-deployment-artemis
+ ${project.version}
+
+
+ ${project.groupId}
+ wildfly-glow-openshift-deployment-keycloak
+ ${project.version}
+
info.picocli
picocli
@@ -408,6 +429,11 @@
maven-resolver
${version.org.wildfly.channel}
+
+ io.fabric8
+ openshift-client
+ ${version.io.fabric8}
+
jakarta.annotation
@@ -483,6 +509,7 @@
cli
docs
doc-plugin
+ openshift-deployment
maven-resolver
tests