typeFilters = filters.getFilter("type");
- if (typeFilters == null || !typeFilters.contains("custom")) {
- Network network = inspectNetwork("openshift");
- networks.add(network);
- }
- return networks;
- }
-
- /**
- * Creates an ImageStream that tracks the repository.
- *
- * Note: This method does not cause the relevant image to actually be pulled to the local
- * repository, but creating the ImageStream is necessary as it is used to obtain the address of
- * the internal Docker registry later.
- *
- * @see DockerConnector#pull(PullParams, ProgressMonitor)
- */
- @Override
- public void pull(final PullParams params, final ProgressMonitor progressMonitor)
- throws IOException {
-
- String repo = params.getFullRepo(); // image to be pulled
- String tag = params.getTag(); // e.g. latest, usually
-
- String imageStreamName = KubernetesStringUtils.convertPullSpecToImageStreamName(repo);
- ImageStream existingImageStream;
-
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- existingImageStream =
- openShiftClient
- .imageStreams()
- .inNamespace(openShiftCheProjectName)
- .withName(imageStreamName)
- .get();
- }
-
- if (existingImageStream == null) {
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- openShiftClient
- .imageStreams()
- .inNamespace(openShiftCheProjectName)
- .createNew()
- .withNewMetadata()
- .withName(imageStreamName) // imagestream id
- .endMetadata()
- .withNewSpec()
- .addNewTag()
- .withName(tag)
- .endTag()
- .withDockerImageRepository(repo) // tracking repo
- .endSpec()
- .withNewStatus()
- .withDockerImageRepository("")
- .endStatus()
- .done();
- }
- }
-
- // Wait for Image metadata to be obtained.
- ImageStream createdImageStream;
- for (int waitCount = 0; waitCount < OPENSHIFT_IMAGESTREAM_MAX_WAIT_COUNT; waitCount++) {
- try {
- Thread.sleep(OPENSHIFT_IMAGESTREAM_WAIT_DELAY);
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- }
-
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- createdImageStream =
- openShiftClient
- .imageStreams()
- .inNamespace(openShiftCheProjectName)
- .withName(imageStreamName)
- .get();
- }
-
- if (createdImageStream != null
- && createdImageStream.getStatus().getDockerImageRepository() != null) {
- LOG.info(String.format("Created ImageStream %s.", imageStreamName));
- return;
- }
- }
-
- throw new OpenShiftException(
- String.format("Failed to create ImageStream %s.", imageStreamName));
- }
-
/**
 * Creates an ImageStreamTag that tracks a given image.
 *
 * <p>Docker tags are used extensively in Che: all workspaces run on tagged images tracking built
 * stacks. For new workspaces, or when snapshots are not used, the tracked image is e.g. {@code
 * eclipse/ubuntu_jdk8}, whereas for snapshotted workspaces, the tracked image is the snapshot
 * (e.g. {@code machine_snapshot-<id>}).
 *
 * <p>Since OpenShift does not support the same tagging functionality as Docker, tags are
 * implemented as ImageStreamTags, where the {@code From} field is always the original image, and
 * the ImageStreamTag name is derived from both the source image and the target image. This
 * replicates functionality for Che in Docker, while working differently under the hood. The
 * ImageStream name is derived from the image that is being tracked (e.g. {@code
 * eclipse/ubuntu_jdk8}), while the tag name is derived from the target image (e.g. {@code
 * eclipse-che/che_workspace}).
 *
 * @see DockerConnector#tag(TagParams)
 */
@Override
public void tag(final TagParams params) throws IOException {
  // E.g. `docker tag sourceImage targetImage`
  String paramsSourceImage = params.getImage(); // e.g. eclipse/ubuntu_jdk8
  String targetImage = params.getRepository(); // e.g. eclipse-che/<target repo>
  String paramsTag = params.getTag();

  String sourceImage = KubernetesStringUtils.stripTagFromPullSpec(paramsSourceImage);
  String tag = KubernetesStringUtils.getTagNameFromPullSpec(paramsSourceImage);
  // No tag embedded in the pull spec: fall back to the explicit tag param, then "latest".
  if (isNullOrEmpty(tag)) {
    tag = !isNullOrEmpty(paramsTag) ? paramsTag : "latest";
  }

  String sourceImageWithTag;
  // Check if sourceImage matches existing imageStreamTag (e.g. when tagging a snapshot).
  // If it does, track the image that tag already tracks, not the snapshot name itself.
  try {
    String sourceImageTagName = KubernetesStringUtils.convertPullSpecToTagName(sourceImage);
    ImageStreamTag existingTag = getImageStreamTagFromRepo(sourceImageTagName);
    sourceImageWithTag = existingTag.getTag().getFrom().getName();
  } catch (IOException e) {
    // Image not found: expected when tagging an ordinary (non-snapshot) image.
    sourceImageWithTag = String.format("%s:%s", sourceImage, tag);
  }

  String imageStreamTagName =
      KubernetesStringUtils.createImageStreamTagName(sourceImageWithTag, targetImage);

  createImageStreamTag(sourceImageWithTag, imageStreamTagName);
}
-
- @Override
- public ImageInfo inspectImage(InspectImageParams params) throws IOException {
-
- String image = KubernetesStringUtils.getImageStreamNameFromPullSpec(params.getImage());
-
- String imageStreamTagName = KubernetesStringUtils.convertPullSpecToTagName(image);
- ImageStreamTag imageStreamTag = getImageStreamTagFromRepo(imageStreamTagName);
-
- return getImageInfoFromTag(imageStreamTag);
- }
-
- @Override
- public void removeImage(final RemoveImageParams params) throws IOException {
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- String image = KubernetesStringUtils.getImageStreamNameFromPullSpec(params.getImage());
- String imageStreamTagName = KubernetesStringUtils.convertPullSpecToTagName(image);
- ImageStreamTag imageStreamTag = getImageStreamTagFromRepo(imageStreamTagName);
- openShiftClient.resource(imageStreamTag).delete();
- }
- }
-
- /**
- * OpenShift does not support taking image snapshots since the underlying assumption is that Pods
- * are largely immutable (and so any snapshot would be identical to the image used to create the
- * pod). Che uses docker commit to create machine snapshots, which are used to restore workspaces.
- * To emulate this functionality in OpenShift, commit actually creates a new ImageStreamTag by
- * calling {@link OpenShiftConnector#tag(TagParams)} named for the snapshot that would be created.
- *
- * @see DockerConnector#commit(CommitParams)
- */
- @Override
- public String commit(final CommitParams params) throws IOException {
- String repo = params.getRepository(); // e.g. machine_snapshot_mdkfmksdfm
- String container = params.getContainer(); // container ID
-
- Pod pod = getChePodByContainerId(container);
- String image = pod.getSpec().getContainers().get(0).getImage();
- String imageStreamTagName = KubernetesStringUtils.getTagNameFromPullSpec(image);
-
- ImageStreamTag imageStreamTag = getImageStreamTagFromRepo(imageStreamTagName);
- String sourcePullSpec = imageStreamTag.getTag().getFrom().getName();
- String trackingRepo = KubernetesStringUtils.stripTagFromPullSpec(sourcePullSpec);
- String tag = KubernetesStringUtils.getTagNameFromPullSpec(sourcePullSpec);
-
- tag(TagParams.create(trackingRepo, repo).withTag(tag));
-
- return repo; // Return value not used.
- }
-
- @Override
- public void getEvents(final GetEventsParams params, MessageProcessor messageProcessor) {
- CountDownLatch waitForClose = new CountDownLatch(1);
- Watcher eventWatcher =
- new Watcher() {
- @Override
- public void eventReceived(Action action, io.fabric8.kubernetes.api.model.Event event) {
- // Do nothing;
- }
-
- @Override
- public void onClose(KubernetesClientException e) {
- if (e == null) {
- LOG.error("Eventwatch Closed");
- } else {
- LOG.error("Eventwatch Closed" + e.getMessage());
- }
- waitForClose.countDown();
- }
- };
- OpenShiftClient openShiftClient = new DefaultOpenShiftClient();
- openShiftClient.events().inNamespace(openShiftCheProjectName).watch(eventWatcher);
- try {
- waitForClose.await();
- } catch (InterruptedException e) {
- LOG.error("Thread interrupted while waiting for eventWatcher.");
- Thread.currentThread().interrupt();
- } finally {
- openShiftClient.close();
- }
- }
-
- @Override
- public void getContainerLogs(
- final GetContainerLogsParams params, MessageProcessor containerLogsProcessor)
- throws IOException {
- String container = params.getContainer(); // container ID
- Pod pod = getChePodByContainerId(container);
- if (pod != null) {
- String podName = pod.getMetadata().getName();
- boolean[] ret = new boolean[1];
- ret[0] = false;
- OpenShiftClient openShiftClient = new DefaultOpenShiftClient();
- try (LogWatch watchLog =
- openShiftClient
- .pods()
- .inNamespace(openShiftCheProjectName)
- .withName(podName)
- .watchLog()) {
- Watcher watcher =
- new Watcher() {
-
- @Override
- public void eventReceived(Action action, Pod resource) {
- if (action == Action.DELETED) {
- ret[0] = true;
- }
- }
-
- @Override
- public void onClose(KubernetesClientException cause) {
- ret[0] = true;
- }
- };
- openShiftClient
- .pods()
- .inNamespace(openShiftCheProjectName)
- .withName(podName)
- .watch(watcher);
- Thread.sleep(5000);
- InputStream is = watchLog.getOutput();
- BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(is));
- while (!ret[0]) {
- String line = bufferedReader.readLine();
- containerLogsProcessor.process(new LogMessage(LogMessage.Type.DOCKER, line));
- }
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- } catch (IOException e) {
- // The kubernetes client throws an exception (Pipe not connected) when pod doesn't contain any logs.
- // We can ignore it.
- } finally {
- openShiftClient.close();
- }
- }
- }
-
- @Override
- public ContainerProcesses top(final TopParams params) throws IOException {
- String containerId = params.getContainer();
- Pod pod = getChePodByContainerId(containerId);
- String podName = pod.getMetadata().getName();
- String[] command;
- final String[] psArgs = params.getPsArgs();
- if (psArgs != null && psArgs.length != 0) {
- int length = psArgs.length + 1;
- command = new String[length];
- command[0] = PS_COMMAND;
- System.arraycopy(psArgs, 0, command, 1, psArgs.length);
- } else {
- command = new String[1];
- command[0] = PS_COMMAND;
- }
- ContainerProcesses processes = new ContainerProcesses();
- OpenShiftClient openShiftClient = new DefaultOpenShiftClient();
- try (ExecWatch watch =
- openShiftClient
- .pods()
- .inNamespace(openShiftCheProjectName)
- .withName(podName)
- .redirectingOutput()
- .redirectingError()
- .exec(command)) {
- BufferedReader reader = new BufferedReader(new InputStreamReader(watch.getOutput()));
- boolean first = true;
- int limit = 0;
- try {
- List procList = new ArrayList<>();
- while (reader.ready()) {
- String line = reader.readLine();
- if (line == null || line.isEmpty()) {
- continue;
- }
- if (line.startsWith("rpc error")) {
- throw new IOException(line);
- }
- line = line.trim();
- if (first) {
- String[] elements = line.split(TOP_REGEX_PATTERN);
- limit = elements.length;
- first = false;
- processes.setTitles(elements);
- } else {
- String[] elements = line.split(TOP_REGEX_PATTERN, limit);
- procList.add(elements);
- }
- }
- processes.setProcesses(procList.toArray(new String[0][0]));
- } catch (IOException e) {
- throw new OpenShiftException(e.getMessage());
- }
- } catch (KubernetesClientException e) {
- throw new OpenShiftException(e.getMessage());
- }
- return processes;
- }
-
- @Override
- public Exec createExec(final CreateExecParams params) throws IOException {
- String[] command = params.getCmd();
- String containerId = params.getContainer();
-
- Pod pod = getChePodByContainerId(containerId);
- String podName = pod.getMetadata().getName();
-
- String execId = KubernetesStringUtils.generateWorkspaceID();
- KubernetesExecHolder execHolder =
- new KubernetesExecHolder().withCommand(command).withPod(podName);
- execMap.put(execId, execHolder);
-
- return new Exec(command, execId);
- }
-
- @Override
- public void startExec(
- final StartExecParams params, @Nullable MessageProcessor execOutputProcessor)
- throws IOException {
- String execId = params.getExecId();
-
- KubernetesExecHolder exec = execMap.get(execId);
-
- String podName = exec.getPod();
- String[] command = exec.getCommand();
- for (int i = 0; i < command.length; i++) {
- command[i] = URLEncoder.encode(command[i], "UTF-8");
- }
-
- ExecutorService executor = Executors.newFixedThreadPool(2);
- OpenShiftClient openShiftClient = new DefaultOpenShiftClient();
- try (ExecWatch watch =
- openShiftClient
- .pods()
- .inNamespace(openShiftCheProjectName)
- .withName(podName)
- .redirectingOutput()
- .redirectingError()
- .exec(command);
- InputStreamPumper outputPump =
- new InputStreamPumper(
- watch.getOutput(),
- new KubernetesOutputAdapter(LogMessage.Type.STDOUT, execOutputProcessor));
- InputStreamPumper errorPump =
- new InputStreamPumper(
- watch.getError(),
- new KubernetesOutputAdapter(LogMessage.Type.STDERR, execOutputProcessor))) {
- Future> outFuture = executor.submit(outputPump);
- Future> errFuture = executor.submit(errorPump);
- // Short-term worksaround; the Futures above seem to never finish.
- Thread.sleep(2500);
- } catch (KubernetesClientException e) {
- throw new OpenShiftException(e.getMessage());
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- } finally {
- execMap.remove(execId);
- executor.shutdown();
- openShiftClient.close();
- }
- }
-
- @Override
- public SystemInfo getSystemInfo() throws IOException {
- OpenShiftClient openShiftClient = new DefaultOpenShiftClient();
- PodList chePods = openShiftClient.pods().inNamespace(this.openShiftCheProjectName).list();
- if (chePods.getItems().size() > 0) {
- Pod pod = chePods.getItems().get(0);
- Node node = openShiftClient.nodes().withName(pod.getSpec().getNodeName()).get();
- NodeSystemInfo nodeInfo = node.getStatus().getNodeInfo();
- SystemInfo systemInfo = new SystemInfo();
- systemInfo.setKernelVersion(nodeInfo.getKernelVersion());
- systemInfo.setOperatingSystem(nodeInfo.getOperatingSystem());
- systemInfo.setID(node.getMetadata().getUid());
- int containers =
- openShiftClient.pods().inNamespace(this.openShiftCheProjectName).list().getItems().size();
- int images = node.getStatus().getImages().size();
- systemInfo.setContainers(containers);
- systemInfo.setImages(images);
- systemInfo.setName(node.getMetadata().getName());
- String[] labels =
- node.getMetadata()
- .getLabels()
- .entrySet()
- .stream()
- .map(e -> String.format("%s=%s", e.getKey(), e.getValue()))
- .toArray(String[]::new);
- systemInfo.setLabels(labels);
- return systemInfo;
- } else {
- throw new OpenShiftException("No pod found");
- }
- }
-
- /**
- * Gets the ImageStreamTag corresponding to a given tag name (i.e. without the repository)
- *
- * @param imageStreamTagName the tag name to search for
- * @return
- * @throws IOException if either no matching tag is found, or there are multiple matches.
- */
- private ImageStreamTag getImageStreamTagFromRepo(String imageStreamTagName) throws IOException {
-
- // Since repository + tag are limited to 63 chars, it's possible that the entire
- // tag name did not fit, so we have to match a substring.
- String imageTagTrimmed =
- imageStreamTagName.length() > 20 ? imageStreamTagName.substring(0, 20) : imageStreamTagName;
-
- // Note: ideally, ImageStreamTags could be identified with a label, but it seems like
- // ImageStreamTags do not support labels.
- List imageStreams;
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- imageStreams =
- openShiftClient.imageStreamTags().inNamespace(openShiftCheProjectName).list().getItems();
- }
-
- // We only get ImageStreamTag names here, since these ImageStreamTags do not include
- // Docker metadata, for some reason.
- List imageStreamTags =
- imageStreams
- .stream()
- .filter(e -> e.getMetadata().getName().contains(imageTagTrimmed))
- .map(e -> e.getMetadata().getName())
- .collect(Collectors.toList());
-
- if (imageStreamTags.size() < 1) {
- throw new OpenShiftException(
- String.format("ImageStreamTag %s not found!", imageStreamTagName));
- } else if (imageStreamTags.size() > 1) {
- throw new OpenShiftException(
- String.format("Multiple ImageStreamTags found for name %s", imageStreamTagName));
- }
-
- String imageStreamTag = imageStreamTags.get(0);
-
- // Finally, get the ImageStreamTag, with Docker metadata.
- return getImageStreamTag(imageStreamTag);
- }
-
- private ImageStreamTag getImageStreamTag(final String imageStreamName) {
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- return openShiftClient
- .imageStreamTags()
- .inNamespace(openShiftCheProjectName)
- .withName(imageStreamName)
- .get();
- }
- }
-
- private Service getCheServiceBySelector(String selectorKey, String selectorValue) {
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- ServiceList svcs =
- openShiftClient.services().inNamespace(this.openShiftCheProjectName).list();
-
- Service svc =
- svcs.getItems()
- .stream()
- .filter(s -> s.getSpec().getSelector().containsKey(selectorKey))
- .filter(s -> s.getSpec().getSelector().get(selectorKey).equals(selectorValue))
- .findAny()
- .orElse(null);
-
- if (svc == null) {
- LOG.warn("No Service with selector {}={} could be found", selectorKey, selectorValue);
- }
- return svc;
- }
- }
-
- private Pod getChePodByContainerId(String containerId) throws IOException {
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- PodList pods =
- openShiftClient
- .pods()
- .inNamespace(this.openShiftCheProjectName)
- .withLabel(
- CHE_CONTAINER_IDENTIFIER_LABEL_KEY,
- KubernetesStringUtils.getLabelFromContainerID(containerId))
- .list();
-
- List items = pods.getItems();
-
- if (items.isEmpty()) {
- LOG.error(
- "An OpenShift Pod with label {}={} could not be found",
- CHE_CONTAINER_IDENTIFIER_LABEL_KEY,
- containerId);
- throw new IOException(
- "An OpenShift Pod with label "
- + CHE_CONTAINER_IDENTIFIER_LABEL_KEY
- + "="
- + containerId
- + " could not be found");
- }
-
- if (items.size() > 1) {
- LOG.error(
- "There are {} pod with label {}={} (just one was expected)",
- items.size(),
- CHE_CONTAINER_IDENTIFIER_LABEL_KEY,
- containerId);
- throw new IOException(
- "There are "
- + items.size()
- + " pod with label "
- + CHE_CONTAINER_IDENTIFIER_LABEL_KEY
- + "="
- + containerId
- + " (just one was expeced)");
- }
-
- return items.get(0);
- }
- }
-
- /**
- * Extracts the ImageInfo stored in an ImageStreamTag. The returned object is the JSON that would
- * be returned by executing {@code docker inspect }, except, due to a quirk in OpenShift's
- * handling of this data, fields except for {@code Config} and {@code ContainerConfig} are null.
- *
- * @param imageStreamTag
- * @return
- */
- private ImageInfo getImageInfoFromTag(ImageStreamTag imageStreamTag) {
- // The DockerImageConfig string here is the JSON that would be returned by a docker inspect image,
- // except that the capitalization is inconsistent, breaking deserialization. Top level elements
- // are lowercased with underscores, while nested elements conform to FieldNamingPolicy.UPPER_CAMEL_CASE.
- // We're only converting the config fields for brevity; this means that other fields are null.
- Image tagImage = imageStreamTag.getImage();
- String dockerImageConfig = tagImage.getDockerImageConfig();
-
- if (!isNullOrEmpty(dockerImageConfig)) {
- LOG.info("imageStreamTag dockerImageConfig is not empty. Using it to get image info");
- ImageInfo info =
- GSON.fromJson(
- dockerImageConfig
- .replaceFirst("config", "Config")
- .replaceFirst("container_config", "ContainerConfig"),
- ImageInfo.class);
- return info;
- } else {
- LOG.info(
- "imageStreamTag dockerImageConfig empty. Using dockerImageMetadata to get image info");
- String dockerImageMetadata =
- GSON.toJson(tagImage.getAdditionalProperties().get("dockerImageMetadata"));
- ImageInfo info = GSON.fromJson(dockerImageMetadata, ImageInfo.class);
- return info;
- }
- }
-
- protected String getCheWorkspaceId(CreateContainerParams createContainerParams) {
- Stream env = Arrays.stream(createContainerParams.getContainerConfig().getEnv());
- String workspaceID =
- env.filter(v -> v.startsWith(CHE_WORKSPACE_ID_ENV_VAR) && v.contains("="))
- .map(v -> v.split("=", 2)[1])
- .findFirst()
- .orElse("");
- return workspaceID.replaceFirst("workspace", "");
- }
-
- private boolean isDevMachine(CreateContainerParams createContainerParams) {
- Stream env = Arrays.stream(createContainerParams.getContainerConfig().getEnv());
- return Boolean.parseBoolean(
- env.filter(v -> v.startsWith(CHE_IS_DEV_MACHINE_ENV_VAR) && v.contains("="))
- .map(v -> v.split("=", 2)[1])
- .findFirst()
- .orElse("false"));
- }
-
- private void createOpenShiftService(
- String deploymentName,
- String serviceName,
- Set exposedPorts,
- Map portsToRefName,
- Map additionalLabels,
- String[] endpointAliases) {
- Map selector =
- Collections.singletonMap(OPENSHIFT_DEPLOYMENT_LABEL, deploymentName);
- List ports = KubernetesService.getServicePortsFrom(exposedPorts, portsToRefName);
-
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- Service service =
- openShiftClient
- .services()
- .inNamespace(this.openShiftCheProjectName)
- .createNew()
- .withNewMetadata()
- .withName(serviceName)
- .withAnnotations(KubernetesLabelConverter.labelsToNames(additionalLabels))
- .endMetadata()
- .withNewSpec()
- .withSelector(selector)
- .withPorts(ports)
- .endSpec()
- .done();
-
- LOG.info("OpenShift service {} created", service.getMetadata().getName());
-
- for (ServicePort port : ports) {
- createOpenShiftRoute(serviceName, deploymentName, port.getName());
- }
- }
- }
-
- private void createOpenShiftRoute(String serviceName, String deploymentName, String serverRef) {
- String routeId = serviceName.replaceFirst(CHE_OPENSHIFT_RESOURCES_PREFIX, "");
- openShiftRouteCreator.createRoute(
- openShiftCheProjectName,
- cheServerExternalAddress,
- serverRef,
- serviceName,
- deploymentName,
- routeId,
- secureRoutes);
- }
-
/**
 * Creates a single-replica OpenShift (extensions) Deployment running the given image, wiring in
 * ports, env vars, workspace volume mounts, and resource limits/requests.
 *
 * <p>NOTE(review): the raw {@code Set}/{@code Map} parameter types appear to have lost their
 * generic parameters during extraction (presumably {@code Set<String>} /
 * {@code Map<String, String>} and resource {@code Map<String, Quantity>}) — confirm against the
 * original file.
 *
 * @param deploymentName name of the deployment; also used as the pod selector label value
 * @param imageName image to run
 * @param sanitizedContainerName container name, already sanitized for k8s naming rules
 * @param volumes docker-style volume specs, used to derive workspace dirs and mounts
 * @throws OpenShiftException when workspace directories cannot be created
 */
private void createOpenShiftDeployment(
    String deploymentName,
    String imageName,
    String sanitizedContainerName,
    Set exposedPorts,
    Map portsToRefName,
    String[] envVariables,
    String[] volumes,
    Map resourceLimits,
    Map resourceRequests)
    throws OpenShiftException {

  LOG.info("Creating OpenShift deployment {}", deploymentName);

  // Pods are matched to this deployment via the deployment label.
  Map selector =
      Collections.singletonMap(OPENSHIFT_DEPLOYMENT_LABEL, deploymentName);

  LOG.info(
      "Adding container {} to OpenShift deployment {}", sanitizedContainerName, deploymentName);

  // Ensure the backing workspace directories exist before the pod mounts them.
  if (createWorkspaceDirs) {
    createWorkspaceDir(volumes);
  }

  // Container spec: unprivileged, IfNotPresent pull policy, liveness probe on exposed ports.
  Container container =
      new ContainerBuilder()
          .withName(sanitizedContainerName)
          .withImage(imageName)
          .withEnv(KubernetesEnvVar.getEnvFrom(envVariables))
          .withPorts(KubernetesContainer.getContainerPortsFrom(exposedPorts, portsToRefName))
          .withImagePullPolicy(OPENSHIFT_IMAGE_PULL_POLICY_IFNOTPRESENT)
          .withNewSecurityContext()
          .withPrivileged(false)
          .endSecurityContext()
          .withLivenessProbe(getLivenessProbeFrom(exposedPorts))
          .withVolumeMounts(getVolumeMountsFrom(volumes))
          .withNewResources()
          .withLimits(resourceLimits)
          .withRequests(resourceRequests)
          .endResources()
          .build();

  PodSpec podSpec =
      new PodSpecBuilder()
          .withContainers(container)
          .withVolumes(getVolumesFrom(volumes))
          .withTerminationGracePeriodSeconds(OPENSHIFT_POD_TERMINATION_GRACE_PERIOD)
          .build();

  // One replica: Che workspaces are single-pod.
  Deployment deployment =
      new DeploymentBuilder()
          .withNewMetadata()
          .withName(deploymentName)
          .withNamespace(this.openShiftCheProjectName)
          .endMetadata()
          .withNewSpec()
          .withReplicas(1)
          .withNewSelector()
          .withMatchLabels(selector)
          .endSelector()
          .withNewTemplate()
          .withNewMetadata()
          .withLabels(selector)
          .endMetadata()
          .withSpec(podSpec)
          .endTemplate()
          .endSpec()
          .build();

  try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
    deployment =
        openShiftClient
            .extensions()
            .deployments()
            .inNamespace(this.openShiftCheProjectName)
            .create(deployment);
  }

  LOG.info("OpenShift deployment {} created", deploymentName);
}
-
/**
 * Creates (or replaces) a new ImageStreamTag whose {@code From} points at the given image, then
 * waits until OpenShift has pulled the image metadata into the tag.
 *
 * @param sourceImageWithTag the image that the ImageStreamTag will track
 * @param imageStreamTagName the name of the imageStream tag (e.g. {@code <imageStream>:<tag>})
 * @return the created ImageStreamTag
 * @throws IOException When {@code sourceImageWithTag} metadata cannot be found, or the wait is
 *     interrupted
 */
private ImageStreamTag createImageStreamTag(String sourceImageWithTag, String imageStreamTagName)
    throws IOException {

  try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
    // createOrReplace: re-tagging an existing name is a legitimate operation (see tag()).
    openShiftClient
        .imageStreamTags()
        .inNamespace(openShiftCheProjectName)
        .createOrReplaceWithNew()
        .withNewMetadata()
        .withName(imageStreamTagName)
        .endMetadata()
        .withNewTag()
        .withNewFrom()
        .withKind("DockerImage")
        .withName(sourceImageWithTag)
        .endFrom()
        .endTag()
        .done();

    // Wait for image metadata to be pulled
    for (int waitCount = 0; waitCount < OPENSHIFT_IMAGESTREAM_MAX_WAIT_COUNT; waitCount++) {
      Thread.sleep(OPENSHIFT_IMAGESTREAM_WAIT_DELAY);
      ImageStreamTag createdTag =
          openShiftClient
              .imageStreamTags()
              .inNamespace(openShiftCheProjectName)
              .withName(imageStreamTagName)
              .get();
      if (createdTag != null) {
        LOG.info(
            String.format(
                "Created ImageStreamTag %s in namespace %s",
                createdTag.getMetadata().getName(), openShiftCheProjectName));
        return createdTag;
      }
    }

    // Tag never materialized: the tracked image most likely does not exist.
    throw new ImageNotFoundException(String.format("Image %s not found.", sourceImageWithTag));

  } catch (InterruptedException e) {
    // Restore the interrupt flag and surface the interruption to the caller as IOException.
    Thread.currentThread().interrupt();
    throw new IOException(e.getLocalizedMessage(), e);
  }
}
-
- /**
- * Collects the relevant information from a Service, an ImageInfo, and a Pod into a docker
- * ContainerInfo JSON object. The returned object is what would be returned by executing {@code
- * docker inspect }, with fields filled as available.
- *
- * @param svc
- * @param imageInfo
- * @param pod
- * @param containerId
- * @return
- * @throws OpenShiftException
- */
- private ContainerInfo createContainerInfo(
- Service svc, ImageInfo imageInfo, Pod pod, String containerId) throws OpenShiftException {
-
- // In Che on OpenShift, we only have one container per pod.
- Container container = pod.getSpec().getContainers().get(0);
- ContainerConfig imageContainerConfig = imageInfo.getContainerConfig();
-
- // HostConfig
- HostConfig hostConfig = new HostConfig();
- hostConfig.setBinds(new String[0]);
-
- // Env vars
- List imageEnv = Arrays.asList(imageContainerConfig.getEnv());
- List containerEnv =
- container
- .getEnv()
- .stream()
- .map(e -> String.format("%s=%s", e.getName(), e.getValue()))
- .collect(Collectors.toList());
- String[] env = Stream.concat(imageEnv.stream(), containerEnv.stream()).toArray(String[]::new);
-
- // Exposed Ports
- Map> ports = getCheServicePorts(svc);
- Map> exposedPorts = new HashMap<>();
- for (String key : ports.keySet()) {
- exposedPorts.put(key, Collections.emptyMap());
- }
-
- // Labels
- Map annotations =
- KubernetesLabelConverter.namesToLabels(svc.getMetadata().getAnnotations());
- Map containerLabels = imageInfo.getConfig().getLabels();
- Map labels =
- Stream.concat(annotations.entrySet().stream(), containerLabels.entrySet().stream())
- .filter(e -> e.getKey().startsWith(KubernetesLabelConverter.getCheServerLabelPrefix()))
- .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue()));
-
- // ContainerConfig
- ContainerConfig config = imageContainerConfig;
- config.setHostname(svc.getMetadata().getName());
- config.setEnv(env);
- config.setExposedPorts(exposedPorts);
- config.setLabels(labels);
- config.setImage(container.getImage());
-
- // NetworkSettings
- NetworkSettings networkSettings = new NetworkSettings();
- networkSettings.setIpAddress(svc.getSpec().getClusterIP());
- networkSettings.setGateway(svc.getSpec().getClusterIP());
- networkSettings.setPorts(ports);
-
- // Make final ContainerInfo
- ContainerInfo info = new ContainerInfo();
- info.setId(containerId);
- info.setConfig(config);
- info.setNetworkSettings(networkSettings);
- info.setHostConfig(hostConfig);
- info.setImage(imageInfo.getConfig().getImage());
-
- // In Che on OpenShift, we only have one container per pod.
- info.setState(getContainerStates(pod).get(0));
- return info;
- }
-
- private List getContainerStates(final Pod pod) throws OpenShiftException {
- List containerStates = new ArrayList<>();
- List containerStatuses = pod.getStatus().getContainerStatuses();
- for (ContainerStatus status : containerStatuses) {
- io.fabric8.kubernetes.api.model.ContainerState state = status.getState();
-
- ContainerStateTerminated terminated = state.getTerminated();
- ContainerStateWaiting waiting = state.getWaiting();
- ContainerStateRunning running = state.getRunning();
-
- ContainerState containerState = new ContainerState();
-
- if (terminated != null) {
- containerState.setStatus("exited");
- } else if (waiting != null) {
- containerState.setStatus("paused");
- } else if (running != null) {
- containerState.setStatus("running");
- } else {
- throw new OpenShiftException(
- "Fail to detect the state of container with id " + status.getContainerID());
- }
- containerStates.add(containerState);
- }
- return containerStates;
- }
-
- private void createWorkspaceDir(String[] volumes) throws OpenShiftException {
- PersistentVolumeClaim pvc = getClaimCheWorkspace();
- String workspaceSubpath = getWorkspaceSubpath(volumes);
- if (pvc != null && !isNullOrEmpty(workspaceSubpath)) {
- LOG.info("Making sure directory exists for workspace {}", workspaceSubpath);
- boolean succeeded =
- openShiftPvcHelper.createJobPod(
- workspacesPersistentVolumeClaim,
- openShiftCheProjectName,
- "create-",
- OpenShiftPvcHelper.Command.MAKE,
- workspaceSubpath);
- if (!succeeded) {
- LOG.error(
- "Failed to create workspace directory {} in PVC {}",
- workspaceSubpath,
- workspacesPersistentVolumeClaim);
- throw new OpenShiftException("Failed to create workspace directory in PVC");
- }
- }
- }
-
- /**
- * Gets the workspace subpath from an array of volumes. Since volumes provided are those used when
- * running Che in Docker, most of the volume spec is ignored; this method returns the subpath
- * within the hostpath that refers to the workspace.
- *
- * E.g. for a volume {@code /data/workspaces/wksp-8z00:/projects:Z}, this method will return
- * "wksp-8z00".
- *
- * @param volumes
- * @return
- */
- private String getWorkspaceSubpath(String[] volumes) {
- String workspaceSubpath = null;
- for (String volume : volumes) {
- // Volumes are structured ::.
- // We first check that matches the mount path for projects
- // and then extract the hostpath directory. The first part of the volume
- // String will be structured /workspaceName.
- String mountPath = volume.split(":", 3)[1];
- if (cheWorkspaceProjectsStorage.equals(mountPath)) {
- workspaceSubpath = volume.split(":", 3)[0].replaceAll(cheWorkspaceStorage, "");
- if (workspaceSubpath.startsWith("/")) {
- workspaceSubpath = workspaceSubpath.substring(1);
- }
- }
- }
- return workspaceSubpath;
- }
-
- private List getVolumeMountsFrom(String[] volumes) {
- List vms = new ArrayList<>();
- PersistentVolumeClaim pvc = getClaimCheWorkspace();
- if (pvc != null) {
- String subPath = getWorkspaceSubpath(volumes);
- if (subPath != null) {
- VolumeMount vm =
- new VolumeMountBuilder()
- .withMountPath(cheWorkspaceProjectsStorage)
- .withName(workspacesPersistentVolumeClaim)
- .withSubPath(subPath)
- .build();
-
- // add a mount from PVC for the logs
- VolumeMount logsVm =
- new VolumeMountBuilder()
- .withMountPath(CHE_MOUNTED_WORKSPACE_FOLDER)
- .withName(workspacesPersistentVolumeClaim)
- .withSubPath(subPath + WORKSPACE_LOGS_FOLDER_SUFFIX)
- .build();
-
- vms.add(vm);
- vms.add(logsVm);
- }
- }
- return vms;
- }
-
- private List getVolumesFrom(String[] volumes) {
- List vs = new ArrayList<>();
- PersistentVolumeClaim pvc = getClaimCheWorkspace();
- if (pvc != null) {
- for (String volume : volumes) {
- String mountPath = volume.split(":", 3)[1];
- if (cheWorkspaceProjectsStorage.equals(mountPath)) {
- PersistentVolumeClaimVolumeSource pvcs =
- new PersistentVolumeClaimVolumeSourceBuilder()
- .withClaimName(workspacesPersistentVolumeClaim)
- .build();
- Volume v =
- new VolumeBuilder()
- .withPersistentVolumeClaim(pvcs)
- .withName(workspacesPersistentVolumeClaim)
- .build();
- vs.add(v);
- }
- }
- }
- return vs;
- }
-
- private PersistentVolumeClaim getClaimCheWorkspace() {
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- PersistentVolumeClaimList pvcList =
- openShiftClient.persistentVolumeClaims().inNamespace(openShiftCheProjectName).list();
- for (PersistentVolumeClaim pvc : pvcList.getItems()) {
- if (workspacesPersistentVolumeClaim.equals(pvc.getMetadata().getName())) {
- return pvc;
- }
- }
- Map requests = new HashMap<>();
- requests.put("storage", new Quantity(workspacesPvcQuantity));
- Map annotations =
- Collections.singletonMap(
- OPENSHIFT_VOLUME_STORAGE_CLASS, OPENSHIFT_VOLUME_STORAGE_CLASS_NAME);
- PersistentVolumeClaim pvc =
- new PersistentVolumeClaimBuilder()
- .withNewMetadata()
- .withName(workspacesPersistentVolumeClaim)
- .withAnnotations(annotations)
- .endMetadata()
- .withNewSpec()
- .withAccessModes("ReadWriteOnce")
- .withNewResources()
- .withRequests(requests)
- .endResources()
- .endSpec()
- .build();
- pvc =
- openShiftClient.persistentVolumeClaims().inNamespace(openShiftCheProjectName).create(pvc);
- LOG.info("Creating OpenShift PVC {}", pvc.getMetadata().getName());
- return pvc;
- }
- }
-
- private String waitAndRetrieveContainerID(String deploymentName) throws IOException {
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- for (int i = 0; i < OPENSHIFT_WAIT_POD_TIMEOUT; i++) {
- try {
- Thread.sleep(OPENSHIFT_WAIT_POD_DELAY);
- } catch (InterruptedException ex) {
- Thread.currentThread().interrupt();
- }
-
- List pods =
- openShiftClient
- .pods()
- .inNamespace(this.openShiftCheProjectName)
- .withLabel(OPENSHIFT_DEPLOYMENT_LABEL, deploymentName)
- .list()
- .getItems();
-
- if (pods.size() < 1) {
- throw new OpenShiftException(
- String.format("Pod with deployment name %s not found", deploymentName));
- } else if (pods.size() > 1) {
- throw new OpenShiftException(
- String.format("Multiple pods with deployment name %s found", deploymentName));
- }
-
- Pod pod = pods.get(0);
- String status = pod.getStatus().getPhase();
- if (OPENSHIFT_POD_STATUS_RUNNING.equals(status)) {
- String containerID = pod.getStatus().getContainerStatuses().get(0).getContainerID();
- String normalizedID = KubernetesStringUtils.normalizeContainerID(containerID);
- openShiftClient
- .pods()
- .inNamespace(openShiftCheProjectName)
- .withName(pod.getMetadata().getName())
- .edit()
- .editMetadata()
- .addToLabels(
- CHE_CONTAINER_IDENTIFIER_LABEL_KEY,
- KubernetesStringUtils.getLabelFromContainerID(normalizedID))
- .endMetadata()
- .done();
- return normalizedID;
- }
- }
- return null;
- }
- }
-
- /**
- * Adds OpenShift liveness probe to the container. Liveness probe is configured via TCP Socket
- * Check - for dev machines by checking Workspace API agent port (4401), for non-dev by checking
- * Terminal port (4411)
- *
- * @param exposedPorts
- * @see OpenShift
- * Application Health
- */
- private Probe getLivenessProbeFrom(final Set exposedPorts) {
- int port = 0;
-
- if (isDevMachine(exposedPorts)) {
- port = CHE_WORKSPACE_AGENT_PORT;
- } else if (isTerminalAgentInjected(exposedPorts)) {
- port = CHE_TERMINAL_AGENT_PORT;
- }
-
- if (port != 0) {
- return new ProbeBuilder()
- .withNewTcpSocket()
- .withNewPort(port)
- .endTcpSocket()
- .withInitialDelaySeconds(openShiftLivenessProbeDelay)
- .withTimeoutSeconds(openShiftLivenessProbeTimeout)
- .build();
- }
-
- return null;
- }
-
- private Map> getCheServicePorts(Service service) {
- Map> networkSettingsPorts = new HashMap<>();
- List servicePorts = service.getSpec().getPorts();
- LOG.info(
- "Retrieving {} ports exposed by service {}",
- servicePorts.size(),
- service.getMetadata().getName());
- for (ServicePort servicePort : servicePorts) {
- String protocol = servicePort.getProtocol();
- String targetPort = String.valueOf(servicePort.getTargetPort().getIntVal());
- String nodePort = String.valueOf(servicePort.getNodePort());
- String portName = servicePort.getName();
-
- LOG.info("Port: {}{}{} ({})", targetPort, DOCKER_PROTOCOL_PORT_DELIMITER, protocol, portName);
-
- networkSettingsPorts.put(
- targetPort + DOCKER_PROTOCOL_PORT_DELIMITER + protocol.toLowerCase(),
- Collections.singletonList(
- new PortBinding().withHostIp(CHE_DEFAULT_EXTERNAL_ADDRESS).withHostPort(nodePort)));
- }
- return networkSettingsPorts;
- }
-
- /**
- * @param exposedPorts
- * @return true if machine exposes 4411/tcp port used by Terminal agent, false otherwise
- */
- private boolean isTerminalAgentInjected(final Set exposedPorts) {
- return exposedPorts.contains(CHE_TERMINAL_AGENT_PORT + "/tcp");
- }
-
- /**
- * @param exposedPorts
- * @return true if machine exposes 4401/tcp port used by Worspace API agent, false otherwise
- */
- private boolean isDevMachine(final Set exposedPorts) {
- return exposedPorts.contains(CHE_WORKSPACE_AGENT_PORT + "/tcp");
- }
-
- private String getDeploymentName(final RemoveContainerParams params) throws IOException {
- String containerId = params.getContainer();
- Pod pod = getChePodByContainerId(containerId);
- String deploymentName = pod.getMetadata().getLabels().get(OPENSHIFT_DEPLOYMENT_LABEL);
- return deploymentName;
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftDeploymentCleaner.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftDeploymentCleaner.java
deleted file mode 100644
index e6a14d7c51..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftDeploymentCleaner.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client;
-
-import io.fabric8.kubernetes.api.model.Pod;
-import io.fabric8.kubernetes.api.model.Service;
-import io.fabric8.kubernetes.api.model.extensions.Deployment;
-import io.fabric8.kubernetes.api.model.extensions.ReplicaSet;
-import io.fabric8.openshift.api.model.Route;
-import io.fabric8.openshift.client.DefaultOpenShiftClient;
-import io.fabric8.openshift.client.OpenShiftClient;
-import java.io.IOException;
-import java.util.List;
-import javax.inject.Singleton;
-import org.eclipse.che.plugin.openshift.client.exception.OpenShiftException;
-import org.eclipse.che.plugin.openshift.client.kubernetes.KubernetesResourceUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-@Singleton
-public class OpenShiftDeploymentCleaner {
- private static final Logger LOG = LoggerFactory.getLogger(OpenShiftDeploymentCleaner.class);
- private static final int OPENSHIFT_POD_DELETION_TIMEOUT = 120;
- private static final int OPENSHIFT_WAIT_POD_DELAY = 1000;
-
- public void cleanDeploymentResources(final String deploymentName, final String namespace)
- throws IOException {
- cleanUpWorkspaceResources(deploymentName, namespace);
- waitUntilWorkspacePodIsDeleted(deploymentName, namespace);
- }
-
- private void cleanUpWorkspaceResources(final String deploymentName, final String namespace)
- throws IOException {
- Deployment deployment = KubernetesResourceUtil.getDeploymentByName(deploymentName, namespace);
- Service service =
- KubernetesResourceUtil.getServiceBySelector(
- OpenShiftConnector.OPENSHIFT_DEPLOYMENT_LABEL, deploymentName, namespace);
- List routes =
- KubernetesResourceUtil.getRoutesByLabel(
- OpenShiftConnector.OPENSHIFT_DEPLOYMENT_LABEL, deploymentName, namespace);
- List replicaSets =
- KubernetesResourceUtil.getReplicaSetByLabel(
- OpenShiftConnector.OPENSHIFT_DEPLOYMENT_LABEL, deploymentName, namespace);
-
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- if (routes != null) {
- for (Route route : routes) {
- LOG.info("Removing OpenShift Route {}", route.getMetadata().getName());
- openShiftClient.resource(route).delete();
- }
- }
-
- if (service != null) {
- LOG.info("Removing OpenShift Service {}", service.getMetadata().getName());
- openShiftClient.resource(service).delete();
- }
-
- if (deployment != null) {
- LOG.info("Removing OpenShift Deployment {}", deployment.getMetadata().getName());
- openShiftClient.resource(deployment).delete();
- }
-
- if (replicaSets != null && replicaSets.size() > 0) {
- LOG.info("Removing OpenShift ReplicaSets for deployment {}", deploymentName);
- replicaSets.forEach(rs -> openShiftClient.resource(rs).delete());
- }
- }
- }
-
- private void waitUntilWorkspacePodIsDeleted(final String deploymentName, final String namespace)
- throws OpenShiftException {
- try (OpenShiftClient client = new DefaultOpenShiftClient()) {
- for (int waitCount = 0; waitCount < OPENSHIFT_POD_DELETION_TIMEOUT; waitCount++) {
- List pods =
- client
- .pods()
- .inNamespace(namespace)
- .withLabel(OpenShiftConnector.OPENSHIFT_DEPLOYMENT_LABEL, deploymentName)
- .list()
- .getItems();
-
- if (pods.size() == 0) {
- return;
- }
- Thread.sleep(OPENSHIFT_WAIT_POD_DELAY);
- }
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- LOG.info("Thread interrupted while cleaning up workspace");
- }
-
- throw new OpenShiftException("Timeout while waiting for pods to terminate");
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftPvcHelper.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftPvcHelper.java
deleted file mode 100644
index 0bbc89b48e..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftPvcHelper.java
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client;
-
-import static org.eclipse.che.plugin.openshift.client.OpenShiftConnector.WORKSPACE_LOGS_FOLDER_SUFFIX;
-
-import io.fabric8.kubernetes.api.model.Container;
-import io.fabric8.kubernetes.api.model.ContainerBuilder;
-import io.fabric8.kubernetes.api.model.PersistentVolumeClaimVolumeSource;
-import io.fabric8.kubernetes.api.model.PersistentVolumeClaimVolumeSourceBuilder;
-import io.fabric8.kubernetes.api.model.Pod;
-import io.fabric8.kubernetes.api.model.PodBuilder;
-import io.fabric8.kubernetes.api.model.Quantity;
-import io.fabric8.kubernetes.api.model.Volume;
-import io.fabric8.kubernetes.api.model.VolumeBuilder;
-import io.fabric8.kubernetes.api.model.VolumeMount;
-import io.fabric8.kubernetes.api.model.VolumeMountBuilder;
-import io.fabric8.openshift.client.DefaultOpenShiftClient;
-import io.fabric8.openshift.client.OpenShiftClient;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.stream.Collectors;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.inject.Singleton;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Helper class for executing simple commands in a Persistent Volume on Openshift.
- *
- * Creates a short-lived Pod using a CentOS image which mounts a specified PVC and executes a
- * command (either {@code mkdir -p } or {@code rm -rf For mkdir commands, an in-memory list of created workspaces is stored and used to avoid
- * calling mkdir unnecessarily. However, this list is not persisted, so dir creation is not tracked
- * between restarts.
- *
- * @author amisevsk
- */
-@Singleton
-public class OpenShiftPvcHelper {
-
- private static final Logger LOG = LoggerFactory.getLogger(OpenShiftPvcHelper.class);
-
- private static final String POD_PHASE_SUCCEEDED = "Succeeded";
- private static final String POD_PHASE_FAILED = "Failed";
- private static final String[] MKDIR_WORKSPACE_COMMAND = new String[] {"mkdir", "-p"};
- private static final String[] RMDIR_WORKSPACE_COMMAND = new String[] {"rm", "-rf"};
-
- private static final Set createdWorkspaces = ConcurrentHashMap.newKeySet();
-
- private final String jobImage;
- private final String jobMemoryLimit;
-
- protected enum Command {
- REMOVE,
- MAKE
- }
-
- @Inject
- protected OpenShiftPvcHelper(
- @Named("che.openshift.jobs.image") String jobImage,
- @Named("che.openshift.jobs.memorylimit") String jobMemoryLimit) {
- this.jobImage = jobImage;
- this.jobMemoryLimit = jobMemoryLimit;
- }
-
- /**
- * Creates a pod with {@code command} and reports whether it succeeded
- *
- * @param workspacesPvcName name of the PVC to mount
- * @param projectNamespace OpenShift namespace
- * @param jobNamePrefix prefix used for pod metadata name. Name structure will normally be {@code
- * } if only one path is passed, or {@code batch} if multiple
- * paths are provided
- * @param command command to execute in PVC.
- * @param workspaceDirs list of arguments attached to command. A list of directories to
- * create/delete.
- * @return true if Pod terminates with phase "Succeeded" or mkdir command issued for already
- * created worksapce, false otherwise.
- * @see Command
- */
- protected boolean createJobPod(
- String workspacesPvcName,
- String projectNamespace,
- String jobNamePrefix,
- Command command,
- String... workspaceDirs) {
-
- if (workspaceDirs.length == 0) {
- return true;
- }
-
- List logsDirs = Arrays.asList(workspaceDirs);
- logsDirs =
- logsDirs
- .stream()
- .map(dir -> dir + WORKSPACE_LOGS_FOLDER_SUFFIX)
- .collect(Collectors.toList());
-
- List allDirs = new ArrayList<>();
- allDirs.addAll(Arrays.asList(workspaceDirs));
- allDirs.addAll(logsDirs);
- String[] allDirsArray = allDirs.toArray(new String[allDirs.size()]);
-
- if (Command.MAKE.equals(command)) {
- String[] dirsToCreate = filterDirsToCreate(allDirsArray);
- if (dirsToCreate.length == 0) {
- return true;
- }
- allDirsArray = dirsToCreate;
- }
-
- VolumeMount vm =
- new VolumeMountBuilder().withMountPath("/projects").withName(workspacesPvcName).build();
-
- PersistentVolumeClaimVolumeSource pvcs =
- new PersistentVolumeClaimVolumeSourceBuilder().withClaimName(workspacesPvcName).build();
-
- Volume volume =
- new VolumeBuilder().withPersistentVolumeClaim(pvcs).withName(workspacesPvcName).build();
-
- String[] jobCommand = getCommand(command, "/projects/", allDirsArray);
- LOG.info(
- "Executing command {} in PVC {} for {} dirs",
- jobCommand[0],
- workspacesPvcName,
- allDirs.size());
-
- Map limit = Collections.singletonMap("memory", new Quantity(jobMemoryLimit));
-
- String podName =
- workspaceDirs.length > 1 ? jobNamePrefix + "batch" : jobNamePrefix + workspaceDirs[0];
-
- Container container =
- new ContainerBuilder()
- .withName(podName)
- .withImage(jobImage)
- .withImagePullPolicy("IfNotPresent")
- .withNewSecurityContext()
- .withPrivileged(false)
- .endSecurityContext()
- .withCommand(jobCommand)
- .withVolumeMounts(vm)
- .withNewResources()
- .withLimits(limit)
- .endResources()
- .build();
-
- Pod podSpec =
- new PodBuilder()
- .withNewMetadata()
- .withName(podName)
- .endMetadata()
- .withNewSpec()
- .withContainers(container)
- .withVolumes(volume)
- .withRestartPolicy("Never")
- .endSpec()
- .build();
-
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- openShiftClient.pods().inNamespace(projectNamespace).create(podSpec);
- boolean completed = false;
- while (!completed) {
- Pod pod = openShiftClient.pods().inNamespace(projectNamespace).withName(podName).get();
- String phase = pod.getStatus().getPhase();
- switch (phase) {
- case POD_PHASE_FAILED:
- LOG.info("Pod command {} failed", Arrays.toString(jobCommand));
- // fall through
- case POD_PHASE_SUCCEEDED:
- openShiftClient.resource(pod).delete();
- updateCreatedDirs(command, phase, allDirsArray);
- return POD_PHASE_SUCCEEDED.equals(phase);
- default:
- Thread.sleep(1000);
- }
- }
- } catch (InterruptedException e) {
- Thread.currentThread().interrupt();
- }
- return false;
- }
-
- private String[] getCommand(Command commandType, String mountPath, String... dirs) {
- String[] command = new String[0];
- switch (commandType) {
- case MAKE:
- command = MKDIR_WORKSPACE_COMMAND;
- break;
- case REMOVE:
- command = RMDIR_WORKSPACE_COMMAND;
- break;
- }
-
- String[] dirsWithPath =
- Arrays.asList(dirs).stream().map(dir -> mountPath + dir).toArray(String[]::new);
-
- String[] fullCommand = new String[command.length + dirsWithPath.length];
-
- System.arraycopy(command, 0, fullCommand, 0, command.length);
- System.arraycopy(dirsWithPath, 0, fullCommand, command.length, dirsWithPath.length);
- return fullCommand;
- }
-
- private void updateCreatedDirs(Command command, String phase, String... workspaceDirs) {
- if (!POD_PHASE_SUCCEEDED.equals(phase)) {
- return;
- }
- List dirs = Arrays.asList(workspaceDirs);
- switch (command) {
- case MAKE:
- createdWorkspaces.addAll(dirs);
- break;
- case REMOVE:
- createdWorkspaces.removeAll(dirs);
- break;
- }
- }
-
- private String[] filterDirsToCreate(String[] allDirs) {
- List dirs = Arrays.asList(allDirs);
- List dirsToCreate = new ArrayList<>();
- for (String dir : dirs) {
- if (!createdWorkspaces.contains(dir)) {
- dirsToCreate.add(dir);
- }
- }
- return dirsToCreate.toArray(new String[dirsToCreate.size()]);
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftRouteCreator.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftRouteCreator.java
deleted file mode 100644
index 30de1efe08..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftRouteCreator.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client;
-
-import io.fabric8.openshift.api.model.DoneableRoute;
-import io.fabric8.openshift.api.model.Route;
-import io.fabric8.openshift.api.model.RouteFluent.SpecNested;
-import io.fabric8.openshift.client.DefaultOpenShiftClient;
-import io.fabric8.openshift.client.OpenShiftClient;
-import javax.inject.Singleton;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-@Singleton
-public class OpenShiftRouteCreator {
- private static final Logger LOG = LoggerFactory.getLogger(OpenShiftRouteCreator.class);
- private static final String TLS_TERMINATION_EDGE = "edge";
- private static final String REDIRECT_INSECURE_EDGE_TERMINATION_POLICY = "Redirect";
-
- public void createRoute(
- final String namespace,
- final String openShiftNamespaceExternalAddress,
- final String serverRef,
- final String serviceName,
- final String deploymentName,
- final String routeId,
- final boolean enableTls) {
-
- if (openShiftNamespaceExternalAddress == null) {
- throw new IllegalArgumentException(
- "Property che.docker.ip.external must be set when using openshift.");
- }
-
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- String routeName = generateRouteName(routeId, serverRef);
- String serviceHost = generateRouteHost(routeName, openShiftNamespaceExternalAddress);
-
- SpecNested routeSpec =
- openShiftClient
- .routes()
- .inNamespace(namespace)
- .createNew()
- .withNewMetadata()
- .withName(routeName)
- .addToLabels(OpenShiftConnector.OPENSHIFT_DEPLOYMENT_LABEL, deploymentName)
- .endMetadata()
- .withNewSpec()
- .withHost(serviceHost)
- .withNewTo()
- .withKind("Service")
- .withName(serviceName)
- .endTo()
- .withNewPort()
- .withNewTargetPort()
- .withStrVal(serverRef)
- .endTargetPort()
- .endPort();
-
- if (enableTls) {
- routeSpec
- .withNewTls()
- .withTermination(TLS_TERMINATION_EDGE)
- .withInsecureEdgeTerminationPolicy(REDIRECT_INSECURE_EDGE_TERMINATION_POLICY)
- .endTls();
- }
-
- Route route = routeSpec.endSpec().done();
-
- LOG.info("OpenShift route {} created", route.getMetadata().getName());
- }
- }
-
- private String generateRouteName(final String serviceName, final String serverRef) {
- return serverRef + "-" + serviceName;
- }
-
- private String generateRouteHost(
- final String routeName, final String openShiftNamespaceExternalAddress) {
- return routeName + "-" + openShiftNamespaceExternalAddress;
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftVersion.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftVersion.java
deleted file mode 100644
index db0d8bfd15..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftVersion.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client;
-
-import java.util.Date;
-import org.eclipse.che.plugin.docker.client.json.Version;
-
-public class OpenShiftVersion {
- private String major;
- private String minor;
- private String gitVersion;
- private String gitTreeState;
- private Date buildDate;
- private String goVersion;
- private String gitCommit;
- private String compiler;
- private String platform;
-
- public String getMajor() {
- return major;
- }
-
- public void setMajor(String major) {
- this.major = major;
- }
-
- public String getMinor() {
- return minor;
- }
-
- public void setMinor(String minor) {
- this.minor = minor;
- }
-
- public String getGitVersion() {
- return gitVersion;
- }
-
- public void setGitVersion(String gitVersion) {
- this.gitVersion = gitVersion;
- }
-
- public String getGitTreeState() {
- return gitTreeState;
- }
-
- public void setGitTreeState(String gitTreeState) {
- this.gitTreeState = gitTreeState;
- }
-
- public Date getBuildDate() {
- return buildDate;
- }
-
- public void setBuildDate(Date buildDate) {
- this.buildDate = buildDate;
- }
-
- public String getGoVersion() {
- return goVersion;
- }
-
- public void setGoVersion(String goVersion) {
- this.goVersion = goVersion;
- }
-
- public String getGitCommit() {
- return gitCommit;
- }
-
- public void setGitCommit(String gitCommit) {
- this.gitCommit = gitCommit;
- }
-
- public String getCompiler() {
- return compiler;
- }
-
- public void setCompiler(String compiler) {
- this.compiler = compiler;
- }
-
- public String getPlatform() {
- return platform;
- }
-
- public void setPlatform(String platform) {
- this.platform = platform;
- }
-
- public Version getVersion() {
- Version version = new Version();
- version.setVersion(major + "." + minor);
- version.setGitCommit(getGitCommit());
- version.setGoVersion(getGoVersion());
- if (getPlatform() != null) {
- String[] elements = getPlatform().split("/");
- if (elements.length == 2) {
- version.setOs(elements[0]);
- version.setArch(elements[1]);
- }
- }
- return version;
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleaner.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleaner.java
deleted file mode 100644
index f808b84df2..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleaner.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client;
-
-import static com.google.common.base.Strings.isNullOrEmpty;
-
-import com.google.common.annotations.VisibleForTesting;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import javax.inject.Inject;
-import javax.inject.Named;
-import javax.inject.Singleton;
-import org.eclipse.che.api.core.model.workspace.Workspace;
-import org.eclipse.che.api.core.notification.EventService;
-import org.eclipse.che.api.core.notification.EventSubscriber;
-import org.eclipse.che.api.workspace.server.event.ServerIdleEvent;
-import org.eclipse.che.api.workspace.shared.event.WorkspaceRemovedEvent;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Class used to remove workspace directories in Persistent Volume when a workspace is delete while
- * running on OpenShift. Deleted workspace directories are stored in a list. Upon Che server idling,
- * all of these workspaces are deleted simultaneously from the PVC using a {@link
- * OpenShiftPvcHelper} job.
- *
- * Since deleting a workspace does not immediately remove its files, re-creating a workspace with
- * a previously used name can result in files from the previous workspace still being present.
- *
- * @author amisevsk
- * @author Sergii Leshchenko
- */
-@Singleton
-public class OpenShiftWorkspaceFilesCleaner implements EventSubscriber {
-
- private static final Logger LOG = LoggerFactory.getLogger(OpenShiftConnector.class);
-
- private final Set deleteQueue;
- private final String projectNamespace;
- private final String workspacesPvcName;
- private final OpenShiftPvcHelper openShiftPvcHelper;
-
- @Inject
- public OpenShiftWorkspaceFilesCleaner(
- OpenShiftPvcHelper openShiftPvcHelper,
- @Named("che.openshift.project") String projectNamespace,
- @Named("che.openshift.workspaces.pvc.name") String workspacesPvcName) {
- this.projectNamespace = projectNamespace;
- this.workspacesPvcName = workspacesPvcName;
- this.openShiftPvcHelper = openShiftPvcHelper;
- this.deleteQueue = ConcurrentHashMap.newKeySet();
- }
-
- @Inject
- public void subscribe(EventService eventService) {
- eventService.subscribe(this);
- eventService.subscribe(event -> deleteWorkspacesInQueue(), ServerIdleEvent.class);
- }
-
- @Override
- public void onEvent(WorkspaceRemovedEvent event) {
- Workspace workspace = event.getWorkspace();
- String workspaceName = workspace.getConfig().getName();
- if (isNullOrEmpty(workspaceName)) {
- LOG.error("Could not get workspace name for files removal.");
- return;
- }
- deleteQueue.add(workspaceName);
- }
-
- @VisibleForTesting
- void deleteWorkspacesInQueue() {
- List deleteQueueCopy = new ArrayList<>(deleteQueue);
- String[] dirsToDelete = deleteQueueCopy.toArray(new String[deleteQueueCopy.size()]);
-
- LOG.info("Deleting {} workspaces on PVC {}", deleteQueueCopy.size(), workspacesPvcName);
- boolean successful =
- openShiftPvcHelper.createJobPod(
- workspacesPvcName,
- projectNamespace,
- "delete-",
- OpenShiftPvcHelper.Command.REMOVE,
- dirsToDelete);
- if (successful) {
- deleteQueue.removeAll(deleteQueueCopy);
- }
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/exception/OpenShiftException.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/exception/OpenShiftException.java
deleted file mode 100644
index ce1fe1b4e1..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/exception/OpenShiftException.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.exception;
-
-import java.io.IOException;
-
-public class OpenShiftException extends IOException {
-
- public OpenShiftException(String message) {
- super(message);
- }
-
- public OpenShiftException(Throwable cause) {
- super(cause);
- }
-
- public OpenShiftException(String message, Throwable cause) {
- super(message, cause);
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainer.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainer.java
deleted file mode 100644
index f14e5b5422..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainer.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import static com.google.common.base.Strings.isNullOrEmpty;
-
-import io.fabric8.kubernetes.api.model.ContainerPort;
-import io.fabric8.kubernetes.api.model.ContainerPortBuilder;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import org.eclipse.che.plugin.docker.client.json.ContainerConfig;
-import org.eclipse.che.plugin.docker.client.json.ImageConfig;
-
-/** Provides API for managing Kubernetes {@link ContainerPort} */
-public final class KubernetesContainer {
-
- private KubernetesContainer() {}
-
- /**
- * Retrieves list of ({@link ContainerPort} based on ports defined in {@link ContainerConfig} and
- * {@link ImageConfig}
- *
- * @param exposedPorts
- * @return list of {@link ContainerPort}
- */
- public static List getContainerPortsFrom(
- Set exposedPorts, Map portsToRefName) {
- List containerPorts = new ArrayList<>(exposedPorts.size());
- for (String exposedPort : exposedPorts) {
- String[] portAndProtocol = exposedPort.split("/", 2);
- String port = portAndProtocol[0];
- String protocol = portAndProtocol[1].toUpperCase();
-
- int portNumber = Integer.parseInt(port);
- String portName = portsToRefName.get(exposedPort);
- portName = isNullOrEmpty(portName) ? "server-" + exposedPort.replace("/", "-") : portName;
-
- ContainerPort containerPort =
- new ContainerPortBuilder()
- .withName(portName)
- .withProtocol(protocol)
- .withContainerPort(portNumber)
- .build();
- containerPorts.add(containerPort);
- }
- return containerPorts;
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesEnvVar.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesEnvVar.java
deleted file mode 100644
index 0fa7109009..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesEnvVar.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import io.fabric8.kubernetes.api.model.EnvVar;
-import io.fabric8.kubernetes.api.model.EnvVarBuilder;
-import java.util.ArrayList;
-import java.util.List;
-import org.eclipse.che.plugin.docker.client.json.ContainerConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/** Provides API for managing Kubernetes {@link EnvVar} */
-public final class KubernetesEnvVar {
- private static final Logger LOG = LoggerFactory.getLogger(KubernetesEnvVar.class);
-
- private KubernetesEnvVar() {}
-
- /**
- * Retrieves list of {@link EnvVar} based on environment variables specified in {@link
- * ContainerConfig}
- *
- * @param envVariables
- * @return list of {@link EnvVar}
- */
- public static List getEnvFrom(String[] envVariables) {
- LOG.info("Container environment variables:");
- List env = new ArrayList<>();
- for (String envVariable : envVariables) {
- String[] nameAndValue = envVariable.split("=", 2);
- String varName = nameAndValue[0];
- String varValue = nameAndValue[1];
- EnvVar envVar = new EnvVarBuilder().withName(varName).withValue(varValue).build();
- env.add(envVar);
- LOG.info("- {}={}", varName, varValue);
- }
- return env;
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesExecHolder.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesExecHolder.java
deleted file mode 100644
index 47fb9a2bc4..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesExecHolder.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import java.util.Arrays;
-import org.eclipse.che.plugin.openshift.client.OpenShiftConnector;
-
-/**
- * Holder class for metadata about an exec, to be used with {@link OpenShiftConnector}.
- *
- * In OpenShift, {@code createExec()} is not separate from {@code startExec()}, so this class has
- * to be used to pass data between {@code createExec()} and {@code startExec()} calls.
- *
- * @see OpenShiftConnector#createExec(org.eclipse.che.plugin.docker.client.params.CreateExecParams)
- * @see OpenShiftConnector#startExec(org.eclipse.che.plugin.docker.client.params.StartExecParams,
- * org.eclipse.che.plugin.docker.client.MessageProcessor)
- */
-public class KubernetesExecHolder {
-
- private String[] command;
- private String podName;
-
- public KubernetesExecHolder withCommand(String[] command) {
- this.command = command;
- return this;
- }
-
- public KubernetesExecHolder withPod(String podName) {
- this.podName = podName;
- return this;
- }
-
- public String[] getCommand() {
- return command;
- }
-
- public String getPod() {
- return podName;
- }
-
- public String toString() {
- return String.format(
- "KubernetesExecHolder {command=%s, podName=%s}",
- Arrays.asList(command).toString(), podName);
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverter.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverter.java
deleted file mode 100644
index f2325d4f7a..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverter.java
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import org.apache.commons.lang.StringUtils;
-import org.eclipse.che.plugin.docker.client.json.ContainerConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Converter of labels defined in {@link ContainerConfig} for matching to Kubernetes annotation
- * requirements
- */
-public final class KubernetesLabelConverter {
- private static final Logger LOG = LoggerFactory.getLogger(KubernetesLabelConverter.class);
- /** Prefix used for che server labels */
- private static final String CHE_SERVER_LABEL_PREFIX = "che:server";
- /** Padding to use when converting server label to DNS name */
- private static final String CHE_SERVER_LABEL_PADDING = "0%s0";
- /**
- * Regex to use when matching converted labels -- should match {@link CHE_SERVER_LABEL_PADDING}
- */
- private static final Pattern CHE_SERVER_LABEL_KEY = Pattern.compile("^0(.*)0$");
-
- private static final String KUBERNETES_ANNOTATION_REGEX =
- "([A-Za-z0-9][-A-Za-z0-9_\\.]*)?[A-Za-z0-9]";
-
- private KubernetesLabelConverter() {}
-
- /** @return prefix that is used for Che server labels */
- public static String getCheServerLabelPrefix() {
- return CHE_SERVER_LABEL_PREFIX;
- }
-
- /**
- * Converts a map of labels to match Kubernetes annotation requirements. Annotations are limited
- * to alphanumeric characters, {@code '.'}, {@code '_'} and {@code '-'}, and must start and end
- * with an alphanumeric character, i.e. they must match the regex {@code
- * ([A-Za-z0-9][-A-Za-z0-9_.]*)?[A-Za-z0-9]}
- *
- *
Note that entry keys should begin with {@link
- * KubernetesLabelConverter#CHE_SERVER_LABEL_PREFIX} and entries should not contain {@code '.'} or
- * {@code '_'} before conversion; otherwise label will not be converted and included in output.
- *
- *
This implementation is relatively fragile -- changes to how Che generates labels may cause
- * this method to stop working. In general, it will only be possible to convert labels that are
- * alphanumeric plus up to 3 special characters (by converting the special characters to {@code
- * '_'}, {@code '-'}, and {@code '.'} as necessary).
- *
- * @param labels Map of labels to convert
- * @return Map of labels converted to DNS Names
- */
- public static Map labelsToNames(Map labels) {
- Map names = new HashMap<>();
- if (labels == null) {
- return names;
- }
- for (Map.Entry label : labels.entrySet()) {
-
- if (!hasConversionProblems(label)) {
-
- String key = label.getKey();
- String value = label.getValue();
-
- // Convert keys: e.g. "che:server:4401/tcp:ref" ->
- // "che.server.4401-tcp.ref"
- key = convertLabelKey(key);
- // Convert values: e.g. "/api" -> ".api" -- note values may
- // include '-' e.g. "tomcat-debug"
- value = convertLabelValue(value);
-
- // Add padding since DNS names must start and end with
- // alphanumeric characters
- key = addPadding(key);
- value = addPadding(value);
-
- if (matchesKubernetesLabelRegex(key) && matchesKubernetesLabelRegex(value)) {
- names.put(key, value);
- } else {
- LOG.error(
- "Could not convert label {} into Kubernetes annotation: labels must be alphanumeric with ':' and '/'",
- label.toString());
- }
- }
- }
- return names;
- }
-
- /**
- * Undoes the label conversion done by {@link KubernetesLabelConverter#labelsToNames(Map)}
- *
- * @param labels Map of DNS names
- * @return Map of unconverted labels
- */
- public static Map namesToLabels(Map names) {
- Map labels = new HashMap<>();
- if (names == null) {
- return labels;
- }
- for (Map.Entry entry : names.entrySet()) {
- String key = entry.getKey();
- String value = entry.getValue();
-
- // Remove padding
- Matcher keyMatcher = CHE_SERVER_LABEL_KEY.matcher(key);
- Matcher valueMatcher = CHE_SERVER_LABEL_KEY.matcher(value);
- if (!keyMatcher.matches() || !valueMatcher.matches()) {
- continue;
- }
- key = keyMatcher.group(1);
- value = valueMatcher.group(1);
-
- // Convert key: e.g. "che.server.4401_tcp.ref" -> "che:server:4401/tcp:ref"
- key = key.replaceAll("\\.", ":").replaceAll("_", "/");
- // Convert value: e.g. Convert values: e.g. "_api" -> "/api"
- value = value.replaceAll("_", "/");
-
- labels.put(key, value);
- }
- return labels;
- }
-
- /**
- * Checks if there are any potential problems coupled with label conversion
- *
- * @param label
- * @return true if label has no conversion issues, false otherwise
- */
- private static boolean hasConversionProblems(final Map.Entry label) {
- boolean hasProblems = false;
- String key = label.getKey();
- String value = label.getValue();
-
- if (StringUtils.isBlank(value)) {
- LOG.error("The label {} is blank", label.toString());
- hasProblems = true;
- } else if (key.contains(".") || key.contains("_") || value.contains("_")) {
- LOG.error(
- "Cannot convert label {} to DNS Name: '-' and '.' are used as escape characters",
- label.toString());
- hasProblems = true;
- } else if (!key.startsWith(CHE_SERVER_LABEL_PREFIX)) {
- LOG.warn(
- "Expected CreateContainerParams label key {} to start with {}",
- key,
- CHE_SERVER_LABEL_PREFIX);
- }
- return hasProblems;
- }
-
- /** Convert keys: e.g. "che:server:4401/tcp:ref" -> "che.server.4401-tcp.ref" */
- private static String convertLabelKey(final String key) {
- return key.replaceAll(":", ".").replaceAll("/", "_");
- }
-
- /** Convert values: e.g. "/api" -> ".api" Note: values may include '-' e.g. "tomcat-debug" */
- private static String convertLabelValue(final String value) {
- return value.replaceAll("/", "_");
- }
-
- /** Adds padding since DNS names must start and end with alphanumeric characters */
- private static String addPadding(final String label) {
- return String.format(CHE_SERVER_LABEL_PADDING, label);
- }
-
- private static boolean matchesKubernetesLabelRegex(final String label) {
- return label.matches(KUBERNETES_ANNOTATION_REGEX);
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapter.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapter.java
deleted file mode 100644
index 0d297c65bc..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapter.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import io.fabric8.kubernetes.client.Callback;
-import io.fabric8.kubernetes.client.utils.InputStreamPumper;
-import org.eclipse.che.commons.annotation.Nullable;
-import org.eclipse.che.plugin.docker.client.LogMessage;
-import org.eclipse.che.plugin.docker.client.MessageProcessor;
-
-/**
- * Adapter class for passing data from a {@code kubernetes-client} output stream (e.g. for an exec
- * call) to {@link MessageProcessor}. This class should be passed to a {@link InputStreamPumper}
- * along with the output of the exec call.
- *
- * Output passed in via the {@link #call(byte[])} method is parsed into lines, (respecting {@code
- * '\n'} and {@code CRLF} as line separators), and passed to the {@link MessageProcessor} as {@link
- * LogMessage}s.
- */
-public class KubernetesOutputAdapter implements Callback {
-
- private LogMessage.Type type;
- private MessageProcessor execOutputProcessor;
- private StringBuilder lineBuffer;
-
- /**
- * Create a new KubernetesOutputAdapter
- *
- * @param type the type of LogMessages being passed to the MessageProcessor
- * @param processor the processor receiving LogMessages. If null, calling {@link #call(byte[])}
- * will return immediately.
- */
- public KubernetesOutputAdapter(
- LogMessage.Type type, @Nullable MessageProcessor processor) {
- this.type = type;
- this.execOutputProcessor = processor;
- this.lineBuffer = new StringBuilder();
- }
-
- @Override
- public void call(byte[] data) {
- if (data == null || data.length == 0 || execOutputProcessor == null) {
- return;
- }
- int start = 0;
- int offset = 0;
-
- for (int pos = 0; pos < data.length; pos++) {
- if (data[pos] == '\n' || data[pos] == '\r') {
- offset = pos - start;
- String line = new String(data, start, offset);
- lineBuffer.append(line);
- execOutputProcessor.process(new LogMessage(type, lineBuffer.toString()));
- lineBuffer.setLength(0);
- if (data[pos] == '\r') {
- pos += 1;
- }
- start = pos + 1;
- }
- }
- String trailingChars = new String(data, start, data.length - start);
- lineBuffer.append(trailingChars);
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesResourceUtil.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesResourceUtil.java
deleted file mode 100644
index 72a530bddf..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesResourceUtil.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import io.fabric8.kubernetes.api.model.Service;
-import io.fabric8.kubernetes.api.model.ServiceList;
-import io.fabric8.kubernetes.api.model.extensions.Deployment;
-import io.fabric8.kubernetes.api.model.extensions.ReplicaSet;
-import io.fabric8.openshift.api.model.Route;
-import io.fabric8.openshift.api.model.RouteList;
-import io.fabric8.openshift.client.DefaultOpenShiftClient;
-import io.fabric8.openshift.client.OpenShiftClient;
-import java.io.IOException;
-import java.util.List;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public final class KubernetesResourceUtil {
- private static final Logger LOG = LoggerFactory.getLogger(KubernetesResourceUtil.class);
-
- private KubernetesResourceUtil() {}
-
- public static Deployment getDeploymentByName(String deploymentName, String namespace)
- throws IOException {
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- Deployment deployment =
- openShiftClient
- .extensions()
- .deployments()
- .inNamespace(namespace)
- .withName(deploymentName)
- .get();
- if (deployment == null) {
- LOG.warn("No Deployment with name {} could be found", deploymentName);
- }
- return deployment;
- }
- }
-
- public static Service getServiceBySelector(
- final String selectorKey, final String selectorValue, final String namespace) {
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- ServiceList svcs = openShiftClient.services().inNamespace(namespace).list();
-
- Service svc =
- svcs.getItems()
- .stream()
- .filter(s -> s.getSpec().getSelector().containsKey(selectorKey))
- .filter(s -> s.getSpec().getSelector().get(selectorKey).equals(selectorValue))
- .findAny()
- .orElse(null);
-
- if (svc == null) {
- LOG.warn("No Service with selector {}={} could be found", selectorKey, selectorValue);
- }
- return svc;
- }
- }
-
- public static List getRoutesByLabel(
- final String labelKey, final String labelValue, final String namespace) throws IOException {
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- RouteList routeList =
- openShiftClient.routes().inNamespace(namespace).withLabel(labelKey, labelValue).list();
-
- List items = routeList.getItems();
-
- if (items.isEmpty()) {
- LOG.warn("No Route with label {}={} could be found", labelKey, labelValue);
- throw new IOException(
- "No Route with label " + labelKey + "=" + labelValue + " could be found");
- }
-
- return items;
- }
- }
-
- public static List getReplicaSetByLabel(
- final String key, final String value, final String namespace) {
- try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
- List replicaSets =
- openShiftClient
- .extensions()
- .replicaSets()
- .inNamespace(namespace)
- .withLabel(key, value)
- .list()
- .getItems();
- return replicaSets;
- }
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesService.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesService.java
deleted file mode 100644
index 6d0727844d..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesService.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import static com.google.common.base.Strings.isNullOrEmpty;
-
-import io.fabric8.kubernetes.api.model.IntOrString;
-import io.fabric8.kubernetes.api.model.ServicePort;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import org.eclipse.che.plugin.docker.client.json.ContainerConfig;
-import org.eclipse.che.plugin.docker.client.json.ImageConfig;
-
-/** Provides API for managing Kubernetes {@link ServicePort} */
-public final class KubernetesService {
-
- private KubernetesService() {}
-
- /**
- * Retrieves list of {@link ServicePort} based on ports defined in {@link ContainerConfig} and
- * {@link ImageConfig}
- *
- * @param exposedPorts
- * @return list of {@link ServicePort}
- */
- public static List getServicePortsFrom(
- Set exposedPorts, Map portsToRefName) {
- List servicePorts = new ArrayList<>(exposedPorts.size());
- for (String exposedPort : exposedPorts) {
- String[] portAndProtocol = exposedPort.split("/", 2);
- String port = portAndProtocol[0];
- String protocol = portAndProtocol[1];
-
- int portNumber = Integer.parseInt(port);
- String portName = portsToRefName.get(exposedPort);
- portName = isNullOrEmpty(portName) ? "server-" + exposedPort.replace("/", "-") : portName;
-
- int targetPortNumber = portNumber;
- ServicePort servicePort = new ServicePort();
- servicePort.setName(portName);
- servicePort.setProtocol(protocol.toUpperCase());
- servicePort.setPort(portNumber);
- servicePort.setTargetPort(new IntOrString(targetPortNumber));
- servicePorts.add(servicePort);
- }
- return servicePorts;
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesStringUtils.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesStringUtils.java
deleted file mode 100644
index 1e2009720e..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesStringUtils.java
+++ /dev/null
@@ -1,176 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import org.apache.commons.lang.RandomStringUtils;
-import org.apache.commons.lang.StringUtils;
-
-public final class KubernetesStringUtils {
-
- /** Max length of a Kubernetes name or label; */
- private static final int MAX_CHARS = 63;
-
- private static final String DOCKER_PREFIX = "docker://";
-
- private KubernetesStringUtils() {}
-
- /**
- * Converts strings to fit requirements of Kubernetes names and labels. Names in Kubernetes are
- * limited to 63 characters.
- *
- * @param input the string to normalize
- * @return the normalized string.
- */
- public static String getNormalizedString(String input) {
- int end = Math.min(input.length(), MAX_CHARS - 1);
- return input.substring(0, end);
- }
-
- /**
- * @param containerID
- * @return normalized version of 'ContainerID' without 'docker://' prefix and double quotes
- */
- public static String normalizeContainerID(final String containerID) {
- return StringUtils.replaceOnce(containerID, DOCKER_PREFIX, "").replace("\"", "");
- }
-
- /**
- * @param containerID
- * @return label based on 'ContainerID' (first 12 chars of ID)
- */
- public static String getLabelFromContainerID(final String containerID) {
- return StringUtils.substring(containerID, 0, 12);
- }
-
- /**
- * Che workspace id is used as OpenShift service / deployment config name and must match the regex
- * [a-z]([-a-z0-9]*[a-z0-9]) e.g. "q5iuhkwjvw1w9emg"
- *
- * @return randomly generated workspace id
- */
- public static String generateWorkspaceID() {
- return RandomStringUtils.random(16, true, true).toLowerCase();
- }
-
- /**
- * Converts a String into a suitable name for an openshift container. Kubernetes names are limited
- * to 63 chars and must match the regex {@code [a-z0-9]([-a-z0-9]*[a-z0-9])?}
- *
- * @param input the string to convert
- */
- public static String convertToContainerName(String input) {
- if (input.startsWith("workspace")) {
- input = input.replaceFirst("workspace", "");
- }
- return getNormalizedString(input.replaceAll("_", "-"));
- }
-
- /**
- * Converts image stream name (e.g. eclipse/ubuntu_jdk8 to eclipse_ubuntu_jdk8). This has to be
- * done because for OpenShift ImageStream names, the organization component of a docker repository
- * is the namespace of the ImageStream, and so '/' is not supported in ImageStream names.
- *
- * @param repository the original docker repository String.
- * @return
- */
- public static String convertPullSpecToImageStreamName(String repository) {
- repository = stripTagFromPullSpec(repository);
- return getNormalizedString(repository.replaceAll("/", "_"));
- }
-
- /**
- * Generates a name to be used as a tag from a docker repository. In OpenShift, tagging
- * functionality is limited, so while in Docker we may want to
- *
- * {@code docker tag eclipse/ubuntu_jdk8 eclipse-che/},
- *
- * this is not possible in OpenShift. This method returns a trimmed version of {@code
- * }
- *
- * @param repository the target repository spec in a {@code docker tag} command.
- * @return an appropriate tag name
- */
- public static String convertPullSpecToTagName(String repository) {
- String name;
- if (repository.contains("/")) {
- String[] nameSegments = repository.split("/");
- name = nameSegments[nameSegments.length - 1];
- } else {
- name = repository;
- }
- name = stripTagFromPullSpec(name);
- name =
- name.replaceAll("workspace", "")
- .replaceAll("machine", "")
- .replaceAll("che_.*", "")
- .replaceAll("_", "");
-
- return getNormalizedString(name);
- }
-
- /**
- * Gets an ImageStreamTag name from docker pull specs by converting repository strings to suit the
- * convention used in {@link KubernetesStringUtils#convertPullSpecToImageStreamName(String)} and
- * {@link KubernetesStringUtils#convertPullSpecToTagName(String)}.
- *
- * e.g. will convert {@code eclipse/ubuntu_jdk8} and {@code eclipse-che/} into
- * {@code eclipse_ubuntu_jdk8:}
- *
- * @param oldRepository The docker image repository that is tracked by the ImageStream
- * @param newRepository The docker repository that has been tagged to follow oldRepository
- * @return A string that can be used to refer to the ImageStreamTag formed from these
- * repositories.
- */
- public static String createImageStreamTagName(String oldRepository, String newRepository) {
- String tag = convertPullSpecToTagName(newRepository);
- String repo = convertPullSpecToImageStreamName(oldRepository);
- return getNormalizedString(String.format("%s:%s", repo, tag));
- }
-
- /**
- * Gets the ImageStreamName fromm a docker pull spec. For example, provided {@code
- * []/[]/:[]}, will return just {@code }
- *
- * In the case where the pull spec does not contain optional components, this method simply
- * returns the pull spec provided.
- *
- * @param pullSpec
- * @return
- */
- public static String getImageStreamNameFromPullSpec(String pullSpec) {
- return pullSpec.replaceAll(".*/", "").replaceAll(":.*", "");
- }
-
- /**
- * Remove the tag from a pull spec, if applicable. If pull spec does not include a tag, returns
- * the pull spec unchanged.
- *
- * @param pullSpec
- * @return
- */
- public static String stripTagFromPullSpec(String pullSpec) {
- return pullSpec.replaceAll(":.*", "");
- }
-
- /**
- * Gets the tag fromm a docker pull spec. For example, provided {@code
- * []/[]/:[]}, will return just {@code }
- *
- * @param pullSpec
- * @return the tag on the pull spec, or null if pull spec does not contain a tag
- */
- public static String getTagNameFromPullSpec(String pullSpec) {
- if (!pullSpec.contains(":")) {
- return null;
- }
- return pullSpec.replaceAll(".*:", "");
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftConnectorTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftConnectorTest.java
deleted file mode 100644
index 3242dfcf34..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftConnectorTest.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertNotNull;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Set;
-import org.eclipse.che.api.core.notification.EventService;
-import org.eclipse.che.plugin.docker.client.DockerApiVersionPathPrefixProvider;
-import org.eclipse.che.plugin.docker.client.DockerConnectorConfiguration;
-import org.eclipse.che.plugin.docker.client.DockerRegistryAuthResolver;
-import org.eclipse.che.plugin.docker.client.connection.DockerConnectionFactory;
-import org.eclipse.che.plugin.docker.client.json.ContainerConfig;
-import org.eclipse.che.plugin.docker.client.json.ImageConfig;
-import org.eclipse.che.plugin.docker.client.params.CreateContainerParams;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-import org.mockito.testng.MockitoTestNGListener;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Listeners;
-import org.testng.annotations.Test;
-
-@Listeners(MockitoTestNGListener.class)
-public class OpenShiftConnectorTest {
- private static final String[] CONTAINER_ENV_VARIABLES = {"CHE_WORKSPACE_ID=abcd1234"};
- private static final String CHE_DEFAULT_OPENSHIFT_PROJECT_NAME = "eclipse-che";
- private static final int OPENSHIFT_LIVENESS_PROBE_DELAY = 300;
- private static final int OPENSHIFT_LIVENESS_PROBE_TIMEOUT = 1;
- private static final String OPENSHIFT_DEFAULT_WORKSPACE_PERSISTENT_VOLUME_CLAIM =
- "che_claim_data";
- private static final String OPENSHIFT_DEFAULT_WORKSPACE_QUANTITY = "10Gi";
- private static final String OPENSHIFT_DEFAULT_WORKSPACE_STORAGE = "/data/workspaces";
- private static final String OPENSHIFT_DEFAULT_WORKSPACE_PROJECTS_STORAGE = "/projects";
- private static final String CHE_DEFAULT_SERVER_EXTERNAL_ADDRESS = "che.openshift.mini";
- private static final String CHE_WORKSPACE_CPU_LIMIT = "1";
- private static final boolean SECURE_ROUTES = false;
- private static final boolean CREATE_WORKSPACE_DIRS = true;
-
- @Mock private DockerConnectorConfiguration dockerConnectorConfiguration;
- @Mock private DockerConnectionFactory dockerConnectionFactory;
- @Mock private DockerRegistryAuthResolver authManager;
- @Mock private DockerApiVersionPathPrefixProvider dockerApiVersionPathPrefixProvider;
- @Mock private CreateContainerParams createContainerParams;
- @Mock private EventService eventService;
- @Mock private OpenShiftPvcHelper openShiftPvcHelper;
- @Mock private OpenShiftRouteCreator openShiftRouteCreator;
- @Mock private OpenShiftDeploymentCleaner openShiftDeploymentCleaner;
-
- private OpenShiftConnector openShiftConnector;
-
- @BeforeMethod
- private void setup() {
-
- //When
- openShiftConnector =
- new OpenShiftConnector(
- dockerConnectorConfiguration,
- dockerConnectionFactory,
- authManager,
- dockerApiVersionPathPrefixProvider,
- openShiftPvcHelper,
- openShiftRouteCreator,
- openShiftDeploymentCleaner,
- eventService,
- CHE_DEFAULT_SERVER_EXTERNAL_ADDRESS,
- CHE_DEFAULT_OPENSHIFT_PROJECT_NAME,
- OPENSHIFT_LIVENESS_PROBE_DELAY,
- OPENSHIFT_LIVENESS_PROBE_TIMEOUT,
- OPENSHIFT_DEFAULT_WORKSPACE_PERSISTENT_VOLUME_CLAIM,
- OPENSHIFT_DEFAULT_WORKSPACE_QUANTITY,
- OPENSHIFT_DEFAULT_WORKSPACE_STORAGE,
- OPENSHIFT_DEFAULT_WORKSPACE_PROJECTS_STORAGE,
- CHE_WORKSPACE_CPU_LIMIT,
- null,
- SECURE_ROUTES,
- CREATE_WORKSPACE_DIRS);
- }
-
- @Test
- public void shouldGetWorkspaceIDWhenAValidOneIsProvidedInCreateContainerParams()
- throws IOException {
- //Given
- String expectedWorkspaceID = "abcd1234";
- ContainerConfig containerConfig = mock(ContainerConfig.class);
- CreateContainerParams createContainerParams = CreateContainerParams.create(containerConfig);
-
- when(containerConfig.getEnv()).thenReturn(CONTAINER_ENV_VARIABLES);
-
- String workspaceID = openShiftConnector.getCheWorkspaceId(createContainerParams);
-
- //Then
- assertEquals(workspaceID, expectedWorkspaceID);
- }
-
- /** Check that we return empty map if no labels and not a NPE */
- @Test
- public void checkWithNoLabels() {
- ContainerConfig containerConfig = Mockito.mock(ContainerConfig.class);
- when(containerConfig.getLabels()).thenReturn(null);
-
- ImageConfig imageConfig = Mockito.mock(ImageConfig.class);
- when(imageConfig.getLabels()).thenReturn(null);
-
- Map map = openShiftConnector.getLabels(containerConfig, imageConfig);
- assertNotNull(map);
- assertEquals(map.size(), 0);
- }
-
- /** Check that we return empty ports if no export ports and not a NPE */
- @Test
- public void checkWithNoExposedPorts() {
- ContainerConfig containerConfig = Mockito.mock(ContainerConfig.class);
- when(containerConfig.getExposedPorts()).thenReturn(null);
-
- ImageConfig imageConfig = Mockito.mock(ImageConfig.class);
- when(imageConfig.getExposedPorts()).thenReturn(null);
-
- Set mapPorts = openShiftConnector.getExposedPorts(containerConfig, imageConfig);
- assertNotNull(mapPorts);
- assertEquals(mapPorts.size(), 0);
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleanerTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleanerTest.java
deleted file mode 100644
index 93c1c2c990..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleanerTest.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client;
-
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.doNothing;
-import static org.mockito.Mockito.eq;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import static org.testng.Assert.assertEquals;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import org.eclipse.che.api.core.ServerException;
-import org.eclipse.che.api.core.model.workspace.Workspace;
-import org.eclipse.che.api.core.notification.EventService;
-import org.eclipse.che.api.core.notification.EventSubscriber;
-import org.eclipse.che.api.workspace.server.event.ServerIdleEvent;
-import org.eclipse.che.api.workspace.server.model.impl.WorkspaceConfigImpl;
-import org.eclipse.che.api.workspace.server.model.impl.WorkspaceImpl;
-import org.eclipse.che.api.workspace.shared.event.WorkspaceRemovedEvent;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Captor;
-import org.mockito.Mock;
-import org.mockito.MockitoAnnotations;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-public class OpenShiftWorkspaceFilesCleanerTest {
-
- private static final String CHE_OPENSHIFT_PROJECT = "eclipse-che";
- private static final String WORKSPACES_PVC_NAME = "che-data-volume";
- private static final String WORKSPACE_ONE = "testworkspaceone";
- private static final String WORKSPACE_TWO = "testworkspacetwo";
-
- @Mock private OpenShiftPvcHelper pvcHelper;
- @Captor private ArgumentCaptor> subscriberCaptor;
-
- private OpenShiftWorkspaceFilesCleaner cleaner;
-
- @BeforeMethod
- public void setup() {
- MockitoAnnotations.initMocks(this);
- cleaner =
- new OpenShiftWorkspaceFilesCleaner(pvcHelper, CHE_OPENSHIFT_PROJECT, WORKSPACES_PVC_NAME);
- }
-
- @Test
- public void shouldSubscribeToEventService() {
- //given
- EventService eventService = mock(EventService.class);
-
- //when
- cleaner.subscribe(eventService);
-
- //then
- verify(eventService).subscribe(cleaner);
- verify(eventService).subscribe(any(), eq(ServerIdleEvent.class));
- }
-
- @Test
- public void shouldDeleteWorkspaceInQueueOnServerIdleEvent() {
- //given
- OpenShiftWorkspaceFilesCleaner spyCleaner = spy(cleaner);
- doNothing().when(spyCleaner).deleteWorkspacesInQueue();
-
- EventService eventService = mock(EventService.class);
- spyCleaner.subscribe(eventService);
- verify(eventService).subscribe(subscriberCaptor.capture(), eq(ServerIdleEvent.class));
- EventSubscriber subscriber = subscriberCaptor.getValue();
-
- //when
- subscriber.onEvent(new ServerIdleEvent(1000));
-
- //then
- verify(spyCleaner).deleteWorkspacesInQueue();
- }
-
- @Test
- public void shouldDoNothingWithoutIdleEvent() throws ServerException, IOException {
- // Given
- Workspace workspace = generateWorkspace(WORKSPACE_ONE);
-
- // When
- cleaner.onEvent(new WorkspaceRemovedEvent(workspace));
-
- // Then
- verify(pvcHelper, never())
- .createJobPod(
- anyString(),
- anyString(),
- anyString(),
- any(OpenShiftPvcHelper.Command.class),
- any(String[].class));
- }
-
- @Test
- public void shouldDeleteWorkspaceOnIdleEvent() throws ServerException, IOException {
- // Given
- Workspace workspace = generateWorkspace(WORKSPACE_ONE);
-
- // When
- cleaner.onEvent(new WorkspaceRemovedEvent(workspace));
- cleaner.deleteWorkspacesInQueue();
-
- // Then
- verify(pvcHelper, times(1))
- .createJobPod(
- anyString(),
- anyString(),
- anyString(),
- eq(OpenShiftPvcHelper.Command.REMOVE),
- eq(WORKSPACE_ONE));
- }
-
- @Test
- public void shouldDeleteMultipleQueuedWorkspacesAtOnce() throws ServerException, IOException {
- // Given
- Workspace workspaceOne = generateWorkspace(WORKSPACE_ONE);
- Workspace workspaceTwo = generateWorkspace(WORKSPACE_TWO);
- String[] expectedDirs = new String[] {WORKSPACE_ONE, WORKSPACE_TWO};
- ArgumentCaptor dirCaptor = ArgumentCaptor.forClass(String.class);
-
- // When
- cleaner.onEvent(new WorkspaceRemovedEvent(workspaceOne));
- cleaner.onEvent(new WorkspaceRemovedEvent(workspaceTwo));
- cleaner.deleteWorkspacesInQueue();
-
- // Then
- verify(pvcHelper, times(1))
- .createJobPod(
- anyString(),
- anyString(),
- anyString(),
- eq(OpenShiftPvcHelper.Command.REMOVE),
- dirCaptor.capture(), // Varargs capture doesn't seem to work.
- dirCaptor.capture());
-
- List dirs = dirCaptor.getAllValues();
- String[] actualDirs = dirs.toArray(new String[dirs.size()]);
- // Sort arrays to ignore order
- Arrays.sort(actualDirs);
- Arrays.sort(expectedDirs);
- assertEquals(actualDirs, expectedDirs, "Expected all dirs to be deleted when server is idled.");
- }
-
- @Test
- public void shouldRetainQueueIfDeletionFails() throws ServerException, IOException {
- // Given
- Workspace workspaceOne = generateWorkspace(WORKSPACE_ONE);
- when(pvcHelper.createJobPod(any(), any(), any(), any(), any())).thenReturn(false);
-
- // When
- cleaner.onEvent(new WorkspaceRemovedEvent(workspaceOne));
- cleaner.deleteWorkspacesInQueue();
-
- // Then
- verify(pvcHelper, times(1))
- .createJobPod(
- anyString(),
- anyString(),
- anyString(),
- eq(OpenShiftPvcHelper.Command.REMOVE),
- eq(WORKSPACE_ONE));
-
- // When
- cleaner.deleteWorkspacesInQueue();
-
- // Then
- verify(pvcHelper, times(2))
- .createJobPod(
- anyString(),
- anyString(),
- anyString(),
- eq(OpenShiftPvcHelper.Command.REMOVE),
- eq(WORKSPACE_ONE));
- }
-
- @Test
- public void shouldUseProjectNamespaceAndPvcNameAsParameters()
- throws ServerException, IOException {
- // Given
- Workspace workspaceOne = generateWorkspace(WORKSPACE_ONE);
-
- // When
- cleaner.onEvent(new WorkspaceRemovedEvent(workspaceOne));
- cleaner.deleteWorkspacesInQueue();
-
- // Then
- verify(pvcHelper, times(1))
- .createJobPod(
- eq(WORKSPACES_PVC_NAME),
- eq(CHE_OPENSHIFT_PROJECT),
- anyString(),
- eq(OpenShiftPvcHelper.Command.REMOVE),
- eq(WORKSPACE_ONE));
- }
-
- private Workspace generateWorkspace(String id) {
- WorkspaceConfigImpl config = new WorkspaceConfigImpl();
- config.setName(id);
- return new WorkspaceImpl(id, null, config);
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainerTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainerTest.java
deleted file mode 100644
index be4622a701..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainerTest.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import static org.testng.Assert.assertTrue;
-
-import io.fabric8.kubernetes.api.model.ContainerPort;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.eclipse.che.plugin.docker.client.json.ExposedPort;
-import org.testng.annotations.Test;
-
-public class KubernetesContainerTest {
-
- @Test
- public void shouldReturnContainerPortFromExposedPortList() {
- // Given
- Set exposedPorts = new HashSet<>();
- exposedPorts.add("8080/tcp");
- exposedPorts.add("22/tcp");
- exposedPorts.add("4401/tcp");
- exposedPorts.add("4403/tcp");
- Map portsToRefName = new HashMap<>();
- portsToRefName.put("8080/tcp", "tomcat");
-
- // When
- List containerPorts =
- KubernetesContainer.getContainerPortsFrom(exposedPorts, portsToRefName);
-
- // Then
- List portsAndProtocols =
- containerPorts
- .stream()
- .map(p -> Integer.toString(p.getContainerPort()) + "/" + p.getProtocol().toLowerCase())
- .collect(Collectors.toList());
- assertTrue(exposedPorts.stream().allMatch(portsAndProtocols::contains));
- }
-
- @Test
- public void shouldReturnContainerPortListFromImageExposedPortList() {
- // Given
- Map imageExposedPorts = new HashMap<>();
- imageExposedPorts.put("8080/tcp", new ExposedPort());
- Map portsToRefName = new HashMap<>();
- portsToRefName.put("8080/tcp", "tomcat");
-
- // When
- List containerPorts =
- KubernetesContainer.getContainerPortsFrom(imageExposedPorts.keySet(), portsToRefName);
-
- // Then
- List portsAndProtocols =
- containerPorts
- .stream()
- .map(p -> Integer.toString(p.getContainerPort()) + "/" + p.getProtocol().toLowerCase())
- .collect(Collectors.toList());
- assertTrue(imageExposedPorts.keySet().stream().allMatch(portsAndProtocols::contains));
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesEnvVarTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesEnvVarTest.java
deleted file mode 100644
index 9c09b7fb26..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesEnvVarTest.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import static org.testng.Assert.assertTrue;
-
-import io.fabric8.kubernetes.api.model.EnvVar;
-import java.util.Arrays;
-import java.util.List;
-import java.util.stream.Collectors;
-import org.testng.annotations.Test;
-
-public class KubernetesEnvVarTest {
-
- @Test
- public void shouldReturnContainerEnvFromEnvVariableArray() {
- // Given
- String[] envVariables = {
- "CHE_LOCAL_CONF_DIR=/mnt/che/conf",
- "USER_TOKEN=dummy_token",
- "CHE_API_ENDPOINT=http://172.17.0.4:8080/wsmaster/api",
- "JAVA_OPTS=-Xms256m -Xmx2048m -Djava.security.egd=file:/dev/./urandom",
- "CHE_WORKSPACE_ID=workspaceID",
- "CHE_PROJECTS_ROOT=/projects",
- "TOMCAT_HOME=/home/user/tomcat8",
- "M2_HOME=/home/user/apache-maven-3.3.9",
- "TERM=xterm",
- "LANG=en_US.UTF-8"
- };
-
- // When
- List env = KubernetesEnvVar.getEnvFrom(envVariables);
-
- // Then
- List keysAndValues =
- env.stream().map(k -> k.getName() + "=" + k.getValue()).collect(Collectors.toList());
- assertTrue(Arrays.stream(envVariables).allMatch(keysAndValues::contains));
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverterTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverterTest.java
deleted file mode 100644
index 598fefab02..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverterTest.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-
-import java.util.HashMap;
-import java.util.Map;
-import org.testng.annotations.Test;
-
-public class KubernetesLabelConverterTest {
-
- private final String prefix = KubernetesLabelConverter.getCheServerLabelPrefix();
-
- @Test
- public void shouldConvertLabelsToValidKubernetesLabelNames() {
- String validLabelRegex = "([A-Za-z0-9][-A-Za-z0-9_\\.]*)?[A-Za-z0-9]";
-
- // Given
- Map labels = new HashMap<>();
- labels.put(prefix + "4401/tcp:path:", "/api");
- labels.put(prefix + "8000/tcp:ref:", "tomcat-debug");
-
- // When
- Map converted = KubernetesLabelConverter.labelsToNames(labels);
-
- // Then
- for (Map.Entry entry : converted.entrySet()) {
- assertTrue(
- entry.getKey().matches(validLabelRegex),
- String.format("Converted Key %s should be valid Kubernetes label name", entry.getKey()));
- assertTrue(
- entry.getValue().matches(validLabelRegex),
- String.format(
- "Converted Value %s should be valid Kubernetes label name", entry.getValue()));
- }
- }
-
- @Test
- public void shouldBeAbleToRecoverOriginalLabelsAfterConversion() {
- // Given
- Map originalLabels = new HashMap<>();
- originalLabels.put(prefix + "4401/tcp:path:", "/api");
- originalLabels.put(prefix + "8000/tcp:ref:", "tomcat-debug");
-
- // When
- Map converted = KubernetesLabelConverter.labelsToNames(originalLabels);
- Map unconverted = KubernetesLabelConverter.namesToLabels(converted);
-
- // Then
- assertEquals(originalLabels, unconverted);
- }
-
- @Test
- public void shouldIgnoreAndLogProblemLabels() {
- // Given
- Map originalLabels = new HashMap<>();
- Map validLabels = new HashMap<>();
- validLabels.put(prefix + "4401/tcp:path:", "/api");
- validLabels.put(prefix + "8000/tcp:ref:", "tomcat-debug");
- Map invalidLabels = new HashMap<>();
- invalidLabels.put(prefix + "9999/t.cp:path:", "/api");
- invalidLabels.put(prefix + "1111/tcp:path:", "/a_pi");
-
- originalLabels.putAll(validLabels);
- originalLabels.putAll(invalidLabels);
-
- // When
- Map converted = KubernetesLabelConverter.labelsToNames(originalLabels);
- Map unconverted = KubernetesLabelConverter.namesToLabels(converted);
-
- // Then
- assertTrue(
- validLabels.entrySet().stream().allMatch(unconverted.entrySet()::contains),
- "Valid labels should be there when converting + unconverting");
- assertTrue(
- invalidLabels.entrySet().stream().noneMatch(unconverted.entrySet()::contains),
- "Labels with invalid characters should be ignored");
- }
-
- @Test
- public void shouldIgnoreEmptyValues() {
- // Given
- Map originalLabels = new HashMap<>();
- originalLabels.put(prefix + "4401/tcp:path:", null);
- originalLabels.put(prefix + "4402/tcp:path:", "");
- originalLabels.put(prefix + "4403/tcp:path:", " ");
-
- // When
- Map converted = KubernetesLabelConverter.labelsToNames(originalLabels);
-
- // Then
- assertTrue(
- converted.isEmpty(), "Labels with null, empty, or whitespace values should be ignored");
- }
-
- @Test
- public void shouldNotIgnoreValuesWithoutPrefix() {
- // Given
- Map originalLabels = new HashMap<>();
- originalLabels.put("4401/tcp:path:", "/api");
- originalLabels.put(prefix + "8000/tcp:ref:", "tomcat-debug");
-
- // When
- Map converted = KubernetesLabelConverter.labelsToNames(originalLabels);
-
- // Then
- // Currently we put a warning in the logs but convert these labels anyways.
- assertTrue(converted.size() == 2, "Should convert labels even without prefix");
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapterTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapterTest.java
deleted file mode 100644
index 3f2985fa17..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapterTest.java
+++ /dev/null
@@ -1,240 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-
-import java.util.ArrayList;
-import java.util.List;
-import org.eclipse.che.plugin.docker.client.LogMessage;
-import org.eclipse.che.plugin.docker.client.MessageProcessor;
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-public class KubernetesOutputAdapterTest {
-
- private static LogMessage.Type LOG_TYPE = LogMessage.Type.DOCKER;
- private testMessageProcessor processor;
- private KubernetesOutputAdapter adapter;
-
- private class testMessageProcessor implements MessageProcessor {
-
- private List messages;
- private LogMessage.Type type = null;
-
- public testMessageProcessor() {
- this.messages = new ArrayList<>();
- }
-
- @Override
- public void process(LogMessage message) {
- LogMessage.Type messageType = message.getType();
- if (type == null) {
- type = messageType;
- }
- messages.add(message.getContent());
- }
-
- public List getMessages() {
- return new ArrayList<>(messages);
- }
-
- public LogMessage.Type getType() {
- return type;
- }
- };
-
- @BeforeMethod
- public void setUp() {
- processor = new testMessageProcessor();
- adapter = new KubernetesOutputAdapter(LOG_TYPE, processor);
- }
-
- @Test
- public void shouldBreakLinesCorrectly() {
- // Given
- byte[] input = "line1\nline2\n".getBytes();
- List expected = generateExpected("line1", "line2");
-
- // When
- adapter.call(input);
-
- // Then
- List actual = processor.getMessages();
- assertEquals(actual, expected, "Should break lines on \\n char");
- }
-
- @Test
- public void shouldCacheUnfinishedLinesBetweenCalls() {
- // Given
- byte[] firstInput = "line1\nlin".getBytes();
- byte[] secondInput = "e2\nline3\n".getBytes();
- List expected = generateExpected("line1", "line2", "line3");
-
- // When
- adapter.call(firstInput);
- adapter.call(secondInput);
-
- // Then
- List actual = processor.getMessages();
- assertEquals(actual, expected, "Should store unfinished lines between calls");
- }
-
- @Test
- public void shouldUseProvidedLogMessageType() {
- for (LogMessage.Type type : LogMessage.Type.values()) {
- // Given
- byte[] input = "line1\n".getBytes();
- LogMessage.Type expected = type;
- processor = new testMessageProcessor();
- adapter = new KubernetesOutputAdapter(type, processor);
-
- // When
- adapter.call(input);
-
- // Then
- LogMessage.Type actual = processor.getType();
- assertEquals(actual, expected, "Should call MessageProcessor with provided type");
- }
- }
-
- @Test
- public void shouldBreakLinesNormallyWithCarriageReturn() {
- // Given
- byte[] input = "line1\r\nline2\n".getBytes();
- List expected = generateExpected("line1", "line2");
-
- // When
- adapter.call(input);
-
- // Then
- List actual = processor.getMessages();
- assertEquals(actual, expected, "Should break lines normally on \\r\\n characters");
- }
-
- @Test
- public void shouldNotIgnoreEmptyLines() {
- // Given
- byte[] input = "line1\n\nline2\n".getBytes();
- List expected = generateExpected("line1", "", "line2");
-
- // When
- adapter.call(input);
-
- // Then
- List actual = processor.getMessages();
- assertEquals(actual, expected, "Should call processor.process() with empty Strings");
- }
-
- @Test
- public void shouldNotCallWithoutFinalNewline() {
- // Given
- byte[] input = "line1\nline2".getBytes(); // No trailing \n
- List firstExpected = generateExpected("line1");
- List secondExpected = generateExpected("line1", "line2");
-
- // When
- adapter.call(input);
-
- // Then
- List firstActual = processor.getMessages();
- assertEquals(
- firstActual,
- firstExpected,
- "Should only process lines when they are terminated by \\n or \\r\\n");
-
- // When
- adapter.call("\n".getBytes());
-
- // Then
- List secondActual = processor.getMessages();
- assertEquals(secondActual, secondExpected, "Should buffer lines until newline is encountered.");
- }
-
- @Test
- public void shouldIgnoreNullCalls() {
- // Given
- byte[] firstInput = "line1\n".getBytes();
- byte[] secondInput = "line2\n".getBytes();
- List expected = generateExpected("line1", "line2");
-
- // When
- adapter.call(firstInput);
- adapter.call(null);
- adapter.call(secondInput);
-
- // Then
- List actual = processor.getMessages();
- assertEquals(actual, expected, "Should ignore calls with null arguments");
- }
-
- @Test
- public void shouldKeepBufferPastNullCalls() {
- // Given
- byte[] firstInput = "lin".getBytes();
- byte[] secondInput = "e1\nline2\n".getBytes();
- List expected = generateExpected("line1", "line2");
-
- // When
- adapter.call(firstInput);
- adapter.call(null);
- adapter.call(secondInput);
-
- // Then
- List actual = processor.getMessages();
- assertEquals(actual, expected, "Should ignore calls with null arguments");
- }
-
- @Test
- public void shouldDoNothingWhenExecOutputProcessorIsNull() {
- // Given
- byte[] firstInput = "line1\n".getBytes();
- byte[] secondInput = "line2\n".getBytes();
- adapter = new KubernetesOutputAdapter(LOG_TYPE, null);
-
- // When
- adapter.call(firstInput);
- adapter.call(secondInput);
-
- // Then
- List actual = processor.getMessages();
- assertTrue(actual.isEmpty(), "Should do nothing when ExecOutputProcessor is null");
- }
-
- @Test
- public void shouldIgnoreCallsWhenDataIsEmpty() {
- // Given
- byte[] emptyInput = "".getBytes();
- byte[] firstInput = "line1\n".getBytes();
- byte[] secondInput = "line2\n".getBytes();
- List expected = generateExpected("line1", "line2");
-
- // When
- adapter.call(emptyInput);
- adapter.call(firstInput);
- adapter.call(emptyInput);
- adapter.call(secondInput);
- adapter.call(emptyInput);
-
- // Then
- List actual = processor.getMessages();
- assertEquals(actual, expected, "KubernetesOutputAdapter ignore empty data calls");
- }
-
- private List generateExpected(String... strings) {
- List expected = new ArrayList<>();
- for (String string : strings) {
- expected.add(string);
- }
- return expected;
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesServiceTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesServiceTest.java
deleted file mode 100644
index 891e4000b6..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesServiceTest.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import static org.testng.Assert.assertTrue;
-
-import io.fabric8.kubernetes.api.model.ServicePort;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.eclipse.che.plugin.docker.client.json.ExposedPort;
-import org.testng.annotations.Test;
-
-public class KubernetesServiceTest {
-
- @Test
- public void shouldReturnServicePortListFromImageExposedPortList() {
- // Given
- Map imageExposedPorts = new HashMap<>();
- imageExposedPorts.put("8080/TCP", new ExposedPort());
- Map portsToRefName = new HashMap<>();
- portsToRefName.put("8080/tcp", "tomcat");
-
- // When
- List servicePorts =
- KubernetesService.getServicePortsFrom(imageExposedPorts.keySet(), portsToRefName);
-
- // Then
- List portsAndProtocols =
- servicePorts
- .stream()
- .map(p -> Integer.toString(p.getPort()) + "/" + p.getProtocol())
- .collect(Collectors.toList());
- assertTrue(imageExposedPorts.keySet().stream().allMatch(portsAndProtocols::contains));
- }
-
- @Test
- public void shouldReturnServicePortListFromExposedPortList() {
- // Given
- Map> exposedPorts = new HashMap<>();
- exposedPorts.put("8080/TCP", null);
- exposedPorts.put("22/TCP", null);
- exposedPorts.put("4401/TCP", null);
- exposedPorts.put("4403/TCP", null);
- Map portsToRefName = new HashMap<>();
- portsToRefName.put("8080/tcp", "tomcat");
-
- // When
- List servicePorts =
- KubernetesService.getServicePortsFrom(exposedPorts.keySet(), portsToRefName);
-
- // Then
- List portsAndProtocols =
- servicePorts
- .stream()
- .map(p -> Integer.toString(p.getPort()) + "/" + p.getProtocol())
- .collect(Collectors.toList());
- assertTrue(exposedPorts.keySet().stream().allMatch(portsAndProtocols::contains));
- }
-
- @Test
- public void shouldReturnServicePortNameWhenKnownPortNumberIsProvided() {
- // Given
- Map> exposedPorts = new HashMap<>();
- exposedPorts.put("22/tcp", null);
- exposedPorts.put("4401/tcp", null);
- exposedPorts.put("4403/tcp", null);
- exposedPorts.put("4411/tcp", null);
- exposedPorts.put("4412/tcp", null);
- exposedPorts.put("8080/tcp", null);
- exposedPorts.put("8000/tcp", null);
- exposedPorts.put("9876/tcp", null);
- Map portsToRefName = new HashMap<>();
- portsToRefName.put("22/tcp", "sshd");
- portsToRefName.put("4401/tcp", "wsagent");
- portsToRefName.put("4403/tcp", "wsagent-jpda");
- portsToRefName.put("4411/tcp", "terminal");
- portsToRefName.put("4412/tcp", "exec-agent");
- portsToRefName.put("8080/tcp", "tomcat");
- portsToRefName.put("8000/tcp", "tomcat-jpda");
- portsToRefName.put("9876/tcp", "codeserver");
-
- Set expectedPortNames = new HashSet<>();
- expectedPortNames.add("sshd");
- expectedPortNames.add("wsagent");
- expectedPortNames.add("wsagent-jpda");
- expectedPortNames.add("terminal");
- expectedPortNames.add("exec-agent");
- expectedPortNames.add("tomcat");
- expectedPortNames.add("tomcat-jpda");
- expectedPortNames.add("codeserver");
-
- // When
- List servicePorts =
- KubernetesService.getServicePortsFrom(exposedPorts.keySet(), portsToRefName);
- List actualPortNames =
- servicePorts.stream().map(p -> p.getName()).collect(Collectors.toList());
-
- // Then
- assertTrue(actualPortNames.stream().allMatch(expectedPortNames::contains));
- }
-
- @Test
- public void shouldReturnServicePortNameWhenUnknownPortNumberIsProvided() {
- // Given
- Map> exposedPorts = new HashMap<>();
- exposedPorts.put("55/tcp", null);
- Map portsToRefName = new HashMap<>();
- portsToRefName.put("8080/tcp", "tomcat");
-
- Set expectedPortNames = new HashSet<>();
- expectedPortNames.add("server-55-tcp");
-
- // When
- List servicePorts =
- KubernetesService.getServicePortsFrom(exposedPorts.keySet(), portsToRefName);
- List actualPortNames =
- servicePorts.stream().map(p -> p.getName()).collect(Collectors.toList());
-
- // Then
- assertTrue(actualPortNames.stream().allMatch(expectedPortNames::contains));
- }
-}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesStringUtilsTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesStringUtilsTest.java
deleted file mode 100644
index c00587c1a3..0000000000
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesStringUtilsTest.java
+++ /dev/null
@@ -1,228 +0,0 @@
-/*
- * Copyright (c) 2012-2017 Red Hat, Inc.
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Red Hat, Inc. - initial API and implementation
- */
-package org.eclipse.che.plugin.openshift.client.kubernetes;
-
-import static org.testng.Assert.assertEquals;
-import static org.testng.Assert.assertTrue;
-
-import org.apache.commons.lang.RandomStringUtils;
-import org.testng.annotations.Test;
-
-public class KubernetesStringUtilsTest {
-
- @Test
- public void getNormalizedStringShouldTrimLongStrings() {
- // Given
- String input = RandomStringUtils.random(70, true, true);
- String expected = input.substring(0, 62);
-
- // When
- String output = KubernetesStringUtils.getNormalizedString(input);
-
- // Then
- assertEquals(output, expected, "getNormalizedString should limit string length");
- }
-
- @Test
- public void getNormalizedStringShouldDoNothingWithShortStrings() {
- // Given
- String input = RandomStringUtils.random(24, true, true);
- String expected = input;
-
- // When
- String output = KubernetesStringUtils.getNormalizedString(input);
-
- // Then
- assertEquals(output, expected, "getNormalizedString should do nothing to short strings");
- }
-
- @Test
- public void convertPullSpecToImageStreamNameShouldTrimTag() {
- // Given
- String input = "testImage:testTag";
- String expected = "testImage";
-
- // When
- String output = KubernetesStringUtils.convertPullSpecToImageStreamName(input);
-
- // Then
- assertEquals(output, expected, "Should trim tag off pull spec");
- }
-
- @Test
- public void convertPullSpecToImageStreamNameShouldBeValidOpenShiftName() {
- // Given
- String input = "eclipse/ubuntu_jdk8";
-
- // When
- String output = KubernetesStringUtils.convertPullSpecToImageStreamName(input);
-
- // Then
- assertTrue(!output.contains("/"), "Should remove invalid chars from ImageStream name");
- }
-
- @Test
- public void converPullSpecToImageStreamNameShouldLimitLength() {
- // Given
- String input = RandomStringUtils.random(100, true, false);
-
- // When
- String output = KubernetesStringUtils.convertPullSpecToImageStreamName(input);
-
- // Then
- assertTrue(output.length() < 64, "ImageStream name cannot be over 63 chars");
- }
-
- @Test
- public void convertPullSpecToTagNameShouldIgnoreRegistryAndTag() {
- // Given
- String inputWithRegistry = "registry/organisation/image:tag";
- String inputWithoutRegistry = "image";
-
- // When
- String outputWithRegistry = KubernetesStringUtils.convertPullSpecToTagName(inputWithRegistry);
- String outputWithoutRegistry =
- KubernetesStringUtils.convertPullSpecToTagName(inputWithoutRegistry);
-
- // Then
- assertEquals(
- outputWithoutRegistry,
- outputWithRegistry,
- "Converting pull spec to tag name should only use image name");
- }
-
- @Test
- public void convertPullSpecToTagNameShouldLimitLength() {
- // Given
- String input = RandomStringUtils.random(100, true, false);
-
- // When
- String output = KubernetesStringUtils.convertPullSpecToTagName(input);
-
- // Then
- assertTrue(output.length() < 63, "ImageStream tag cannot be over 63 chars");
- }
-
- @Test
- public void createImageStreamTagNameShouldConvertNameInSameWayAsConvertPullSpec() {
- // Given
- String inputOldRepo = "eclipse/ubuntu_jdk8";
- String inputNewRepo = "eclipse-che/che-workspace_" + RandomStringUtils.random(20);
- String expectedImageStreamName =
- KubernetesStringUtils.convertPullSpecToImageStreamName(inputOldRepo);
-
- // When
- String rawOutput = KubernetesStringUtils.createImageStreamTagName(inputOldRepo, inputNewRepo);
-
- // Then
- assertTrue(rawOutput.contains(":"), "ImageStreamTag name is invalid: must contain ':'");
- String outputImageStreamName = rawOutput.split(":")[0];
- assertEquals(
- outputImageStreamName,
- expectedImageStreamName,
- "ImageStreamName should match output of convertPullSpecToImageStreamName");
- }
-
- @Test
- public void createImageStreamTagNameShouldConvertTagInSameWayAsConvertPullSpec() {
- // Given
- String inputOldRepo = "eclipse/ubuntu_jdk8";
- String inputNewRepo = "eclipse-che/che-workspace_" + RandomStringUtils.random(20);
- String expectedTagName = KubernetesStringUtils.convertPullSpecToTagName(inputNewRepo);
-
- // When
- String rawOutput = KubernetesStringUtils.createImageStreamTagName(inputOldRepo, inputNewRepo);
-
- // Then
- assertTrue(rawOutput.contains(":"), "ImageStreamTag name is invalid: must contain ':'");
- String outputImageStreamName = rawOutput.split(":")[1];
- assertEquals(
- outputImageStreamName,
- expectedTagName,
- "ImageStream Tag should match output of convertPullSpecToTagName");
- }
-
- @Test
- public void createImageStreamTagNameShouldLimitLengthOfCreatedTag() {
- // Given
- String inputOldRepo = RandomStringUtils.random(50, true, false);
- String inputNewRepo = RandomStringUtils.random(50, true, false);
-
- // When
- String output = KubernetesStringUtils.createImageStreamTagName(inputOldRepo, inputNewRepo);
-
- // Then
- assertTrue(output.length() < 63, "ImageStreamTags must be shorter than 63 characters");
- }
-
- @Test
- public void getImageStreamNameFromPullSpecShouldReturnOnlyImageName() {
- // Given
- String input = "registry/organisation/imagename:tagname";
- String expected = "imagename";
-
- // When
- String output = KubernetesStringUtils.getImageStreamNameFromPullSpec(input);
-
- // Then
- assertEquals(output, expected);
- }
-
- @Test
- public void stripTagFromPullSpecShouldRemoveTag() {
- // Given
- String input = "registry/organisation/imagename:tagname";
- String expected = "registry/organisation/imagename";
-
- // When
- String output = KubernetesStringUtils.stripTagFromPullSpec(input);
-
- // Then
- assertEquals(output, expected);
- }
-
- @Test
- public void stripTagFromPullSpecShouldDoNothingIfNoTag() {
- // Given
- String input = "registry/organisation/imagename";
-
- // When
- String output = KubernetesStringUtils.stripTagFromPullSpec(input);
-
- // Then
- assertEquals(output, input);
- }
-
- @Test
- public void getTagNameFromPullSpecShouldReturnTag() {
- // Given
- String input = "registry/organisation/imagename:tagname";
- String expected = "tagname";
-
- // When
- String output = KubernetesStringUtils.getTagNameFromPullSpec(input);
-
- // Then
- assertEquals(output, expected);
- }
-
- @Test
- public void getTagNameFromPullSpecShouldReturnNullWhenPullSpecDoesNotHaveTag() {
- // Given
- String input = "registry/organisation/imagename";
-
- // When
- String output = KubernetesStringUtils.getTagNameFromPullSpec(input);
-
- // Then
- assertEquals(output, null);
- }
-}
diff --git a/plugins/plugin-docker/pom.xml b/plugins/plugin-docker/pom.xml
deleted file mode 100644
index c9b4b86a66..0000000000
--- a/plugins/plugin-docker/pom.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-
-
-
- 4.0.0
-
- che-plugin-parent
- org.eclipse.che.plugin
- 5.20.0-SNAPSHOT
- ../pom.xml
-
- che-plugin-docker-parent
- pom
- Che Plugin :: Docker :: Parent
-
- che-plugin-docker-client
- che-plugin-openshift-client
-
-
- ${project.build.directory}/generated-sources/dto/
- true
-
-
-
-
-
- org.eclipse.che.core
- che-core-api-dto-maven-plugin
- ${project.version}
-
-
-
-
-
-
diff --git a/plugins/plugin-machine/che-plugin-machine-ext-server/pom.xml b/plugins/plugin-machine/che-plugin-machine-ext-server/pom.xml
index a3afea4748..a2eef0aae2 100644
--- a/plugins/plugin-machine/che-plugin-machine-ext-server/pom.xml
+++ b/plugins/plugin-machine/che-plugin-machine-ext-server/pom.xml
@@ -53,8 +53,8 @@
che-core-api-workspace-shared