vms = new ArrayList<>();
- for (String volume : volumes) {
- String mountPath = volume.split(":",3)[1];
- String volumeName = getVolumeName(volume);
+ private void createWorkspaceDir(String[] volumes) throws OpenShiftException {
+ PersistentVolumeClaim pvc = getClaimCheWorkspace();
+ String workspaceSubpath = getWorkspaceSubpath(volumes);
+ if (pvc != null && !isNullOrEmpty(workspaceSubpath)) {
+ LOG.info("Making sure directory exists for workspace {}", workspaceSubpath);
+ boolean succeeded = openShiftPvcHelper.createJobPod(workspacesPersistentVolumeClaim,
+ openShiftCheProjectName,
+ "create-",
+ OpenShiftPvcHelper.Command.MAKE,
+ workspaceSubpath);
+ if (!succeeded) {
+ LOG.error("Failed to create workspace directory {} in PVC {}", workspaceSubpath,
+ workspacesPersistentVolumeClaim);
+ throw new OpenShiftException("Failed to create workspace directory in PVC");
+ }
+ }
+ }
- VolumeMount vm = new VolumeMountBuilder()
- .withMountPath(mountPath)
- .withName("ws-" + workspaceID + "-" + volumeName)
+ /**
+ * Gets the workspace subpath from an array of volumes. Since volumes provided are
+ * those used when running Che in Docker, most of the volume spec is ignored; this
+ * method returns the subpath within the hostpath that refers to the workspace.
+ *
+ * E.g. for a volume {@code /data/workspaces/wksp-8z00:/projects:Z}, this method will return
+ * "wksp-8z00".
+ *
+ * @param volumes the Docker-style volume specs to inspect
+ * @return the workspace subpath within the hostpath, or {@code null} if no
+ *         volume matches the projects mount path
+ */
+ private String getWorkspaceSubpath(String[] volumes) {
+ String workspaceSubpath = null;
+ for (String volume : volumes) {
+ // Volumes are structured <hostPath>:<mountPath>:<options>.
+ // We first check that <mountPath> matches the mount path for projects
+ // and then extract the hostpath directory. The first part of the volume
+ // String will be structured <cheWorkspaceStorage>/<workspaceName>.
+ String mountPath = volume.split(":", 3)[1];
+ if (cheWorkspaceProjectsStorage.equals(mountPath)) {
+ workspaceSubpath = volume.split(":", 3)[0].replaceAll(cheWorkspaceStorage, "");
+ if (workspaceSubpath.startsWith("/")) {
+ workspaceSubpath = workspaceSubpath.substring(1);
+ }
+ }
+ }
+ return workspaceSubpath;
+ }
+
+ private List getVolumeMountsFrom(String[] volumes) {
+ List vms = new ArrayList<>();
+ PersistentVolumeClaim pvc = getClaimCheWorkspace();
+ if (pvc != null) {
+ String subPath = getWorkspaceSubpath(volumes);
+ if (subPath != null) {
+ VolumeMount vm = new VolumeMountBuilder()
+ .withMountPath(cheWorkspaceProjectsStorage)
+ .withName(workspacesPersistentVolumeClaim)
+ .withSubPath(subPath)
.build();
- vms.add(vm);
+ vms.add(vm);
+ }
}
return vms;
}
- private List getVolumesFrom(String[] volumes, String workspaceID) {
+ private List getVolumesFrom(String[] volumes) {
List vs = new ArrayList<>();
- for (String volume : volumes) {
- String hostPath = volume.split(":",3)[0];
- String volumeName = getVolumeName(volume);
-
- Volume v = new VolumeBuilder()
- .withNewHostPath(hostPath)
- .withName("ws-" + workspaceID + "-" + volumeName)
- .build();
- vs.add(v);
+ PersistentVolumeClaim pvc = getClaimCheWorkspace();
+ if (pvc != null) {
+ for (String volume : volumes) {
+ String mountPath = volume.split(":",3)[1];
+ if (cheWorkspaceProjectsStorage.equals(mountPath)) {
+ PersistentVolumeClaimVolumeSource pvcs = new PersistentVolumeClaimVolumeSourceBuilder()
+ .withClaimName(workspacesPersistentVolumeClaim)
+ .build();
+ Volume v = new VolumeBuilder()
+ .withPersistentVolumeClaim(pvcs)
+ .withName(workspacesPersistentVolumeClaim)
+ .build();
+ vs.add(v);
+ }
+ }
}
return vs;
}
- private String getVolumeName(String volume) {
- if (volume.contains("ws-agent")) {
- return "wsagent-lib";
+ private PersistentVolumeClaim getClaimCheWorkspace() {
+ try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
+ PersistentVolumeClaimList pvcList = openShiftClient.persistentVolumeClaims().inNamespace(openShiftCheProjectName).list();
+ for(PersistentVolumeClaim pvc: pvcList.getItems()) {
+ if (workspacesPersistentVolumeClaim.equals(pvc.getMetadata().getName())) {
+ return pvc;
+ }
+ }
+ Map requests = new HashMap<>();
+ requests.put("storage", new Quantity(workspacesPvcQuantity));
+ Map annotations = Collections.singletonMap(OPENSHIFT_VOLUME_STORAGE_CLASS, OPENSHIFT_VOLUME_STORAGE_CLASS_NAME);
+ PersistentVolumeClaim pvc = new PersistentVolumeClaimBuilder()
+ .withNewMetadata()
+ .withName(workspacesPersistentVolumeClaim)
+ .withAnnotations(annotations)
+ .endMetadata()
+ .withNewSpec()
+ .withAccessModes("ReadWriteOnce")
+ .withNewResources()
+ .withRequests(requests)
+ .endResources()
+ .endSpec()
+ .build();
+ pvc = openShiftClient.persistentVolumeClaims().inNamespace(openShiftCheProjectName).create(pvc);
+ LOG.info("Creating OpenShift PVC {}", pvc.getMetadata().getName());
+ return pvc;
}
-
- if (volume.contains("terminal")) {
- return "terminal";
- }
-
- if (volume.contains("workspaces")) {
- return "project";
- }
-
- return "unknown-volume";
}
private String waitAndRetrieveContainerID(String deploymentName) throws IOException {
- for (int i = 0; i < OPENSHIFT_WAIT_POD_TIMEOUT; i++) {
- try {
- Thread.sleep(OPENSHIFT_WAIT_POD_DELAY);
- } catch (InterruptedException ex) {
- Thread.currentThread().interrupt();
- }
+ try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
+ for (int i = 0; i < OPENSHIFT_WAIT_POD_TIMEOUT; i++) {
+ try {
+ Thread.sleep(OPENSHIFT_WAIT_POD_DELAY);
+ } catch (InterruptedException ex) {
+ Thread.currentThread().interrupt();
+ }
+
+ List pods = openShiftClient.pods()
+ .inNamespace(this.openShiftCheProjectName)
+ .withLabel(OPENSHIFT_DEPLOYMENT_LABEL, deploymentName)
+ .list()
+ .getItems();
+
+ if (pods.size() < 1) {
+ throw new OpenShiftException(String.format("Pod with deployment name %s not found",
+ deploymentName));
+ } else if (pods.size() > 1) {
+ throw new OpenShiftException(String.format("Multiple pods with deployment name %s found",
+ deploymentName));
+ }
- List pods = openShiftClient.pods()
- .inNamespace(this.openShiftCheProjectName)
- .withLabel(OPENSHIFT_DEPLOYMENT_LABEL, deploymentName)
- .list()
- .getItems();
-
- if (pods.size() < 1) {
- throw new OpenShiftException(String.format("Pod with deployment name %s not found",
- deploymentName));
- } else if (pods.size() > 1) {
- throw new OpenShiftException(String.format("Multiple pods with deployment name %s found",
- deploymentName));
- }
-
- Pod pod = pods.get(0);
- String status = pod.getStatus().getPhase();
- if (OPENSHIFT_POD_STATUS_RUNNING.equals(status)) {
- String containerID = pod.getStatus().getContainerStatuses().get(0).getContainerID();
- String normalizedID = KubernetesStringUtils.normalizeContainerID(containerID);
- openShiftClient.pods()
- .inNamespace(openShiftCheProjectName)
- .withName(pod.getMetadata().getName())
- .edit()
- .editMetadata()
- .addToLabels(CHE_CONTAINER_IDENTIFIER_LABEL_KEY,
- KubernetesStringUtils.getLabelFromContainerID(normalizedID))
- .endMetadata()
- .done();
- return normalizedID;
+ Pod pod = pods.get(0);
+ String status = pod.getStatus().getPhase();
+ if (OPENSHIFT_POD_STATUS_RUNNING.equals(status)) {
+ String containerID = pod.getStatus().getContainerStatuses().get(0).getContainerID();
+ String normalizedID = KubernetesStringUtils.normalizeContainerID(containerID);
+ openShiftClient.pods()
+ .inNamespace(openShiftCheProjectName)
+ .withName(pod.getMetadata().getName())
+ .edit()
+ .editMetadata()
+ .addToLabels(CHE_CONTAINER_IDENTIFIER_LABEL_KEY,
+ KubernetesStringUtils.getLabelFromContainerID(normalizedID))
+ .endMetadata()
+ .done();
+ return normalizedID;
+ }
}
+ return null;
}
- return null;
}
/**
@@ -1127,19 +1654,6 @@ public class OpenShiftConnector extends DockerConnector {
return exposedPorts;
}
- /**
- * When container is expected to be run as root, user field from {@link ImageConfig} is empty.
- * For non-root user it contains "user" value
- *
- * @param imageName
- * @return true if user property from Image config is empty string, false otherwise
- * @throws IOException
- */
- private boolean runContainerAsRoot(final String imageName) throws IOException {
- String user = inspectImage(InspectImageParams.create(imageName)).getConfig().getUser();
- return user != null && user.isEmpty();
- }
-
/**
* @param exposedPorts
* @return true if machine exposes 4411/tcp port used by Terminal agent,
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftPvcHelper.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftPvcHelper.java
new file mode 100644
index 0000000000..8bf5af0116
--- /dev/null
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftPvcHelper.java
@@ -0,0 +1,232 @@
+/*******************************************************************************
+ * Copyright (c) 2012-2017 Red Hat, Inc.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Red Hat, Inc. - initial API and implementation
+ *******************************************************************************/
+
+package org.eclipse.che.plugin.openshift.client;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import io.fabric8.kubernetes.api.model.Container;
+import io.fabric8.kubernetes.api.model.ContainerBuilder;
+import io.fabric8.kubernetes.api.model.PersistentVolumeClaimVolumeSource;
+import io.fabric8.kubernetes.api.model.PersistentVolumeClaimVolumeSourceBuilder;
+import io.fabric8.kubernetes.api.model.Pod;
+import io.fabric8.kubernetes.api.model.PodBuilder;
+import io.fabric8.kubernetes.api.model.Quantity;
+import io.fabric8.kubernetes.api.model.Volume;
+import io.fabric8.kubernetes.api.model.VolumeBuilder;
+import io.fabric8.kubernetes.api.model.VolumeMount;
+import io.fabric8.kubernetes.api.model.VolumeMountBuilder;
+import io.fabric8.openshift.client.DefaultOpenShiftClient;
+import io.fabric8.openshift.client.OpenShiftClient;
+
+/**
+ * Helper class for executing simple commands in a Persistent Volume on OpenShift.
+ *
+ * Creates a short-lived Pod using a CentOS image which mounts a specified PVC and
+ * executes a command (either {@code mkdir -p <dir>} or {@code rm -rf <dir>}).
+ * For mkdir commands, an in-memory list of created workspaces is stored and used to avoid
+ * calling mkdir unnecessarily. However, this list is not persisted, so dir creation is
+ * not tracked between restarts.
+ *
+ * @author amisevsk
+ */
+public class OpenShiftPvcHelper {
+
+ private static final Logger LOG = LoggerFactory.getLogger(OpenShiftPvcHelper.class);
+
+ private static final String POD_PHASE_SUCCEEDED = "Succeeded";
+ private static final String POD_PHASE_FAILED = "Failed";
+ private static final String[] MKDIR_WORKSPACE_COMMAND = new String[] {"mkdir", "-p"};
+ private static final String[] RMDIR_WORKSPACE_COMMAND = new String[] {"rm", "-rf"};
+
+ private static final Set createdWorkspaces = ConcurrentHashMap.newKeySet();
+
+ private final String jobImage;
+ private final String jobMemoryLimit;
+
+ protected enum Command {REMOVE, MAKE}
+
+ @Inject
+ protected OpenShiftPvcHelper(@Named("che.openshift.jobs.image") String jobImage,
+ @Named("che.openshift.jobs.memorylimit") String jobMemoryLimit) {
+ this.jobImage = jobImage;
+ this.jobMemoryLimit = jobMemoryLimit;
+ }
+
+ /**
+ * Creates a pod with {@code command} and reports whether it succeeded
+ * @param workspacesPvcName
+ * name of the PVC to mount
+ * @param projectNamespace
+ * OpenShift namespace
+ * @param jobNamePrefix
+ * prefix used for pod metadata name. Name structure will normally
+ * be {@code <jobNamePrefix><workspaceDir>} if only one path is passed, or
+ * {@code <jobNamePrefix>batch} if multiple paths are provided
+ * @param command
+ * command to execute in PVC.
+ * @param workspaceDirs
+ * list of arguments attached to command. A list of directories to
+ * create/delete.
+ * @return true if Pod terminates with phase "Succeeded" or mkdir command issued
+ * for already created workspace, false otherwise.
+ *
+ * @see Command
+ */
+ protected boolean createJobPod(String workspacesPvcName,
+ String projectNamespace,
+ String jobNamePrefix,
+ Command command,
+ String... workspaceDirs) {
+
+ if (workspaceDirs.length == 0) {
+ return true;
+ }
+
+ if (Command.MAKE.equals(command)) {
+ String[] dirsToCreate = filterDirsToCreate(workspaceDirs);
+ if (dirsToCreate.length == 0) {
+ return true;
+ }
+ workspaceDirs = dirsToCreate;
+ }
+
+ VolumeMount vm = new VolumeMountBuilder()
+ .withMountPath("/projects")
+ .withName(workspacesPvcName)
+ .build();
+
+ PersistentVolumeClaimVolumeSource pvcs = new PersistentVolumeClaimVolumeSourceBuilder()
+ .withClaimName(workspacesPvcName)
+ .build();
+
+ Volume volume = new VolumeBuilder()
+ .withPersistentVolumeClaim(pvcs)
+ .withName(workspacesPvcName)
+ .build();
+
+ String[] jobCommand = getCommand(command, "/projects/", workspaceDirs);
+ LOG.info("Executing command {} in PVC {} for {} dirs", jobCommand[0], workspacesPvcName, workspaceDirs.length);
+
+ Map limit = Collections.singletonMap("memory", new Quantity(jobMemoryLimit));
+
+ String podName = workspaceDirs.length > 1 ? jobNamePrefix + "batch"
+ : jobNamePrefix + workspaceDirs[0];
+
+ Container container = new ContainerBuilder().withName(podName)
+ .withImage(jobImage)
+ .withImagePullPolicy("IfNotPresent")
+ .withNewSecurityContext()
+ .withPrivileged(false)
+ .endSecurityContext()
+ .withCommand(jobCommand)
+ .withVolumeMounts(vm)
+ .withNewResources()
+ .withLimits(limit)
+ .endResources()
+ .build();
+
+ Pod podSpec = new PodBuilder().withNewMetadata()
+ .withName(podName)
+ .endMetadata()
+ .withNewSpec()
+ .withContainers(container)
+ .withVolumes(volume)
+ .withRestartPolicy("Never")
+ .endSpec()
+ .build();
+
+
+ try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()){
+ openShiftClient.pods().inNamespace(projectNamespace).create(podSpec);
+ boolean completed = false;
+ while(!completed) {
+ Pod pod = openShiftClient.pods().inNamespace(projectNamespace).withName(podName).get();
+ String phase = pod.getStatus().getPhase();
+ switch (phase) {
+ case POD_PHASE_FAILED:
+ LOG.info("Pod command {} failed", Arrays.toString(jobCommand));
+ case POD_PHASE_SUCCEEDED:
+ openShiftClient.resource(pod).delete();
+ updateCreatedDirs(command, phase, workspaceDirs);
+ return POD_PHASE_SUCCEEDED.equals(phase);
+ default:
+ Thread.sleep(1000);
+ }
+ }
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ }
+ return false;
+ }
+
+ private String[] getCommand(Command commandType, String mountPath, String... dirs) {
+ String[] command = new String[0];
+ switch (commandType) {
+ case MAKE :
+ command = MKDIR_WORKSPACE_COMMAND;
+ break;
+ case REMOVE :
+ command = RMDIR_WORKSPACE_COMMAND;
+ break;
+ }
+
+ String[] dirsWithPath = Arrays.asList(dirs).stream()
+ .map(dir -> mountPath + dir)
+ .toArray(String[]::new);
+
+ String[] fullCommand = new String[command.length + dirsWithPath.length];
+
+ System.arraycopy(command, 0, fullCommand, 0, command.length);
+ System.arraycopy(dirsWithPath, 0, fullCommand, command.length, dirsWithPath.length);
+ return fullCommand;
+ }
+
+ private void updateCreatedDirs(Command command, String phase, String... workspaceDirs) {
+ if (!POD_PHASE_SUCCEEDED.equals(phase)) {
+ return;
+ }
+ List dirs = Arrays.asList(workspaceDirs);
+ switch (command) {
+ case MAKE:
+ createdWorkspaces.addAll(dirs);
+ break;
+ case REMOVE:
+ createdWorkspaces.removeAll(dirs);
+ break;
+ }
+ }
+
+ private String[] filterDirsToCreate(String[] allDirs) {
+ List dirs = Arrays.asList(allDirs);
+ List dirsToCreate = new ArrayList<>();
+ for(String dir : dirs) {
+ if (!createdWorkspaces.contains(dir)) {
+ dirsToCreate.add(dir);
+ }
+ }
+ return dirsToCreate.toArray(new String[dirsToCreate.size()]);
+ }
+}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftRouteCreator.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftRouteCreator.java
new file mode 100644
index 0000000000..05a7034ef2
--- /dev/null
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftRouteCreator.java
@@ -0,0 +1,83 @@
+/*******************************************************************************
+ * Copyright (c) 2012-2017 Red Hat, Inc.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Red Hat, Inc. - initial API and implementation
+ *******************************************************************************/
+package org.eclipse.che.plugin.openshift.client;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import io.fabric8.openshift.api.model.DoneableRoute;
+import io.fabric8.openshift.api.model.Route;
+import io.fabric8.openshift.api.model.RouteFluent.SpecNested;
+import io.fabric8.openshift.client.DefaultOpenShiftClient;
+import io.fabric8.openshift.client.OpenShiftClient;
+
+public class OpenShiftRouteCreator {
+ private static final Logger LOG = LoggerFactory.getLogger(OpenShiftRouteCreator.class);
+ private static final String TLS_TERMINATION_EDGE = "edge";
+ private static final String REDIRECT_INSECURE_EDGE_TERMINATION_POLICY = "Redirect";
+
+ public static void createRoute (final String namespace,
+ final String openShiftNamespaceExternalAddress,
+ final String serverRef,
+ final String serviceName,
+ final String deploymentName,
+ final String routeId,
+ final boolean enableTls) {
+
+ if (openShiftNamespaceExternalAddress == null) {
+ throw new IllegalArgumentException("Property che.docker.ip.external must be set when using openshift.");
+ }
+
+ try (OpenShiftClient openShiftClient = new DefaultOpenShiftClient()) {
+ String routeName = generateRouteName(routeId, serverRef);
+ String serviceHost = generateRouteHost(routeName, openShiftNamespaceExternalAddress);
+
+ SpecNested routeSpec = openShiftClient
+ .routes()
+ .inNamespace(namespace)
+ .createNew()
+ .withNewMetadata()
+ .withName(routeName)
+ .addToLabels(OpenShiftConnector.OPENSHIFT_DEPLOYMENT_LABEL, deploymentName)
+ .endMetadata()
+ .withNewSpec()
+ .withHost(serviceHost)
+ .withNewTo()
+ .withKind("Service")
+ .withName(serviceName)
+ .endTo()
+ .withNewPort()
+ .withNewTargetPort()
+ .withStrVal(serverRef)
+ .endTargetPort()
+ .endPort();
+
+ if (enableTls) {
+ routeSpec.withNewTls()
+ .withTermination(TLS_TERMINATION_EDGE)
+ .withInsecureEdgeTerminationPolicy(REDIRECT_INSECURE_EDGE_TERMINATION_POLICY)
+ .endTls();
+ }
+
+ Route route = routeSpec.endSpec().done();
+
+ LOG.info("OpenShift route {} created", route.getMetadata().getName());
+ }
+ }
+
+ private static String generateRouteName(final String serviceName, final String serverRef) {
+ return serverRef + "-" + serviceName;
+ }
+
+ private static String generateRouteHost(final String routeName, final String openShiftNamespaceExternalAddress) {
+ return routeName + "-" + openShiftNamespaceExternalAddress;
+ }
+}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleaner.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleaner.java
new file mode 100644
index 0000000000..85afdad7e1
--- /dev/null
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleaner.java
@@ -0,0 +1,107 @@
+/*******************************************************************************
+ * Copyright (c) 2012-2017 Red Hat, Inc.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Red Hat, Inc. - initial API and implementation
+ *******************************************************************************/
+
+package org.eclipse.che.plugin.openshift.client;
+
+import static com.google.common.base.Strings.isNullOrEmpty;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+import javax.inject.Inject;
+import javax.inject.Named;
+import javax.inject.Singleton;
+
+import org.eclipse.che.api.core.ServerException;
+import org.eclipse.che.api.core.event.ServerIdleEvent;
+import org.eclipse.che.api.core.model.workspace.Workspace;
+import org.eclipse.che.api.core.notification.EventService;
+import org.eclipse.che.api.core.notification.EventSubscriber;
+import org.eclipse.che.api.workspace.server.WorkspaceFilesCleaner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * Class used to remove workspace directories in Persistent Volume when a workspace
+ * is deleted while running on OpenShift. Deleted workspace directories are stored
+ * in a list. Upon Che server idling, all of these workspaces are deleted simultaneously
+ * from the PVC using a {@link OpenShiftPvcHelper} job.
+ *
+ * Since deleting a workspace does not immediately remove its files, re-creating a workspace
+ * with a previously used name can result in files from the previous workspace still being
+ * present.
+ *
+ * @see WorkspaceFilesCleaner
+ * @author amisevsk
+ */
+@Singleton
+public class OpenShiftWorkspaceFilesCleaner implements WorkspaceFilesCleaner {
+
+ private static final Logger LOG = LoggerFactory.getLogger(OpenShiftConnector.class);
+ private static final Set deleteQueue = ConcurrentHashMap.newKeySet();
+ private final String projectNamespace;
+ private final String workspacesPvcName;
+ private final OpenShiftPvcHelper openShiftPvcHelper;
+
+ @Inject
+ public OpenShiftWorkspaceFilesCleaner(EventService eventService,
+ OpenShiftPvcHelper openShiftPvcHelper,
+ @Named("che.openshift.project") String projectNamespace,
+ @Named("che.openshift.workspaces.pvc.name") String workspacesPvcName) {
+ this.projectNamespace = projectNamespace;
+ this.workspacesPvcName = workspacesPvcName;
+ this.openShiftPvcHelper = openShiftPvcHelper;
+ eventService.subscribe(new EventSubscriber() {
+ @Override
+ public void onEvent(ServerIdleEvent event) {
+ deleteWorkspacesInQueue(event);
+ }
+ });
+ }
+
+ @Override
+ public void clear(Workspace workspace) throws IOException, ServerException {
+ String workspaceName = workspace.getConfig().getName();
+ if (isNullOrEmpty(workspaceName)) {
+ LOG.error("Could not get workspace name for files removal.");
+ return;
+ }
+ deleteQueue.add(workspaceName);
+ }
+
+ private void deleteWorkspacesInQueue(ServerIdleEvent event) {
+ List deleteQueueCopy = new ArrayList<>(deleteQueue);
+ String[] dirsToDelete = deleteQueueCopy.toArray(new String[deleteQueueCopy.size()]);
+
+ LOG.info("Deleting {} workspaces on PVC {}", deleteQueueCopy.size(), workspacesPvcName);
+ boolean successful = openShiftPvcHelper.createJobPod(workspacesPvcName,
+ projectNamespace,
+ "delete-",
+ OpenShiftPvcHelper.Command.REMOVE,
+ dirsToDelete);
+ if (successful) {
+ deleteQueue.removeAll(deleteQueueCopy);
+ }
+ }
+
+ /**
+ * Clears the list of workspace directories to be deleted. Necessary for testing.
+ */
+ @VisibleForTesting
+ protected static void clearDeleteQueue() {
+ deleteQueue.clear();
+ }
+}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainer.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainer.java
index dc4c0a6680..a37b59fc56 100644
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainer.java
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainer.java
@@ -47,7 +47,7 @@ public final class KubernetesContainer {
int portNumber = Integer.parseInt(port);
String portName = CheServicePorts.get().get(portNumber);
- portName = isNullOrEmpty(portName) ? exposedPort.replace("/", "-") : portName;
+ portName = isNullOrEmpty(portName) ? "server-" + exposedPort.replace("/", "-") : portName;
ContainerPort containerPort = new ContainerPortBuilder().withName(portName).withProtocol(protocol)
.withContainerPort(portNumber).build();
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesExecHolder.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesExecHolder.java
new file mode 100644
index 0000000000..ad0af92e11
--- /dev/null
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesExecHolder.java
@@ -0,0 +1,56 @@
+/*******************************************************************************
+ * Copyright (c) 2012-2017 Red Hat, Inc.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Red Hat, Inc. - initial API and implementation
+ *******************************************************************************/
+
+package org.eclipse.che.plugin.openshift.client.kubernetes;
+
+import java.util.Arrays;
+
+import org.eclipse.che.plugin.openshift.client.OpenShiftConnector;
+
+/**
+ * Holder class for metadata about an exec, to be used with {@link OpenShiftConnector}.
+ *
+ * In OpenShift, {@code createExec()} is not separate from {@code startExec()},
+ * so this class has to be used to pass data between {@code createExec()} and
+ * {@code startExec()} calls.
+ *
+ * @see OpenShiftConnector#createExec(org.eclipse.che.plugin.docker.client.params.CreateExecParams)
+ * @see OpenShiftConnector#startExec(org.eclipse.che.plugin.docker.client.params.StartExecParams, org.eclipse.che.plugin.docker.client.MessageProcessor)
+ */
+public class KubernetesExecHolder {
+
+ private String[] command;
+ private String podName;
+
+ public KubernetesExecHolder withCommand(String[] command) {
+ this.command = command;
+ return this;
+ }
+
+ public KubernetesExecHolder withPod(String podName) {
+ this.podName = podName;
+ return this;
+ }
+
+ public String[] getCommand() {
+ return command;
+ }
+
+ public String getPod() {
+ return podName;
+ }
+
+ public String toString() {
+ return String.format("KubernetesExecHolder {command=%s, podName=%s}",
+ Arrays.asList(command).toString(),
+ podName);
+ }
+}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverter.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverter.java
index e499117dec..2931e36ab5 100644
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverter.java
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverter.java
@@ -64,6 +64,9 @@ public final class KubernetesLabelConverter {
*/
public static Map labelsToNames(Map labels) {
Map names = new HashMap<>();
+ if (labels == null) {
+ return names;
+ }
for (Map.Entry label : labels.entrySet()) {
if (!hasConversionProblems(label)) {
@@ -103,6 +106,9 @@ public final class KubernetesLabelConverter {
*/
public static Map namesToLabels(Map names) {
Map labels = new HashMap<>();
+ if (names == null) {
+ return labels;
+ }
for (Map.Entry entry: names.entrySet()){
String key = entry.getKey();
String value = entry.getValue();
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapter.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapter.java
new file mode 100644
index 0000000000..2d87a77053
--- /dev/null
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapter.java
@@ -0,0 +1,76 @@
+/*******************************************************************************
+ * Copyright (c) 2012-2017 Red Hat, Inc.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Red Hat, Inc. - initial API and implementation
+ *******************************************************************************/
+
+package org.eclipse.che.plugin.openshift.client.kubernetes;
+
+import io.fabric8.kubernetes.client.Callback;
+import io.fabric8.kubernetes.client.utils.InputStreamPumper;
+
+import org.eclipse.che.commons.annotation.Nullable;
+import org.eclipse.che.plugin.docker.client.LogMessage;
+import org.eclipse.che.plugin.docker.client.MessageProcessor;
+
+/**
+ * Adapter class for passing data from a {@code kubernetes-client} output stream (e.g.
+ * for an exec call) to {@link MessageProcessor}. This class should be passed to a
+ * {@link InputStreamPumper} along with the output of the exec call.
+ *
+ * Output passed in via the {@link #call(byte[])} method is parsed into lines,
+ * (respecting {@code '\n'} and {@code CRLF} as line separators), and
+ * passed to the {@link MessageProcessor} as {@link LogMessage}s.
+ */
+public class KubernetesOutputAdapter implements Callback {
+
+ private LogMessage.Type type;
+ private MessageProcessor execOutputProcessor;
+ private StringBuilder lineBuffer;
+
+ /**
+ * Create a new KubernetesOutputAdapter
+ *
+ * @param type
+ * the type of LogMessages being passed to the MessageProcessor
+ * @param processor
+ * the processor receiving LogMessages. If null, calling {@link #call(byte[])}
+ * will return immediately.
+ */
+ public KubernetesOutputAdapter(LogMessage.Type type,
+ @Nullable MessageProcessor processor) {
+ this.type = type;
+ this.execOutputProcessor = processor;
+ this.lineBuffer = new StringBuilder();
+ }
+
+ @Override
+ public void call(byte[] data) {
+ if (data == null || data.length == 0 || execOutputProcessor == null) {
+ return;
+ }
+ int start = 0;
+ int offset = 0;
+
+ for (int pos = 0; pos < data.length; pos++) {
+ if (data[pos] == '\n' || data[pos] == '\r') {
+ offset = pos - start;
+ String line = new String(data, start, offset);
+ lineBuffer.append(line);
+ execOutputProcessor.process(new LogMessage(type, lineBuffer.toString()));
+ lineBuffer.setLength(0);
+ if (data[pos] == '\r') {
+ pos += 1;
+ }
+ start = pos + 1;
+ }
+ }
+ String trailingChars = new String(data, start, data.length - start);
+ lineBuffer.append(trailingChars);
+ }
+}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesService.java b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesService.java
index 33e62b16e5..df179410df 100644
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesService.java
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/main/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesService.java
@@ -47,7 +47,7 @@ public final class KubernetesService {
int portNumber = Integer.parseInt(port);
String portName = CheServicePorts.get().get(portNumber);
- portName = isNullOrEmpty(portName) ? exposedPort.replace("/", "-") : portName;
+ portName = isNullOrEmpty(portName) ? "server-" + exposedPort.replace("/", "-") : portName;
int targetPortNumber = portNumber;
ServicePort servicePort = new ServicePort();
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftConnectorTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftConnectorTest.java
index b9949ab36c..3726af54d3 100644
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftConnectorTest.java
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftConnectorTest.java
@@ -16,6 +16,7 @@ import static org.testng.Assert.assertEquals;
import java.io.IOException;
+import org.eclipse.che.api.core.notification.EventService;
import org.eclipse.che.plugin.docker.client.DockerApiVersionPathPrefixProvider;
import org.eclipse.che.plugin.docker.client.DockerConnectorConfiguration;
import org.eclipse.che.plugin.docker.client.DockerRegistryAuthResolver;
@@ -31,9 +32,17 @@ import org.testng.annotations.Test;
public class OpenShiftConnectorTest {
private static final String[] CONTAINER_ENV_VARIABLES = {"CHE_WORKSPACE_ID=abcd1234"};
private static final String CHE_DEFAULT_OPENSHIFT_PROJECT_NAME = "eclipse-che";
- private static final String CHE_DEFAULT_OPENSHIFT_SERVICEACCOUNT = "cheserviceaccount";
private static final int OPENSHIFT_LIVENESS_PROBE_DELAY = 300;
private static final int OPENSHIFT_LIVENESS_PROBE_TIMEOUT = 1;
+ private static final String OPENSHIFT_DEFAULT_WORKSPACE_PERSISTENT_VOLUME_CLAIM = "che_claim_data";
+ private static final String OPENSHIFT_DEFAULT_WORKSPACE_QUANTITY = "10Gi";
+ private static final String OPENSHIFT_DEFAULT_WORKSPACE_STORAGE = "/data/workspaces";
+ private static final String OPENSHIFT_DEFAULT_WORKSPACE_PROJECTS_STORAGE = "/projects";
+ private static final String CHE_DEFAULT_SERVER_EXTERNAL_ADDRESS = "che.openshift.mini";
+ private static final String CHE_WORKSPACE_CPU_LIMIT = "1";
+ private static final boolean SECURE_ROUTES = false;
+ private static final boolean CREATE_WORKSPACE_DIRS = true;
+
@Mock
private DockerConnectorConfiguration dockerConnectorConfiguration;
@@ -45,6 +54,10 @@ public class OpenShiftConnectorTest {
private DockerApiVersionPathPrefixProvider dockerApiVersionPathPrefixProvider;
@Mock
private CreateContainerParams createContainerParams;
+ @Mock
+ private EventService eventService;
+ @Mock
+ private OpenShiftPvcHelper openShiftPvcHelper;
private OpenShiftConnector openShiftConnector;
@@ -62,10 +75,20 @@ public class OpenShiftConnectorTest {
dockerConnectionFactory,
authManager,
dockerApiVersionPathPrefixProvider,
+ openShiftPvcHelper,
+ eventService,
+ CHE_DEFAULT_SERVER_EXTERNAL_ADDRESS,
CHE_DEFAULT_OPENSHIFT_PROJECT_NAME,
- CHE_DEFAULT_OPENSHIFT_SERVICEACCOUNT,
OPENSHIFT_LIVENESS_PROBE_DELAY,
- OPENSHIFT_LIVENESS_PROBE_TIMEOUT);
+ OPENSHIFT_LIVENESS_PROBE_TIMEOUT,
+ OPENSHIFT_DEFAULT_WORKSPACE_PERSISTENT_VOLUME_CLAIM,
+ OPENSHIFT_DEFAULT_WORKSPACE_QUANTITY,
+ OPENSHIFT_DEFAULT_WORKSPACE_STORAGE,
+ OPENSHIFT_DEFAULT_WORKSPACE_PROJECTS_STORAGE,
+ CHE_WORKSPACE_CPU_LIMIT,
+ null,
+ SECURE_ROUTES,
+ CREATE_WORKSPACE_DIRS);
String workspaceID = openShiftConnector.getCheWorkspaceId(createContainerParams);
//Then
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleanerTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleanerTest.java
new file mode 100644
index 0000000000..828e52ae60
--- /dev/null
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/OpenShiftWorkspaceFilesCleanerTest.java
@@ -0,0 +1,176 @@
+/*******************************************************************************
+ * Copyright (c) 2012-2017 Red Hat, Inc.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Red Hat, Inc. - initial API and implementation
+ *******************************************************************************/
+
+package org.eclipse.che.plugin.openshift.client;
+
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.when;
+import static org.testng.Assert.assertEquals;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.verify;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+import org.eclipse.che.api.core.ServerException;
+import org.eclipse.che.api.core.event.ServerIdleEvent;
+import org.eclipse.che.api.core.model.workspace.Workspace;
+import org.eclipse.che.api.core.notification.EventService;
+import org.eclipse.che.api.workspace.server.model.impl.WorkspaceConfigImpl;
+import org.eclipse.che.api.workspace.server.model.impl.WorkspaceImpl;
+import org.mockito.ArgumentCaptor;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+public class OpenShiftWorkspaceFilesCleanerTest {
+
+ private static final String CHE_OPENSHIFT_PROJECT = "eclipse-che";
+ private static final String WORKSPACES_PVC_NAME = "che-data-volume";
+ private static final String WORKSPACE_ONE = "testworkspaceone";
+ private static final String WORKSPACE_TWO = "testworkspacetwo";
+
+ @Mock
+ private OpenShiftPvcHelper pvcHelper;
+ @Mock
+ private ServerIdleEvent serverIdleEvent;
+ private EventService eventService;
+ private OpenShiftWorkspaceFilesCleaner cleaner;
+
+ @BeforeMethod
+ public void setup() {
+ OpenShiftWorkspaceFilesCleaner.clearDeleteQueue();
+ MockitoAnnotations.initMocks(this);
+ eventService = new EventService();
+ cleaner = new OpenShiftWorkspaceFilesCleaner(eventService,
+ pvcHelper,
+ CHE_OPENSHIFT_PROJECT,
+ WORKSPACES_PVC_NAME);
+ }
+
+ @Test
+ public void shouldDoNothingWithoutIdleEvent() throws ServerException, IOException {
+ // Given
+ Workspace workspace = generateWorkspace(WORKSPACE_ONE);
+
+ // When
+ cleaner.clear(workspace);
+
+ // Then
+ verify(pvcHelper, never()).createJobPod(anyString(),
+ anyString(),
+ anyString(),
+ any(OpenShiftPvcHelper.Command.class),
+ any(String[].class));
+ }
+
+ @Test
+ public void shouldDeleteWorkspaceOnIdleEvent() throws ServerException, IOException {
+ // Given
+ Workspace workspace = generateWorkspace(WORKSPACE_ONE);
+
+ // When
+ cleaner.clear(workspace);
+ eventService.publish(serverIdleEvent);
+
+ // Then
+ verify(pvcHelper, times(1)).createJobPod(anyString(),
+ anyString(),
+ anyString(),
+ eq(OpenShiftPvcHelper.Command.REMOVE),
+ eq(WORKSPACE_ONE));
+ }
+
+ @Test
+ public void shouldDeleteMultipleQueuedWorkspacesAtOnce() throws ServerException, IOException {
+ // Given
+ Workspace workspaceOne = generateWorkspace(WORKSPACE_ONE);
+ Workspace workspaceTwo = generateWorkspace(WORKSPACE_TWO);
+ String[] expectedDirs = new String[] {WORKSPACE_ONE, WORKSPACE_TWO};
+ ArgumentCaptor dirCaptor = ArgumentCaptor.forClass(String.class);
+
+ // When
+ cleaner.clear(workspaceOne);
+ cleaner.clear(workspaceTwo);
+ eventService.publish(serverIdleEvent);
+
+ // Then
+ verify(pvcHelper, times(1)).createJobPod(anyString(),
+ anyString(),
+ anyString(),
+ eq(OpenShiftPvcHelper.Command.REMOVE),
+ dirCaptor.capture(), // Varargs capture doesn't seem to work.
+ dirCaptor.capture());
+
+ List dirs = dirCaptor.getAllValues();
+ String[] actualDirs = dirs.toArray(new String[dirs.size()]);
+ // Sort arrays to ignore order
+ Arrays.sort(actualDirs);
+ Arrays.sort(expectedDirs);
+ assertEquals(actualDirs, expectedDirs, "Expected all dirs to be deleted when server is idled.");
+ }
+
+ @Test
+ public void shouldRetainQueueIfDeletionFails() throws ServerException, IOException {
+ // Given
+ Workspace workspaceOne = generateWorkspace(WORKSPACE_ONE);
+ when(pvcHelper.createJobPod(any(), any(), any(), any(), any())).thenReturn(false);
+
+ // When
+ cleaner.clear(workspaceOne);
+ eventService.publish(serverIdleEvent);
+
+ // Then
+ verify(pvcHelper, times(1)).createJobPod(anyString(),
+ anyString(),
+ anyString(),
+ eq(OpenShiftPvcHelper.Command.REMOVE),
+ eq(WORKSPACE_ONE));
+
+ // When
+ eventService.publish(serverIdleEvent);
+
+ // Then
+ verify(pvcHelper, times(2)).createJobPod(anyString(),
+ anyString(),
+ anyString(),
+ eq(OpenShiftPvcHelper.Command.REMOVE),
+ eq(WORKSPACE_ONE));
+ }
+
+ @Test
+ public void shouldUseProjectNamespaceAndPvcNameAsParameters() throws ServerException, IOException {
+ // Given
+ Workspace workspaceOne = generateWorkspace(WORKSPACE_ONE);
+
+ // When
+ cleaner.clear(workspaceOne);
+ eventService.publish(serverIdleEvent);
+
+ // Then
+ verify(pvcHelper, times(1)).createJobPod(eq(WORKSPACES_PVC_NAME),
+ eq(CHE_OPENSHIFT_PROJECT),
+ anyString(),
+ eq(OpenShiftPvcHelper.Command.REMOVE),
+ eq(WORKSPACE_ONE));
+ }
+
+ private Workspace generateWorkspace(String id) {
+ WorkspaceConfigImpl config = new WorkspaceConfigImpl();
+ config.setName(id);
+ return new WorkspaceImpl(id, null, config);
+ }
+}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainerTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainerTest.java
index d3cd0be897..83c0c1dc2a 100644
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainerTest.java
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesContainerTest.java
@@ -44,7 +44,7 @@ public class KubernetesContainerTest {
map(p -> Integer.toString(p.getContainerPort()) +
"/" +
p.getProtocol().toLowerCase()).collect(Collectors.toList());
- assertTrue(exposedPorts.stream().anyMatch(portsAndProtocols::contains));
+ assertTrue(exposedPorts.stream().allMatch(portsAndProtocols::contains));
}
@Test
@@ -61,7 +61,7 @@ public class KubernetesContainerTest {
map(p -> Integer.toString(p.getContainerPort()) +
"/" +
p.getProtocol().toLowerCase()).collect(Collectors.toList());
- assertTrue(imageExposedPorts.keySet().stream().anyMatch(portsAndProtocols::contains));
+ assertTrue(imageExposedPorts.keySet().stream().allMatch(portsAndProtocols::contains));
}
}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesEnvVarTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesEnvVarTest.java
index 36adea1fd2..ccb63d44b1 100644
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesEnvVarTest.java
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesEnvVarTest.java
@@ -44,7 +44,7 @@ public class KubernetesEnvVarTest {
// Then
List keysAndValues = env.stream().map(k -> k.getName() + "=" + k.getValue()).collect(Collectors.toList());
- assertTrue(Arrays.stream(envVariables).anyMatch(keysAndValues::contains));
+ assertTrue(Arrays.stream(envVariables).allMatch(keysAndValues::contains));
}
}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverterTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverterTest.java
index bf43eefc9d..51ad72ac30 100644
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverterTest.java
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesLabelConverterTest.java
@@ -16,15 +16,15 @@ import static org.testng.Assert.assertTrue;
import java.util.HashMap;
import java.util.Map;
-import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
public class KubernetesLabelConverterTest {
+ private final String prefix = KubernetesLabelConverter.getCheServerLabelPrefix();
+
@Test
public void shouldConvertLabelsToValidKubernetesLabelNames() {
String validLabelRegex = "([A-Za-z0-9][-A-Za-z0-9_\\.]*)?[A-Za-z0-9]";
- String prefix = KubernetesLabelConverter.getCheServerLabelPrefix();
// Given
Map labels = new HashMap<>();
@@ -46,7 +46,6 @@ public class KubernetesLabelConverterTest {
@Test
public void shouldBeAbleToRecoverOriginalLabelsAfterConversion() {
// Given
- String prefix = KubernetesLabelConverter.getCheServerLabelPrefix();
Map originalLabels = new HashMap<>();
originalLabels.put(prefix + "4401/tcp:path:", "/api");
originalLabels.put(prefix + "8000/tcp:ref:", "tomcat-debug");
@@ -59,4 +58,58 @@ public class KubernetesLabelConverterTest {
assertEquals(originalLabels, unconverted);
}
+ @Test
+ public void shouldIgnoreAndLogProblemLabels() {
+ // Given
+ Map originalLabels = new HashMap<>();
+ Map validLabels = new HashMap<>();
+ validLabels.put(prefix + "4401/tcp:path:", "/api");
+ validLabels.put(prefix + "8000/tcp:ref:", "tomcat-debug");
+ Map invalidLabels = new HashMap<>();
+ invalidLabels.put(prefix + "9999/t.cp:path:", "/api");
+ invalidLabels.put(prefix + "1111/tcp:path:", "/a_pi");
+
+ originalLabels.putAll(validLabels);
+ originalLabels.putAll(invalidLabels);
+
+ // When
+ Map converted = KubernetesLabelConverter.labelsToNames(originalLabels);
+ Map unconverted = KubernetesLabelConverter.namesToLabels(converted);
+
+ // Then
+ assertTrue(validLabels.entrySet().stream().allMatch(unconverted.entrySet()::contains),
+ "Valid labels should be there when converting + unconverting");
+ assertTrue(invalidLabels.entrySet().stream().noneMatch(unconverted.entrySet()::contains),
+ "Labels with invalid characters should be ignored");
+ }
+
+ @Test
+ public void shouldIgnoreEmptyValues() {
+ // Given
+ Map originalLabels = new HashMap<>();
+ originalLabels.put(prefix + "4401/tcp:path:", null);
+ originalLabels.put(prefix + "4402/tcp:path:", "");
+ originalLabels.put(prefix + "4403/tcp:path:", " ");
+
+ // When
+ Map converted = KubernetesLabelConverter.labelsToNames(originalLabels);
+
+ // Then
+ assertTrue(converted.isEmpty(), "Labels with null, empty, or whitespace values should be ignored");
+ }
+
+ @Test
+ public void shouldNotIgnoreValuesWithoutPrefix() {
+ // Given
+ Map originalLabels = new HashMap<>();
+ originalLabels.put("4401/tcp:path:", "/api");
+ originalLabels.put(prefix + "8000/tcp:ref:", "tomcat-debug");
+
+ // When
+ Map converted = KubernetesLabelConverter.labelsToNames(originalLabels);
+
+ // Then
+ // Currently we put a warning in the logs but convert these labels anyways.
+ assertTrue(converted.size() == 2, "Should convert labels even without prefix");
+ }
}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapterTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapterTest.java
new file mode 100644
index 0000000000..8dd5a571f5
--- /dev/null
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesOutputAdapterTest.java
@@ -0,0 +1,245 @@
+/*******************************************************************************
+ * Copyright (c) 2012-2017 Red Hat, Inc.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Red Hat, Inc. - initial API and implementation
+ *******************************************************************************/
+
+package org.eclipse.che.plugin.openshift.client.kubernetes;
+
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.eclipse.che.plugin.docker.client.LogMessage;
+import org.eclipse.che.plugin.docker.client.MessageProcessor;
+
+
+public class KubernetesOutputAdapterTest {
+
+ private static LogMessage.Type LOG_TYPE = LogMessage.Type.DOCKER;
+ private testMessageProcessor processor;
+ private KubernetesOutputAdapter adapter;
+
+ private class testMessageProcessor implements MessageProcessor {
+
+ private List messages;
+ private LogMessage.Type type = null;
+
+ public testMessageProcessor() {
+ this.messages = new ArrayList<>();
+ }
+
+ @Override
+ public void process(LogMessage message) {
+ LogMessage.Type messageType = message.getType();
+ if (type == null) {
+ type = messageType;
+ }
+ messages.add(message.getContent());
+ }
+
+ public List getMessages() {
+ return new ArrayList<>(messages);
+ }
+
+ public LogMessage.Type getType() {
+ return type;
+ }
+ };
+
+ @BeforeMethod
+ public void setUp() {
+ processor = new testMessageProcessor();
+ adapter = new KubernetesOutputAdapter(LOG_TYPE, processor);
+ }
+
+ @Test
+ public void shouldBreakLinesCorrectly() {
+ // Given
+ byte[] input = "line1\nline2\n".getBytes();
+ List expected = generateExpected("line1", "line2");
+
+ // When
+ adapter.call(input);
+
+ // Then
+ List actual = processor.getMessages();
+ assertEquals(actual, expected, "Should break lines on \\n char");
+ }
+
+ @Test
+ public void shouldCacheUnfinishedLinesBetweenCalls() {
+ // Given
+ byte[] firstInput = "line1\nlin".getBytes();
+ byte[] secondInput = "e2\nline3\n".getBytes();
+ List expected = generateExpected("line1", "line2", "line3");
+
+ // When
+ adapter.call(firstInput);
+ adapter.call(secondInput);
+
+ // Then
+ List actual = processor.getMessages();
+ assertEquals(actual, expected, "Should store unfinished lines between calls");
+ }
+
+ @Test
+ public void shouldUseProvidedLogMessageType() {
+ for (LogMessage.Type type : LogMessage.Type.values()) {
+ // Given
+ byte[] input = "line1\n".getBytes();
+ LogMessage.Type expected = type;
+ processor = new testMessageProcessor();
+ adapter = new KubernetesOutputAdapter(type, processor);
+
+ // When
+ adapter.call(input);
+
+ // Then
+ LogMessage.Type actual = processor.getType();
+ assertEquals(actual, expected, "Should call MessageProcessor with provided type");
+ }
+ }
+
+ @Test
+ public void shouldBreakLinesNormallyWithCarriageReturn() {
+ // Given
+ byte[] input = "line1\r\nline2\n".getBytes();
+ List expected = generateExpected("line1", "line2");
+
+ // When
+ adapter.call(input);
+
+ // Then
+ List actual = processor.getMessages();
+ assertEquals(actual, expected, "Should break lines normally on \\r\\n characters");
+ }
+
+ @Test
+ public void shouldNotIgnoreEmptyLines() {
+ // Given
+ byte[] input = "line1\n\nline2\n".getBytes();
+ List expected = generateExpected("line1", "", "line2");
+
+ // When
+ adapter.call(input);
+
+ // Then
+ List actual = processor.getMessages();
+ assertEquals(actual, expected, "Should call processor.process() with empty Strings");
+ }
+
+ @Test
+ public void shouldNotCallWithoutFinalNewline() {
+ // Given
+ byte[] input = "line1\nline2".getBytes(); // No trailing \n
+ List firstExpected = generateExpected("line1");
+ List secondExpected = generateExpected("line1", "line2");
+
+ // When
+ adapter.call(input);
+
+ // Then
+ List firstActual = processor.getMessages();
+ assertEquals(firstActual, firstExpected, "Should only process lines when they are terminated by \\n or \\r\\n");
+
+ // When
+ adapter.call("\n".getBytes());
+
+ // Then
+ List secondActual = processor.getMessages();
+ assertEquals(secondActual, secondExpected, "Should buffer lines until newline is encountered.");
+
+ }
+
+ @Test
+ public void shouldIgnoreNullCalls() {
+ // Given
+ byte[] firstInput = "line1\n".getBytes();
+ byte[] secondInput = "line2\n".getBytes();
+ List expected = generateExpected("line1", "line2");
+
+ // When
+ adapter.call(firstInput);
+ adapter.call(null);
+ adapter.call(secondInput);
+
+ // Then
+ List actual = processor.getMessages();
+ assertEquals(actual, expected, "Should ignore calls with null arguments");
+ }
+
+ @Test
+ public void shouldKeepBufferPastNullCalls() {
+ // Given
+ byte[] firstInput = "lin".getBytes();
+ byte[] secondInput = "e1\nline2\n".getBytes();
+ List expected = generateExpected("line1", "line2");
+
+ // When
+ adapter.call(firstInput);
+ adapter.call(null);
+ adapter.call(secondInput);
+
+ // Then
+ List actual = processor.getMessages();
+ assertEquals(actual, expected, "Should keep buffered partial line across null calls");
+ }
+
+ @Test
+ public void shouldDoNothingWhenExecOutputProcessorIsNull() {
+ // Given
+ byte[] firstInput = "line1\n".getBytes();
+ byte[] secondInput = "line2\n".getBytes();
+ adapter = new KubernetesOutputAdapter(LOG_TYPE, null);
+
+ // When
+ adapter.call(firstInput);
+ adapter.call(secondInput);
+
+ // Then
+ List actual = processor.getMessages();
+ assertTrue(actual.isEmpty(), "Should do nothing when ExecOutputProcessor is null");
+ }
+
+ @Test
+ public void shouldIgnoreCallsWhenDataIsEmpty() {
+ // Given
+ byte[] emptyInput = "".getBytes();
+ byte[] firstInput = "line1\n".getBytes();
+ byte[] secondInput = "line2\n".getBytes();
+ List expected = generateExpected("line1", "line2");
+
+ // When
+ adapter.call(emptyInput);
+ adapter.call(firstInput);
+ adapter.call(emptyInput);
+ adapter.call(secondInput);
+ adapter.call(emptyInput);
+
+ // Then
+ List actual = processor.getMessages();
+ assertEquals(actual, expected, "KubernetesOutputAdapter should ignore empty data calls");
+
+ }
+
+ private List generateExpected(String... strings) {
+ List expected = new ArrayList<>();
+ for (String string : strings) {
+ expected.add(string);
+ }
+ return expected;
+ }
+
+
+}
diff --git a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesServiceTest.java b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesServiceTest.java
index a1b575415b..ebcc02a9cf 100644
--- a/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesServiceTest.java
+++ b/plugins/plugin-docker/che-plugin-openshift-client/src/test/java/org/eclipse/che/plugin/openshift/client/kubernetes/KubernetesServiceTest.java
@@ -40,7 +40,7 @@ public class KubernetesServiceTest {
map(p -> Integer.toString(p.getPort()) +
"/" +
p.getProtocol()).collect(Collectors.toList());
- assertTrue(imageExposedPorts.keySet().stream().anyMatch(portsAndProtocols::contains));
+ assertTrue(imageExposedPorts.keySet().stream().allMatch(portsAndProtocols::contains));
}
@Test
@@ -60,7 +60,7 @@ public class KubernetesServiceTest {
map(p -> Integer.toString(p.getPort()) +
"/" +
p.getProtocol()).collect(Collectors.toList());
- assertTrue(exposedPorts.keySet().stream().anyMatch(portsAndProtocols::contains));
+ assertTrue(exposedPorts.keySet().stream().allMatch(portsAndProtocols::contains));
}
@Test
@@ -73,13 +73,13 @@ public class KubernetesServiceTest {
exposedPorts.put("4411/tcp",null);
exposedPorts.put("4412/tcp",null);
exposedPorts.put("8080/tcp",null);
- exposedPorts.put("8888/tcp",null);
+ exposedPorts.put("8000/tcp",null);
exposedPorts.put("9876/tcp",null);
Set expectedPortNames = new HashSet<>();
expectedPortNames.add("sshd");
expectedPortNames.add("wsagent");
- expectedPortNames.add("wsagent-pda");
+ expectedPortNames.add("wsagent-jpda");
expectedPortNames.add("terminal");
expectedPortNames.add("exec-agent");
expectedPortNames.add("tomcat");
@@ -92,7 +92,7 @@ public class KubernetesServiceTest {
map(p -> p.getName()).collect(Collectors.toList());
// Then
- assertTrue(actualPortNames.stream().anyMatch(expectedPortNames::contains));
+ assertTrue(actualPortNames.stream().allMatch(expectedPortNames::contains));
}
@Test
@@ -102,7 +102,7 @@ public class KubernetesServiceTest {
exposedPorts.put("55/tcp",null);
Set expectedPortNames = new HashSet<>();
- expectedPortNames.add("55-tcp");
+ expectedPortNames.add("server-55-tcp");
// When
List servicePorts = KubernetesService.getServicePortsFrom(exposedPorts.keySet());
@@ -110,7 +110,7 @@ public class KubernetesServiceTest {
map(p -> p.getName()).collect(Collectors.toList());
// Then
- assertTrue(actualPortNames.stream().anyMatch(expectedPortNames::contains));
+ assertTrue(actualPortNames.stream().allMatch(expectedPortNames::contains));
}
}
diff --git a/plugins/plugin-git/che-plugin-git-ext-git/src/main/java/org/eclipse/che/ide/ext/git/client/GitServiceClient.java b/plugins/plugin-git/che-plugin-git-ext-git/src/main/java/org/eclipse/che/ide/ext/git/client/GitServiceClient.java
index 4c3201073d..690a63d7ef 100644
--- a/plugins/plugin-git/che-plugin-git-ext-git/src/main/java/org/eclipse/che/ide/ext/git/client/GitServiceClient.java
+++ b/plugins/plugin-git/che-plugin-git-ext-git/src/main/java/org/eclipse/che/ide/ext/git/client/GitServiceClient.java
@@ -10,7 +10,6 @@
*******************************************************************************/
package org.eclipse.che.ide.ext.git.client;
-import org.eclipse.che.api.core.model.workspace.config.ProjectConfig;
import org.eclipse.che.api.git.shared.Branch;
import org.eclipse.che.api.git.shared.BranchListMode;
import org.eclipse.che.api.git.shared.CheckoutRequest;
@@ -28,9 +27,6 @@ import org.eclipse.che.api.git.shared.StatusFormat;
import org.eclipse.che.api.promises.client.Promise;
import org.eclipse.che.commons.annotation.Nullable;
import org.eclipse.che.ide.resource.Path;
-import org.eclipse.che.ide.rest.AsyncRequestCallback;
-import org.eclipse.che.ide.websocket.WebSocketException;
-import org.eclipse.che.ide.websocket.rest.RequestCallback;
import java.util.List;
import java.util.Map;
@@ -40,29 +36,10 @@ import java.util.Map;
*
* @author Ann Zhuleva
* @author Vlad Zhukovskyi
+ * @author Igor Vinokur
*/
public interface GitServiceClient {
- /**
- * Add changes to Git index (temporary storage). Sends request over WebSocket.
- *
- * @param projectConfig
- * project (root of GIT repository)
- * @param update
- * if true then never stage new files, but stage modified new contents of tracked files and remove files from
- * the index if the corresponding files in the working tree have been removed
- * @param filePattern
- * pattern of the files to be added, default is "." (all files are added)
- * @param callback
- * callback
- * @throws WebSocketException
- * @deprecated use {@link #add(Path, boolean, Path[])}
- */
- void add(ProjectConfig projectConfig,
- boolean update,
- List filePattern,
- RequestCallback callback) throws WebSocketException;
-
/**
* Add changes to Git index (temporary storage). Sends request over WebSocket.
*
@@ -73,7 +50,6 @@ public interface GitServiceClient {
* the index if the corresponding files in the working tree have been removed
* @param paths
* pattern of the files to be added, default is "." (all files are added)
- * @throws WebSocketException
*/
Promise add(Path project, boolean update, Path[] paths);
@@ -95,7 +71,6 @@ public interface GitServiceClient {
*
* @param removeDeletedRefs
* if true then delete removed refs from local repository
- * @throws WebSocketException
*/
Promise fetch(Path project, String remote, List refspec, boolean removeDeletedRefs);
@@ -107,51 +82,42 @@ public interface GitServiceClient {
* project (root of GIT repository)
* @param mode
* get remote branches
- * @param callback
- * @deprecated use {@link #branchList(Path, BranchListMode)}
- */
- @Deprecated
- void branchList(ProjectConfig project,
- @Nullable BranchListMode mode,
- AsyncRequestCallback> callback);
-
- /**
- * Get the list of the branches. For now, all branches cannot be returned at once, so the parameter remote tells to get
- * remote branches if true or local ones (if false).
- * @param project
- * project (root of GIT repository)
- * @param mode
*/
Promise> branchList(Path project, BranchListMode mode);
/**
* Delete branch.
- * @param project
+ *
+ * @param project
* project (root of GIT repository)
* @param name
* name of the branch to delete
* @param force
- * force if true delete branch {@code name} even if it is not fully merged
+ * force if true delete branch {@code name} even if it is not fully merged
*/
Promise branchDelete(Path project, String name, boolean force);
/**
* Checkout the branch with pointed name.
- * @param project
+ *
+ * @param project
* project (root of GIT repository)
* @param oldName
* branch's current name
* @param newName
+ * branch's new name
*/
Promise branchRename(Path project, String oldName, String newName);
/**
* Create new branch with pointed name.
- * @param project
+ *
+ * @param project
* project (root of GIT repository)
* @param name
* new branch's name
* @param startPoint
+ * name of a commit at which to start the new branch
*/
Promise branchCreate(Path project, String name, String startPoint);
@@ -160,38 +126,10 @@ public interface GitServiceClient {
*
* @param project
* project (root of GIT repository)
- * @param checkoutRequest
- * checkout request
- * @deprecated {@link #checkout(Path, CheckoutRequest)}
- */
- @Deprecated
- void checkout(ProjectConfig project,
- CheckoutRequest checkoutRequest,
- AsyncRequestCallback callback);
-
- /**
- * Checkout the branch with pointed name.
- * @param project
- * project (root of GIT repository)
* @param request
+ * checkout request
*/
- Promise checkout(Path project, CheckoutRequest request);
-
- /**
- * Get the list of remote repositories for pointed by {@code projectConfig} parameter one.
- *
- * @param projectConfig
- * project (root of GIT repository)
- * @param remoteName
- * remote repository's name. Can be null in case when it is need to fetch all {@link Remote}
- * @param verbose
- * If true show remote url and name otherwise show remote name
- * @return a promise that provides list {@link Remote} repositories for the {@code workspaceId}, {@code projectConfig},
- * {@code remoteName}, {@code verbose} or rejects with an error.
- * @deprecated use {@link #remoteList(Path, String, boolean)}
- */
- @Deprecated
- Promise> remoteList(ProjectConfig projectConfig, @Nullable String remoteName, boolean verbose);
+ Promise checkout(Path project, CheckoutRequest request);
/**
* Get the list of remote repositories for pointed by {@code projectConfig} parameter one.
@@ -204,7 +142,6 @@ public interface GitServiceClient {
* If true show remote url and name otherwise show remote name
* @return a promise that provides list {@link Remote} repositories for the {@code workspaceId}, {@code projectConfig},
* {@code remoteName}, {@code verbose} or rejects with an error.
- * @deprecated use {@link #remoteList(Path, String, boolean)}
*/
Promise> remoteList(Path project, String remote, boolean verbose);
@@ -217,21 +154,6 @@ public interface GitServiceClient {
* remote repository's name
* @param url
* remote repository's URL
- * @deprecated use {@link #remoteAdd(Path, String, String)}
- */
- @Deprecated
- void remoteAdd(ProjectConfig project,
- String name,
- String url,
- AsyncRequestCallback callback);
-
- /**
- * Adds remote repository to the list of remote repositories.
- * @param project
- * project (root of GIT repository)
- * @param name
- * remote repository's name
- * @param url
*/
Promise remoteAdd(Path project, String name, String url);
@@ -242,28 +164,18 @@ public interface GitServiceClient {
* project (root of GIT repository)
* @param name
* remote repository name to delete
- * @deprecated use {@link #remoteDelete(Path, String)}
- */
- @Deprecated
- void remoteDelete(ProjectConfig project,
- String name,
- AsyncRequestCallback callback);
-
- /**
- * Deletes the pointed(by name) remote repository from the list of repositories.
- * @param project
- * project (root of GIT repository)
- * @param name
*/
Promise remoteDelete(Path project, String name);
/**
* Remove items from the working tree and the index.
- * @param project
+ *
+ * @param project
* project (root of GIT repository)
* @param items
* items to remove
* @param cached
+ * is for removal only from index
*/
Promise remove(Path project, Path[] items, boolean cached);
@@ -272,28 +184,33 @@ public interface GitServiceClient {
* 1. Reset files in index - content of files is untouched. Typically it is useful to remove from index mistakenly added files.
* git reset [paths] is the opposite of git add [paths]. 2. Reset the current branch head to [commit] and
* possibly updates the index (resetting it to the tree of [commit]) and the working tree depending on [mode].
- * @param project
+ *
+ * @param project
* project (root of GIT repository)
* @param commit
* commit to which current head should be reset
* @param resetType
- * type of the reset
+ * type of the reset
* @param files
-* pattern of the files to reset the index. If null then reset the current branch head to [commit],
+ * pattern of the files to reset the index. If null then reset the current branch head to [commit],
+ * else reset received files in index.
*/
Promise reset(Path project, String commit, ResetRequest.ResetType resetType, Path[] files);
/**
* Initializes new Git repository (over WebSocket).
- * @param project
+ *
+ * @param project
* project (root of GIT repository)
* @param bare
+ * to create bare repository or not
*/
Promise init(Path project, boolean bare);
/**
* Pull (fetch and merge) changes from remote repository to local one (sends request over WebSocket).
- * @param project
+ *
+ * @param project
* project (root of GIT repository)
* @param refSpec
* list of refspec to fetch.
@@ -305,6 +222,7 @@ public interface GitServiceClient {
* featured - remote branch name.
*
* @param remote
+ * remote remote repository's name
*/
Promise pull(Path project, String refSpec, String remote);
@@ -320,50 +238,22 @@ public interface GitServiceClient {
* @param force
* push refuses to update a remote ref that is not an ancestor of the local ref used to overwrite it. If true
* disables the check. This can cause the remote repository to lose commits
- * @deprecated use {@link #push(Path, List, String, boolean)}
- */
- @Deprecated
- Promise push(ProjectConfig project,
- List refSpec,
- String remote,
- boolean force);
-
- /**
- * Push changes from local repository to remote one (sends request over WebSocket).
- * @param project
- * project
- * @param refSpec
- * list of refspec to push
- * @param remote
- * remote repository name or url
- * @param force
-* push refuses to update a remote ref that is not an ancestor of the local ref used to overwrite it. If true
*/
Promise push(Path project, List refSpec, String remote, boolean force);
/**
- * Performs commit changes from index to repository. The result of the commit is represented by {@link Revision}, which is returned by
- * callback in onSuccess(Revision result). Sends request over WebSocket.
+ * Clones one remote repository to local one (over WebSocket).
*
* @param project
* project (root of GIT repository)
- * @param message
- * commit log message
- * @param all
- * automatically stage files that have been modified and deleted
- * @param amend
- * indicates that previous commit must be overwritten
- * @param callback
- * callback
- * @throws WebSocketException
- * @deprecated use {@link #commit(Path, String, boolean, boolean)}
+ * @param remoteUri
+ * the location of the remote repository
+ * @param remoteName
+ * remote name instead of "origin"
*/
- @Deprecated
- void commit(ProjectConfig project,
- String message,
- boolean all,
- boolean amend,
- AsyncRequestCallback callback);
+ Promise clone(Path project,
+ String remoteUri,
+ String remoteName);
/**
* Performs commit changes from index to repository. The result of the commit is represented by {@link Revision}, which is returned by
@@ -377,62 +267,50 @@ public interface GitServiceClient {
* automatically stage files that have been modified and deleted
* @param amend
* indicates that previous commit must be overwritten
- * @throws WebSocketException
*/
Promise commit(Path project, String message, boolean all, boolean amend);
/**
- * Performs commit for the given files (ignoring git index).
+ * Performs commit changes from index to repository.
*
* @param project
* project (root of GIT repository)
* @param message
* commit log message
- * @param files
- * the list of iles that are commited, ignoring the index
* @param amend
* indicates that previous commit must be overwritten
- * @throws WebSocketException
- */
- Promise commit(Path project, String message, Path[] files, boolean amend);
-
- /**
- * Performs commit changes from index to repository.
- * @param project
- * project (root of GIT repository)
- * @param message
- * commit log message
- * @param all
- * automatically stage files that have been modified and deleted
* @param files
-* the list of files that are committed, ignoring the index
- * @param amend
+ * the list of files that are committed, ignoring the index
*/
- Promise commit(Path project, String message, boolean all, Path[] files, boolean amend);
+ Promise commit(Path project, String message, boolean amend, Path[] files);
/**
* Get repository options.
- * @param project
+ *
+ * @param project
* project (root of GIT repository)
* @param requestedConfig
+ * list of config keys
*/
Promise