Mirror of https://github.com/theonedev/onedev.git (synced 2025-12-08 18:26:30 +00:00)

Commit 94af4a3625 (parent 93d5db3f0c): Cache support for Kubernetes executor

Changed files listed in the page header: pom.xml (2), pom.xml (2); the diff hunks follow.
@@ -195,7 +195,7 @@
</repository>
</repositories>
<properties>
<commons.version>1.1.3</commons.version>
<commons.version>1.1.4</commons.version>
<antlr.version>4.7.2</antlr.version>
</properties>
</project>
@ -110,41 +110,11 @@
|
||||
<artifactId>websocket-server</artifactId>
|
||||
<version>${jetty.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-core</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-guava</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-joda</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.jaxrs</groupId>
|
||||
<artifactId>jackson-jaxrs-json-provider</artifactId>
|
||||
<version>${jackson.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.containers</groupId>
|
||||
<artifactId>jersey-container-servlet</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.core</groupId>
|
||||
<artifactId>jersey-client</artifactId>
|
||||
<version>${jersey.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.glassfish.jersey.ext</groupId>
|
||||
<artifactId>jersey-bean-validation</artifactId>
|
||||
@@ -239,7 +209,7 @@
<dependency>
<groupId>org.glassfish.hk2</groupId>
<artifactId>guice-bridge</artifactId>
<version>2.4.0-b06</version>
<version>2.5.0</version>
</dependency>
<dependency>
<groupId>org.apache.shiro</groupId>
@@ -415,6 +385,11 @@
<artifactId>org.eclipse.jgit.archive</artifactId>
<version>${jgit.version}</version>
</dependency>
<dependency>
<groupId>io.onedev</groupId>
<artifactId>k8s-helper</artifactId>
<version>1.0.1</version>
</dependency>
</dependencies>
<properties>
<moduleClass>io.onedev.server.CoreModule</moduleClass>
@@ -426,9 +401,8 @@
<groovy.version>2.4.14</groovy.version>
<wicket.version>7.10.0</wicket.version>
<wicketbootstrap.version>0.10.17</wicketbootstrap.version>
<jersey.version>2.13</jersey.version>
<jersey.version>2.26</jersey.version>
<jetty.version>9.3.27.v20190418</jetty.version>
<jackson.version>2.9.8</jackson.version>
<servlet.version>3.1.0</servlet.version>
</properties>
</project>
@@ -0,0 +1,43 @@
package io.onedev.server.ci.job;

import java.io.Serializable;

import org.hibernate.validator.constraints.NotEmpty;

import io.onedev.server.util.validation.annotation.Path;
import io.onedev.server.util.validation.annotation.PathSegment;
import io.onedev.server.web.editable.annotation.Editable;

@Editable
public class CacheSpec implements Serializable {

	private static final long serialVersionUID = 1L;

	private String key;

	private String path;

	@Editable(order=100, description="Specify key of the cache. Caches with same key can be reused by different builds")
	@PathSegment
	@NotEmpty
	public String getKey() {
		return key;
	}

	public void setKey(String key) {
		this.key = key;
	}

	@Editable(order=200, description="Specify path to cache. Non-absolute path is considered to be relative to job workspace. "
			+ "Specify \".\" (without quote) to cache workspace itself")
	@Path
	@NotEmpty
	public String getPath() {
		return path;
	}

	public void setPath(String path) {
		this.path = path;
	}

}
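For orientation (not part of the commit), a CacheSpec of this shape might be populated as follows; the key and path values are invented examples, with the path resolved relative to the job workspace as described above.

	// Illustrative sketch only; key/path values below are hypothetical
	CacheSpec mavenCache = new CacheSpec();
	mavenCache.setKey("maven-repository");   // caches sharing a key can be reused across builds
	mavenCache.setPath(".m2/repository");    // relative path, resolved against the job workspace

	CacheSpec workspaceCache = new CacheSpec();
	workspaceCache.setKey("whole-workspace");
	workspaceCache.setPath(".");             // "." caches the workspace itself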
@ -1,7 +1,9 @@
|
||||
package io.onedev.server.ci.job;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Comparator;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
@ -10,6 +12,7 @@ import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.Callable;
|
||||
@ -19,6 +22,7 @@ import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.TimeoutException;
|
||||
import java.util.concurrent.locks.Lock;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import javax.inject.Inject;
|
||||
@ -27,14 +31,13 @@ import javax.servlet.http.HttpServletRequest;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.eclipse.jgit.lib.ObjectId;
|
||||
import org.quartz.CronScheduleBuilder;
|
||||
import org.quartz.ScheduleBuilder;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.google.common.base.CharMatcher;
|
||||
import com.google.common.base.Preconditions;
|
||||
import com.google.common.base.Splitter;
|
||||
import com.google.common.collect.Sets;
|
||||
|
||||
import io.onedev.commons.launcher.loader.Listen;
|
||||
import io.onedev.commons.launcher.loader.ListenerRegistry;
|
||||
@ -42,8 +45,7 @@ import io.onedev.commons.utils.ExceptionUtils;
|
||||
import io.onedev.commons.utils.FileUtils;
|
||||
import io.onedev.commons.utils.LockUtils;
|
||||
import io.onedev.commons.utils.MatrixRunner;
|
||||
import io.onedev.commons.utils.schedule.SchedulableTask;
|
||||
import io.onedev.commons.utils.schedule.TaskScheduler;
|
||||
import io.onedev.k8shelper.CacheInstance;
|
||||
import io.onedev.server.OneException;
|
||||
import io.onedev.server.ci.CISpec;
|
||||
import io.onedev.server.ci.InvalidCISpecException;
|
||||
@ -71,7 +73,6 @@ import io.onedev.server.model.Project;
|
||||
import io.onedev.server.model.Setting;
|
||||
import io.onedev.server.model.Setting.Key;
|
||||
import io.onedev.server.model.User;
|
||||
import io.onedev.server.model.support.JobContext;
|
||||
import io.onedev.server.model.support.JobExecutor;
|
||||
import io.onedev.server.persistence.SessionManager;
|
||||
import io.onedev.server.persistence.TransactionManager;
|
||||
@ -83,10 +84,9 @@ import io.onedev.server.util.JobLogger;
|
||||
import io.onedev.server.util.inputspec.InputSpec;
|
||||
import io.onedev.server.util.inputspec.SecretInput;
|
||||
import io.onedev.server.util.patternset.PatternSet;
|
||||
import jersey.repackaged.com.google.common.collect.Sets;
|
||||
|
||||
@Singleton
|
||||
public class DefaultJobManager implements JobManager, Runnable, SchedulableTask, CodePullAuthorizationSource {
|
||||
public class DefaultJobManager implements JobManager, Runnable, CodePullAuthorizationSource {
|
||||
|
||||
private static final int CHECK_INTERVAL = 1000; // check internal in milli-seconds
|
||||
|
||||
@ -116,22 +116,17 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
|
||||
|
||||
private final Set<DependencyPopulator> dependencyPopulators;
|
||||
|
||||
private final TaskScheduler taskScheduler;
|
||||
|
||||
private final BuildParamManager buildParamManager;
|
||||
|
||||
private volatile List<JobExecutor> jobExecutors;
|
||||
|
||||
private String taskId;
|
||||
|
||||
private volatile Status status;
|
||||
|
||||
@Inject
|
||||
public DefaultJobManager(BuildManager buildManager, UserManager userManager,
|
||||
ListenerRegistry listenerRegistry, SettingManager settingManager,
|
||||
TransactionManager transactionManager, JobLogManager logManager, ExecutorService executorService,
|
||||
SessionManager sessionManager, Set<DependencyPopulator> dependencyPopulators,
|
||||
TaskScheduler taskScheduler, BuildParamManager buildParamManager) {
|
||||
public DefaultJobManager(BuildManager buildManager, UserManager userManager, ListenerRegistry listenerRegistry,
|
||||
SettingManager settingManager, TransactionManager transactionManager, JobLogManager logManager,
|
||||
ExecutorService executorService, SessionManager sessionManager,
|
||||
Set<DependencyPopulator> dependencyPopulators, BuildParamManager buildParamManager) {
|
||||
this.settingManager = settingManager;
|
||||
this.buildManager = buildManager;
|
||||
this.userManager = userManager;
|
||||
@ -141,7 +136,6 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
|
||||
this.executorService = executorService;
|
||||
this.dependencyPopulators = dependencyPopulators;
|
||||
this.sessionManager = sessionManager;
|
||||
this.taskScheduler = taskScheduler;
|
||||
this.buildParamManager = buildParamManager;
|
||||
}
|
||||
|
||||
@ -316,13 +310,11 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
|
||||
|
||||
});
|
||||
|
||||
logger.log("Executing job with executor '" + executor.getName() + "'...");
|
||||
|
||||
List<String> commands = Splitter.on("\n").trimResults(CharMatcher.is('\r')).splitToList(job.getCommands());
|
||||
|
||||
JobContext jobContext = new JobContext(projectName, projectGitDir, job.getEnvironment(),
|
||||
serverWorkspace, envVars, commands, job.isCloneSource(), commitId, job.getCaches(),
|
||||
new PatternSet(includeFiles, excludeFiles), logger) {
|
||||
JobContext jobContext = new JobContext(projectName, projectGitDir, job.getEnvironment(), serverWorkspace,
|
||||
envVars, commands, job.isRetrieveSource(), commitId, job.getCaches(),
|
||||
new PatternSet(includeFiles, excludeFiles), executor.getCacheTTL(), logger) {
|
||||
|
||||
@Override
|
||||
public void notifyJobRunning() {
|
||||
@ -375,8 +367,6 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
|
||||
throw e;
|
||||
}
|
||||
} finally {
|
||||
logger.log("Deleting server workspace...");
|
||||
executor.cleanDir(serverWorkspace);
|
||||
FileUtils.deleteDir(serverWorkspace);
|
||||
logger.log("Job finished");
|
||||
}
|
||||
@ -397,8 +387,11 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
|
||||
}
|
||||
|
||||
@Override
|
||||
public JobContext getJobContext(String jobToken) {
|
||||
return jobContexts.get(jobToken);
|
||||
public JobContext getJobContext(String jobToken, boolean mustExist) {
|
||||
JobContext jobContext = jobContexts.get(jobToken);
|
||||
if (mustExist && jobContext == null)
|
||||
throw new OneException("No job context found for specified job token");
|
||||
return jobContext;
|
||||
}
|
||||
|
||||
private void markBuildError(Build build, String errorMessage) {
|
||||
@ -504,12 +497,10 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
|
||||
status = Status.STARTED;
|
||||
jobExecutors = settingManager.getJobExecutors();
|
||||
new Thread(this).start();
|
||||
taskId = taskScheduler.schedule(this);
|
||||
}
|
||||
|
||||
@Listen
|
||||
public void on(SystemStopping event) {
|
||||
taskScheduler.unschedule(taskId);
|
||||
if (status == Status.STARTED) {
|
||||
status = Status.STOPPING;
|
||||
while (status == Status.STOPPING) {
|
||||
@ -626,26 +617,69 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void execute() {
|
||||
for (JobExecutor executor: jobExecutors)
|
||||
executor.checkCaches();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ScheduleBuilder<?> getScheduleBuilder() {
|
||||
return CronScheduleBuilder.dailyAtHourAndMinute(0, 0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean canPullCode(HttpServletRequest request, Project project) {
|
||||
String jobToken = request.getHeader(JOB_TOKEN_HTTP_HEADER);
|
||||
if (jobToken != null) {
|
||||
JobContext context = getJobContext(jobToken);
|
||||
JobContext context = getJobContext(jobToken, false);
|
||||
if (context != null)
|
||||
return context.getProjectName().equals(project.getName());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
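As a hedged aside, the canPullCode() check above implies that a build container authorizes its git fetch by sending its job token in the request header read here. A client-side sketch might look like the following; the server URL and the literal header name are placeholders, not values taken from this commit (the real name is the JOB_TOKEN_HTTP_HEADER constant referenced above).

	// Hypothetical client-side counterpart of canPullCode(); header name is a placeholder
	HttpURLConnection connection = (HttpURLConnection) new URL(
			"https://onedev.example.com/my-project/info/refs?service=git-upload-pack").openConnection();
	connection.setRequestProperty("X-ONEDEV-JOB-TOKEN", jobToken);
	connection.connect();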
@Override
|
||||
public synchronized Map<CacheInstance, String> allocateJobCaches(String jobToken, Date currentTime,
|
||||
Map<CacheInstance, Date> cacheInstances) {
|
||||
JobContext context = getJobContext(jobToken, true);
|
||||
|
||||
List<CacheInstance> sortedInstances = new ArrayList<>(cacheInstances.keySet());
|
||||
sortedInstances.sort(new Comparator<CacheInstance>() {
|
||||
|
||||
@Override
|
||||
public int compare(CacheInstance o1, CacheInstance o2) {
|
||||
return cacheInstances.get(o2).compareTo(cacheInstances.get(o1));
|
||||
}
|
||||
|
||||
});
|
||||
Collection<String> allAllocated = new HashSet<>();
|
||||
for (JobContext each: jobContexts.values())
|
||||
allAllocated.addAll(each.getAllocatedCaches());
|
||||
Map<CacheInstance, String> allocations = new HashMap<>();
|
||||
for (CacheSpec cacheSpec: context.getCacheSpecs()) {
|
||||
Optional<CacheInstance> result = sortedInstances
|
||||
.stream()
|
||||
.filter(it->it.getCacheKey().equals(cacheSpec.getKey()))
|
||||
.filter(it->!allAllocated.contains(it.getName()))
|
||||
.findFirst();
|
||||
CacheInstance allocation;
|
||||
if (result.isPresent())
|
||||
allocation = result.get();
|
||||
else
|
||||
allocation = new CacheInstance(UUID.randomUUID().toString(), cacheSpec.getKey());
|
||||
allocations.put(allocation, cacheSpec.getPath());
|
||||
context.getAllocatedCaches().add(allocation.getName());
|
||||
allAllocated.add(allocation.getName());
|
||||
}
|
||||
|
||||
Consumer<CacheInstance> cacheCleaner = new Consumer<CacheInstance>() {
|
||||
|
||||
@Override
|
||||
public void accept(CacheInstance instance) {
|
||||
long ellapsed = currentTime.getTime() - cacheInstances.get(instance).getTime();
|
||||
if (ellapsed > context.getCacheTTL() * 24L * 3600L * 1000L) {
|
||||
allocations.put(instance, null);
|
||||
context.getAllocatedCaches().add(instance.getName());
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
cacheInstances.keySet()
|
||||
.stream()
|
||||
.filter(it->!allAllocated.contains(it.getName()))
|
||||
.forEach(cacheCleaner);
|
||||
|
||||
return allocations;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
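To make the retention rule in the cache cleaner above easier to follow, here is a self-contained restatement of its expiry arithmetic: an unallocated cache instance is marked for removal (its path mapped to null) once it has been inactive longer than the executor's cache TTL in days. This is a standalone sketch, not code from the commit.

	import java.util.Date;

	public class CacheTtlCheck {

		static boolean expired(Date lastModified, Date now, int cacheTtlDays) {
			long elapsedMillis = now.getTime() - lastModified.getTime();
			return elapsedMillis > cacheTtlDays * 24L * 3600L * 1000L;   // same formula as in the cleaner above
		}

		public static void main(String[] args) {
			Date now = new Date();
			Date eightDaysAgo = new Date(now.getTime() - 8 * 24L * 3600L * 1000L);
			System.out.println(expired(eightDaysAgo, now, 7));   // prints true for a 7-day TTL
		}
	}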
@ -20,7 +20,6 @@ import org.eclipse.jgit.lib.ObjectId;
|
||||
import org.hibernate.validator.constraints.NotEmpty;
|
||||
|
||||
import io.onedev.server.ci.JobDependency;
|
||||
import io.onedev.server.ci.job.cache.JobCache;
|
||||
import io.onedev.server.ci.job.param.JobParam;
|
||||
import io.onedev.server.ci.job.trigger.JobTrigger;
|
||||
import io.onedev.server.event.ProjectEvent;
|
||||
@ -48,7 +47,7 @@ public class Job implements Serializable, Validatable {
|
||||
|
||||
private String commands;
|
||||
|
||||
private boolean cloneSource = true;
|
||||
private boolean retrieveSource = true;
|
||||
|
||||
private List<JobDependency> dependencies = new ArrayList<>();
|
||||
|
||||
@ -56,7 +55,7 @@ public class Job implements Serializable, Validatable {
|
||||
|
||||
private List<JobTrigger> triggers = new ArrayList<>();
|
||||
|
||||
private List<JobCache> caches = new ArrayList<>();
|
||||
private List<CacheSpec> caches = new ArrayList<>();
|
||||
|
||||
private long timeout = 3600;
|
||||
|
||||
@ -106,14 +105,14 @@ public class Job implements Serializable, Validatable {
|
||||
this.commands = commands;
|
||||
}
|
||||
|
||||
@Editable(order=130, description="Whether or not to clone the source code. If enabled, the repository will be "
|
||||
+ "cloned into job workspace")
|
||||
public boolean isCloneSource() {
|
||||
return cloneSource;
|
||||
@Editable(order=130, description="Whether or not to retrieve the source code. If enabled, the repository will be "
|
||||
+ "retrieved into job workspace")
|
||||
public boolean isRetrieveSource() {
|
||||
return retrieveSource;
|
||||
}
|
||||
|
||||
public void setCloneSource(boolean cloneSource) {
|
||||
this.cloneSource = cloneSource;
|
||||
public void setRetrieveSource(boolean retrieveSource) {
|
||||
this.retrieveSource = retrieveSource;
|
||||
}
|
||||
|
||||
@Editable(name="Dependency Jobs", order=140, description="Job dependencies determines the order and "
|
||||
@ -148,11 +147,11 @@ public class Job implements Serializable, Validatable {
|
||||
+ "projects, you may cache the <tt>node_modules</tt> folder to avoid downloading node modules for "
|
||||
+ "subsequent job executions. Note that cache is considered as a best-effort approach and your "
|
||||
+ "build script should always consider that cache might not be available")
|
||||
public List<JobCache> getCaches() {
|
||||
public List<CacheSpec> getCaches() {
|
||||
return caches;
|
||||
}
|
||||
|
||||
public void setCaches(List<JobCache> caches) {
|
||||
public void setCaches(List<CacheSpec> caches) {
|
||||
this.caches = caches;
|
||||
}
|
||||
|
||||
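As the description above suggests (caching node_modules for NodeJS projects), a job's cache list could be filled in roughly like this; the sketch is illustrative and the key name is made up.

	// Illustrative only: attach a node_modules cache to a job
	Job job = new Job();
	CacheSpec nodeModules = new CacheSpec();
	nodeModules.setKey("node-modules");
	nodeModules.setPath("node_modules");              // relative to the job workspace
	job.setCaches(Lists.newArrayList(nodeModules));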
@ -179,7 +178,7 @@ public class Job implements Serializable, Validatable {
|
||||
Set<String> paths = new HashSet<>();
|
||||
|
||||
boolean isValid = true;
|
||||
for (JobCache cache: caches) {
|
||||
for (CacheSpec cache: caches) {
|
||||
if (keys.contains(cache.getKey())) {
|
||||
isValid = false;
|
||||
context.buildConstraintViolationWithTemplate("Duplicate key: " + cache.getKey())
|
||||
|
||||
@ -1,8 +1,9 @@
|
||||
package io.onedev.server.model.support;
|
||||
package io.onedev.server.ci.job;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@ -11,7 +12,6 @@ import org.eclipse.jgit.api.errors.GitAPIException;
|
||||
import org.eclipse.jgit.lib.ObjectId;
|
||||
|
||||
import io.onedev.commons.utils.ExceptionUtils;
|
||||
import io.onedev.server.ci.job.cache.JobCache;
|
||||
import io.onedev.server.git.command.CheckoutCommand;
|
||||
import io.onedev.server.git.command.FetchCommand;
|
||||
import io.onedev.server.util.JobLogger;
|
||||
@ -31,30 +31,35 @@ public abstract class JobContext {
|
||||
|
||||
private final List<String> commands;
|
||||
|
||||
private final boolean cloneSource;
|
||||
private final boolean retrieveSource;
|
||||
|
||||
private final ObjectId commitId;
|
||||
|
||||
private final Collection<JobCache> caches;
|
||||
private final Collection<CacheSpec> cacheSpecs;
|
||||
|
||||
private final PatternSet collectFiles;
|
||||
|
||||
private final int cacheTTL;
|
||||
|
||||
private final JobLogger logger;
|
||||
|
||||
public JobContext(String projectName, File gitDir, String environment, File workspace,
|
||||
Map<String, String> envVars, List<String> commands, boolean cloneSource,
|
||||
ObjectId commitId, Collection<JobCache> caches, PatternSet collectFiles,
|
||||
JobLogger logger) {
|
||||
private final Collection<String> allocatedCaches = new HashSet<>();
|
||||
|
||||
public JobContext(String projectName, File gitDir, String environment,
|
||||
File workspace, Map<String, String> envVars, List<String> commands,
|
||||
boolean retrieveSource, ObjectId commitId, Collection<CacheSpec> caches,
|
||||
PatternSet collectFiles, int cacheTTL, JobLogger logger) {
|
||||
this.projectName = projectName;
|
||||
this.gitDir = gitDir;
|
||||
this.environment = environment;
|
||||
this.serverWorkspace = workspace;
|
||||
this.envVars = envVars;
|
||||
this.commands = commands;
|
||||
this.cloneSource = cloneSource;
|
||||
this.retrieveSource = retrieveSource;
|
||||
this.commitId = commitId;
|
||||
this.caches = caches;
|
||||
this.cacheSpecs = caches;
|
||||
this.collectFiles = collectFiles;
|
||||
this.cacheTTL = cacheTTL;
|
||||
this.logger = logger;
|
||||
}
|
||||
|
||||
@ -82,12 +87,12 @@ public abstract class JobContext {
|
||||
return commitId;
|
||||
}
|
||||
|
||||
public boolean isCloneSource() {
|
||||
return cloneSource;
|
||||
public boolean isRetrieveSource() {
|
||||
return retrieveSource;
|
||||
}
|
||||
|
||||
public Collection<JobCache> getCaches() {
|
||||
return caches;
|
||||
public Collection<CacheSpec> getCacheSpecs() {
|
||||
return cacheSpecs;
|
||||
}
|
||||
|
||||
public PatternSet getCollectFiles() {
|
||||
@ -103,7 +108,7 @@ public abstract class JobContext {
|
||||
new CheckoutCommand(checkoutDir).refspec(commitId.name()).call();
|
||||
}
|
||||
|
||||
public void checkoutSource(File dir) {
|
||||
public void retrieveSource(File dir) {
|
||||
if (new File(dir, ".git").exists()) {
|
||||
try (Git git = Git.open(dir)) {
|
||||
fetchAndCheckout(dir);
|
||||
@ -119,6 +124,14 @@ public abstract class JobContext {
|
||||
}
|
||||
}
|
||||
|
||||
public int getCacheTTL() {
|
||||
return cacheTTL;
|
||||
}
|
||||
|
||||
public Collection<String> getAllocatedCaches() {
|
||||
return allocatedCaches;
|
||||
}
|
||||
|
||||
public abstract void notifyJobRunning();
|
||||
|
||||
}
|
||||
@@ -1,5 +1,6 @@
package io.onedev.server.ci.job;

import java.util.Date;
import java.util.List;
import java.util.Map;
@@ -7,10 +8,10 @@ import javax.annotation.Nullable;

import org.eclipse.jgit.lib.ObjectId;

import io.onedev.k8shelper.CacheInstance;
import io.onedev.server.model.Build;
import io.onedev.server.model.Project;
import io.onedev.server.model.User;
import io.onedev.server.model.support.JobContext;

public interface JobManager {
@@ -23,6 +24,9 @@

	void cancel(Build build, @Nullable User canceller);

	JobContext getJobContext(String jobToken);
	JobContext getJobContext(String jobToken, boolean mustExist);

	Map<CacheInstance, String> allocateJobCaches(String jobToken, Date currentTime,
			Map<CacheInstance, Date> cacheInstances);

}
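For context, a hedged sketch of how an executor might drive the new allocateJobCaches() contract: the build side reports its existing cache instances with their last-modified times, and the returned map tells it where to mount each allocated instance, with a null path flagging an instance that exceeded the TTL and should be deleted. The variable names below are assumptions for illustration.

	Map<CacheInstance, Date> reported = new HashMap<>();
	reported.put(new CacheInstance(UUID.randomUUID().toString(), "maven-repository"), new Date());

	Map<CacheInstance, String> allocations =
			jobManager.allocateJobCaches(jobToken, new Date(), reported);

	for (Map.Entry<CacheInstance, String> entry: allocations.entrySet()) {
		if (entry.getValue() != null) {
			// set up this cache instance at the returned path inside the job workspace
		} else {
			// null path: the instance outlived the cache TTL and should be removed
		}
	}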
@ -1,48 +0,0 @@
|
||||
package io.onedev.server.ci.job.cache;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
import org.apache.commons.io.FilenameUtils;
|
||||
|
||||
import io.onedev.server.OneException;
|
||||
|
||||
public class CacheAllocation {
|
||||
|
||||
private final File instance;
|
||||
|
||||
private final String path;
|
||||
|
||||
public CacheAllocation(File instance, String path) {
|
||||
this.instance = instance;
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
public File getInstance() {
|
||||
return instance;
|
||||
}
|
||||
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
public void release() {
|
||||
File lockFile = new File(instance, JobCache.LOCK_FILE);
|
||||
if (!lockFile.delete())
|
||||
throw new OneException("Unable to delete file: " + lockFile.getAbsolutePath());
|
||||
}
|
||||
|
||||
public String resolvePath(String basePath) {
|
||||
String path = getPath();
|
||||
if (path == null)
|
||||
path = "";
|
||||
if (FilenameUtils.getPrefixLength(path) != 0)
|
||||
return path;
|
||||
else
|
||||
return basePath + "/" + path;
|
||||
}
|
||||
|
||||
public boolean isWorkspace() {
|
||||
return path == null || FilenameUtils.normalize(path).length() == 0;
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,9 +0,0 @@
|
||||
package io.onedev.server.ci.job.cache;
|
||||
|
||||
import java.util.Collection;
|
||||
|
||||
public interface CacheCallable<T> {
|
||||
|
||||
T call(Collection<CacheAllocation> allocations);
|
||||
|
||||
}
|
||||
@ -1,48 +0,0 @@
|
||||
package io.onedev.server.ci.job.cache;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import io.onedev.commons.utils.ExceptionUtils;
|
||||
import io.onedev.commons.utils.FileUtils;
|
||||
|
||||
public class CacheRunner {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(CacheRunner.class);
|
||||
|
||||
private final File cacheHome;
|
||||
|
||||
private final Collection<JobCache> caches;
|
||||
|
||||
public CacheRunner(File cacheHome, Collection<JobCache> caches) {
|
||||
this.cacheHome = cacheHome;
|
||||
this.caches = caches;
|
||||
}
|
||||
|
||||
public <T> T call(CacheCallable<T> callable) {
|
||||
Collection<CacheAllocation> allocations = new ArrayList<>();
|
||||
try {
|
||||
if (!cacheHome.exists())
|
||||
FileUtils.createDir(cacheHome);
|
||||
|
||||
for (JobCache cache: caches)
|
||||
allocations.add(cache.allocate(cacheHome));
|
||||
return callable.call(allocations);
|
||||
} catch (Exception e) {
|
||||
throw ExceptionUtils.unchecked(e);
|
||||
} finally {
|
||||
for (CacheAllocation allocation: allocations) {
|
||||
try {
|
||||
allocation.release();
|
||||
} catch (Exception e) {
|
||||
logger.error("Error releasing allocated cache", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,80 +0,0 @@
|
||||
package io.onedev.server.ci.job.cache;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.Serializable;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.Callable;
|
||||
|
||||
import org.hibernate.validator.constraints.NotEmpty;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import io.onedev.commons.utils.FileUtils;
|
||||
import io.onedev.commons.utils.LockUtils;
|
||||
import io.onedev.server.OneException;
|
||||
import io.onedev.server.util.validation.annotation.Path;
|
||||
import io.onedev.server.util.validation.annotation.PathSegment;
|
||||
import io.onedev.server.web.editable.annotation.Editable;
|
||||
|
||||
@Editable
|
||||
public class JobCache implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public static final String LOCK_FILE = "$OneDev-Cache-Lock$";
|
||||
|
||||
private String key;
|
||||
|
||||
private String path;
|
||||
|
||||
@Editable(order=100, description="Specify key of the cache. Caches with same key can be reused by different builds")
|
||||
@PathSegment
|
||||
@NotEmpty
|
||||
public String getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
public void setKey(String key) {
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
@Editable(order=200, description="Specify path to cache. Non-absolute path is considered to be relative to job workspace. "
|
||||
+ "Specify \".\" (without quote) to cache workspace itself")
|
||||
@Path
|
||||
@NotEmpty
|
||||
public String getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
public void setPath(String path) {
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
public CacheAllocation allocate(File cacheHome) {
|
||||
File keyDir = new File(cacheHome, getKey());
|
||||
if (!keyDir.exists())
|
||||
FileUtils.createDir(keyDir);
|
||||
return LockUtils.call(keyDir.getAbsolutePath(), new Callable<CacheAllocation>() {
|
||||
|
||||
@Override
|
||||
public CacheAllocation call() throws Exception {
|
||||
List<File> cacheInstances = Lists.newArrayList(keyDir.listFiles());
|
||||
cacheInstances.sort(Comparator.comparing(File::lastModified).reversed());
|
||||
for (File cacheInstance: cacheInstances) {
|
||||
if (new File(cacheInstance, LOCK_FILE).createNewFile())
|
||||
return new CacheAllocation(cacheInstance, path);
|
||||
}
|
||||
File cacheInstance = new File(keyDir, UUID.randomUUID().toString());
|
||||
FileUtils.createDir(cacheInstance);
|
||||
File lockFile = new File(cacheInstance, LOCK_FILE);
|
||||
if (!lockFile.createNewFile())
|
||||
throw new OneException("Unable to create file: " + lockFile.getAbsolutePath());
|
||||
return new CacheAllocation(cacheInstance, path);
|
||||
}
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,3 +0,0 @@
package io.onedev.server.ci.job.log;

public enum LogLevel {ERROR, WARN, INFO, DEBUG, TRACE}
@@ -173,8 +173,11 @@ public class DefaultProjectManager extends AbstractEntityManager<Project> implem
	@Override
	public void delete(Project project) {
		Usage usage = new Usage();
		for (JobExecutor jobExecutor: settingManager.getJobExecutors())
			usage.add(jobExecutor.onDeleteProject(project.getName()).prefix("administration"));
		int index = 0;
		for (JobExecutor jobExecutor: settingManager.getJobExecutors()) {
			usage.add(jobExecutor.onDeleteProject(project.getName(), index).prefix("administration"));
			index++;
		}

		usage.checkInUse("Project '" + project.getName() + "'");
@ -1,18 +1,16 @@
|
||||
package io.onedev.server.model.support;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.Serializable;
|
||||
|
||||
import org.eclipse.jgit.lib.ObjectId;
|
||||
import org.hibernate.validator.constraints.NotEmpty;
|
||||
|
||||
import io.onedev.commons.launcher.loader.ExtensionPoint;
|
||||
import io.onedev.commons.utils.stringmatch.ChildAwareMatcher;
|
||||
import io.onedev.commons.utils.stringmatch.Matcher;
|
||||
import io.onedev.server.ci.job.JobContext;
|
||||
import io.onedev.server.model.Project;
|
||||
import io.onedev.server.util.Usage;
|
||||
import io.onedev.server.util.patternset.PatternSet;
|
||||
import io.onedev.server.util.validation.annotation.ExecutorName;
|
||||
import io.onedev.server.web.editable.annotation.BranchPatterns;
|
||||
import io.onedev.server.web.editable.annotation.Editable;
|
||||
import io.onedev.server.web.editable.annotation.NameOfEmptyValue;
|
||||
@ -25,12 +23,8 @@ public abstract class JobExecutor implements Serializable {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public static final String WORKSPACE = "onedev-workspace";
|
||||
|
||||
private boolean enabled = true;
|
||||
|
||||
private String name;
|
||||
|
||||
private String projects;
|
||||
|
||||
private String branches;
|
||||
@ -48,20 +42,10 @@ public abstract class JobExecutor implements Serializable {
|
||||
public void setEnabled(boolean enabled) {
|
||||
this.enabled = enabled;
|
||||
}
|
||||
|
||||
@Editable(order=50, description="Specify a name to identify the executor")
|
||||
@ExecutorName
|
||||
@NotEmpty
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
@Editable(order=10000, name="Applicable Projects", group="Job Applicability",
|
||||
description="Optionally specify space-separated projects applicable for this executor. Use * or ? for wildcard match")
|
||||
description="Optionally specify space-separated projects applicable for this executor. "
|
||||
+ "Use * or ? for wildcard match")
|
||||
@ProjectPatterns
|
||||
@NameOfEmptyValue("All")
|
||||
public String getProjects() {
|
||||
@ -73,7 +57,8 @@ public abstract class JobExecutor implements Serializable {
|
||||
}
|
||||
|
||||
@Editable(order=10100, name="Applicable Branches", group="Job Applicability",
|
||||
description="Optionally specify space-separated branches applicable for this executor. Use * or ? for wildcard match")
|
||||
description="Optionally specify space-separated branches applicable for this executor. "
|
||||
+ "Use * or ? for wildcard match")
|
||||
@BranchPatterns
|
||||
@NameOfEmptyValue("All")
|
||||
public String getBranches() {
|
||||
@ -85,7 +70,8 @@ public abstract class JobExecutor implements Serializable {
|
||||
}
|
||||
|
||||
@Editable(order=10200, name="Applicable Jobs", group="Job Applicability",
|
||||
description="Optionally specify space-separated jobs applicable for this executor. Use * or ? for wildcard match")
|
||||
description="Optionally specify space-separated jobs applicable for this executor. "
|
||||
+ "Use * or ? for wildcard match")
|
||||
@Patterns
|
||||
@NameOfEmptyValue("All")
|
||||
public String getJobs() {
|
||||
@ -97,7 +83,8 @@ public abstract class JobExecutor implements Serializable {
|
||||
}
|
||||
|
||||
@Editable(order=10300, name="Applicable Environments", group="Job Applicability",
|
||||
description="Optionally specify space-separated job environments applicable for this executor. Use * or ? for wildcard match")
|
||||
description="Optionally specify space-separated job environments applicable for this executor. "
|
||||
+ "Use * or ? for wildcard match")
|
||||
@Patterns
|
||||
@NameOfEmptyValue("All")
|
||||
public String getEnvironments() {
|
||||
@ -108,9 +95,9 @@ public abstract class JobExecutor implements Serializable {
|
||||
this.environments = environments;
|
||||
}
|
||||
|
||||
@Editable(order=50000, group="More Settings", description="Specify job cache TTL (time to live) by days. OneDev may create "
|
||||
+ "multiple job caches even for same cache key to avoid cache conflicts when running jobs "
|
||||
+ "concurrently. This setting tells OneDev to remove caches inactive for specified "
|
||||
@Editable(order=50000, group="More Settings", description="Specify job cache TTL (time to live) by days. "
|
||||
+ "OneDev may create multiple job caches even for same cache key to avoid cache conflicts when "
|
||||
+ "running jobs concurrently. This setting tells OneDev to remove caches inactive for specified "
|
||||
+ "time period to save disk space")
|
||||
public int getCacheTTL() {
|
||||
return cacheTTL;
|
||||
@ -132,14 +119,14 @@ public abstract class JobExecutor implements Serializable {
|
||||
&& (getBranches() == null || project.isCommitOnBranches(commitId, getBranches()));
|
||||
}
|
||||
|
||||
public Usage onDeleteProject(String projectName) {
|
||||
public Usage onDeleteProject(String projectName, int executorIndex) {
|
||||
Usage usage = new Usage();
|
||||
if (getProjects() != null) {
|
||||
PatternSet patternSet = PatternSet.fromString(getProjects());
|
||||
if (patternSet.getIncludes().contains(projectName) || patternSet.getExcludes().contains(projectName))
|
||||
usage.add("applicable projects");
|
||||
}
|
||||
return usage.prefix(getName()).prefix("job executor '" + getName() + "'");
|
||||
return usage.prefix("job executor #" + executorIndex);
|
||||
}
|
||||
|
||||
public void onRenameProject(String oldName, String newName) {
|
||||
@ -151,8 +138,4 @@ public abstract class JobExecutor implements Serializable {
|
||||
setProjects(patternSet.toString());
|
||||
}
|
||||
|
||||
public abstract void checkCaches();
|
||||
|
||||
public abstract void cleanDir(File dir);
|
||||
|
||||
}
|
||||
|
||||
@ -7,6 +7,7 @@ import javax.validation.Valid;
|
||||
import javax.validation.constraints.NotNull;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import org.hibernate.validator.constraints.NotEmpty;
|
||||
|
||||
import io.onedev.server.git.config.CurlConfig;
|
||||
|
||||
@ -1,21 +1,16 @@
|
||||
package io.onedev.server.rest.jersey;
|
||||
|
||||
import javax.inject.Inject;
|
||||
import javax.ws.rs.ext.MessageBodyReader;
|
||||
import javax.ws.rs.ext.MessageBodyWriter;
|
||||
|
||||
import org.glassfish.hk2.api.ServiceLocator;
|
||||
import org.glassfish.jersey.CommonProperties;
|
||||
import org.glassfish.jersey.internal.util.PropertiesHelper;
|
||||
import org.glassfish.jersey.jackson.JacksonFeature;
|
||||
import org.glassfish.jersey.server.ResourceConfig;
|
||||
import org.glassfish.jersey.server.ServerProperties;
|
||||
import org.jvnet.hk2.guice.bridge.api.GuiceBridge;
|
||||
import org.jvnet.hk2.guice.bridge.api.GuiceIntoHK2Bridge;
|
||||
|
||||
import com.fasterxml.jackson.jaxrs.base.JsonMappingExceptionMapper;
|
||||
import com.fasterxml.jackson.jaxrs.base.JsonParseExceptionMapper;
|
||||
import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider;
|
||||
|
||||
import io.onedev.commons.launcher.loader.AppLoader;
|
||||
|
||||
public class JerseyApplication extends ResourceConfig {
|
||||
@ -33,9 +28,7 @@ public class JerseyApplication extends ResourceConfig {
|
||||
property(ServerProperties.BV_SEND_ERROR_IN_RESPONSE, true);
|
||||
|
||||
// add the default Jackson exception mappers
|
||||
register(JsonParseExceptionMapper.class);
|
||||
register(JsonMappingExceptionMapper.class);
|
||||
register(JacksonJsonProvider.class, MessageBodyReader.class, MessageBodyWriter.class);
|
||||
register(JacksonFeature.class);
|
||||
|
||||
packages(JerseyApplication.class.getPackage().getName());
|
||||
|
||||
|
||||
@@ -166,5 +166,12 @@ public class DefaultStorageManager implements StorageManager {
		FileUtils.createDir(buildDir);
		return buildDir;
	}

	@Override
	public File getJobCacheDir() {
		File projectsDir = new File(getStorageDir(), "jobcache");
		FileUtils.createDir(projectsDir);
		return projectsDir;
	}

}
@@ -49,4 +49,6 @@ public interface StorageManager {
	 */
	File getBuildDir(Long projectId, Long buildNumber);

	File getJobCacheDir();

}
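A possible server-side layout implied by getJobCacheDir() — one sub-directory per cache instance name — is sketched below; the per-instance naming is an assumption, not something this hunk shows.

	File jobCacheDir = storageManager.getJobCacheDir();                   // <storage>/jobcache
	File instanceDir = new File(jobCacheDir, cacheInstance.getName());    // assumed per-instance layout
	if (!instanceDir.exists())
		FileUtils.createDir(instanceDir);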
@ -21,8 +21,6 @@ import com.fasterxml.jackson.databind.jsontype.TypeDeserializer;
|
||||
import com.fasterxml.jackson.databind.jsontype.TypeResolverBuilder;
|
||||
import com.fasterxml.jackson.databind.jsontype.TypeSerializer;
|
||||
import com.fasterxml.jackson.databind.jsontype.impl.StdTypeResolverBuilder;
|
||||
import com.fasterxml.jackson.datatype.guava.GuavaModule;
|
||||
import com.fasterxml.jackson.datatype.joda.JodaModule;
|
||||
|
||||
@Singleton
|
||||
public class ObjectMapperProvider implements Provider<ObjectMapper> {
|
||||
@ -37,8 +35,6 @@ public class ObjectMapperProvider implements Provider<ObjectMapper> {
|
||||
@Override
|
||||
public ObjectMapper get() {
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
mapper.registerModule(new GuavaModule());
|
||||
mapper.registerModule(new JodaModule());
|
||||
|
||||
TypeResolverBuilder<?> typer = new StdTypeResolverBuilder() {
|
||||
|
||||
|
||||
@ -1,37 +0,0 @@
|
||||
package io.onedev.server.util.validation;
|
||||
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import javax.validation.ConstraintValidator;
|
||||
import javax.validation.ConstraintValidatorContext;
|
||||
|
||||
import io.onedev.server.util.validation.annotation.ExecutorName;
|
||||
|
||||
public class ExecutorNameValidator implements ConstraintValidator<ExecutorName, String> {
|
||||
|
||||
private static final Pattern PATTERN = Pattern.compile("[\\w-\\.]+");
|
||||
|
||||
@Override
|
||||
public void initialize(ExecutorName constaintAnnotation) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isValid(String value, ConstraintValidatorContext constraintContext) {
|
||||
if (value == null) {
|
||||
return true;
|
||||
} else if (!PATTERN.matcher(value).matches()) {
|
||||
constraintContext.disableDefaultConstraintViolation();
|
||||
String message = "Only alphanumeric, underscore, dash, dot, and space are accepted";
|
||||
constraintContext.buildConstraintViolationWithTemplate(message).addConstraintViolation();
|
||||
return false;
|
||||
} else if (value.equals("new")) {
|
||||
constraintContext.disableDefaultConstraintViolation();
|
||||
String message = "'new' is reserved and can not be used as executor name";
|
||||
constraintContext.buildConstraintViolationWithTemplate(message).addConstraintViolation();
|
||||
return false;
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,23 +0,0 @@
|
||||
package io.onedev.server.util.validation.annotation;
|
||||
|
||||
import java.lang.annotation.ElementType;
|
||||
import java.lang.annotation.Retention;
|
||||
import java.lang.annotation.RetentionPolicy;
|
||||
import java.lang.annotation.Target;
|
||||
|
||||
import javax.validation.Constraint;
|
||||
import javax.validation.Payload;
|
||||
|
||||
import io.onedev.server.util.validation.ExecutorNameValidator;
|
||||
|
||||
@Target({ElementType.METHOD, ElementType.FIELD})
|
||||
@Retention(RetentionPolicy.RUNTIME)
|
||||
@Constraint(validatedBy=ExecutorNameValidator.class)
|
||||
public @interface ExecutorName {
|
||||
|
||||
String message() default "";
|
||||
|
||||
Class<?>[] groups() default {};
|
||||
|
||||
Class<? extends Payload>[] payload() default {};
|
||||
}
|
||||
@ -28,6 +28,8 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.unbescape.html.HtmlEscape;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import io.onedev.commons.launcher.loader.Listen;
|
||||
import io.onedev.commons.utils.ExceptionUtils;
|
||||
import io.onedev.commons.utils.WordUtils;
|
||||
@ -38,7 +40,6 @@ import io.onedev.server.event.system.SystemStarted;
|
||||
import io.onedev.server.event.system.SystemStopping;
|
||||
import io.onedev.server.util.JobLogger;
|
||||
import io.onedev.server.web.component.modal.ModalPanel;
|
||||
import jersey.repackaged.com.google.common.collect.Lists;
|
||||
|
||||
@SuppressWarnings("serial")
|
||||
public abstract class TaskButton extends AjaxButton {
|
||||
|
||||
@ -9,10 +9,11 @@ import org.apache.wicket.model.IModel;
|
||||
import org.apache.wicket.model.LoadableDetachableModel;
|
||||
import org.apache.wicket.model.Model;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import io.onedev.commons.launcher.loader.AppLoader;
|
||||
import io.onedev.commons.utils.ClassUtils;
|
||||
import io.onedev.server.util.OneContext;
|
||||
import jersey.repackaged.com.google.common.collect.Lists;
|
||||
|
||||
@SuppressWarnings("serial")
|
||||
public abstract class PropertyContext<T> implements Serializable {
|
||||
|
||||
@ -26,12 +26,13 @@ import org.apache.wicket.markup.repeater.data.ListDataProvider;
|
||||
import org.apache.wicket.model.IModel;
|
||||
import org.apache.wicket.model.Model;
|
||||
|
||||
import com.google.common.collect.Sets;
|
||||
|
||||
import io.onedev.commons.utils.StringUtils;
|
||||
import io.onedev.server.util.inputspec.InputSpec;
|
||||
import io.onedev.server.web.editable.BeanContext;
|
||||
import io.onedev.server.web.editable.EditableUtils;
|
||||
import io.onedev.server.web.page.layout.SideFloating;
|
||||
import jersey.repackaged.com.google.common.collect.Sets;
|
||||
|
||||
@SuppressWarnings("serial")
|
||||
class ParamSpecListViewPanel extends Panel {
|
||||
|
||||
@ -5,8 +5,6 @@ import java.lang.reflect.ParameterizedType;
|
||||
import java.lang.reflect.Type;
|
||||
import java.util.List;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
import org.apache.wicket.ajax.AjaxRequestTarget;
|
||||
import org.apache.wicket.ajax.markup.html.AjaxLink;
|
||||
import org.apache.wicket.ajax.markup.html.form.AjaxButton;
|
||||
@ -24,7 +22,6 @@ import io.onedev.server.web.component.taskbutton.TaskButton;
|
||||
import io.onedev.server.web.editable.BeanContext;
|
||||
import io.onedev.server.web.editable.BeanEditor;
|
||||
import io.onedev.server.web.editable.BeanUpdating;
|
||||
import io.onedev.server.web.editable.PathElement;
|
||||
import io.onedev.server.web.util.Testable;
|
||||
|
||||
@SuppressWarnings("serial")
|
||||
@ -41,21 +38,6 @@ abstract class JobExecutorEditPanel extends Panel {
|
||||
this.executorIndex = executorIndex;
|
||||
}
|
||||
|
||||
private void checkNameDuplication(BeanEditor editor, JobExecutor executor) {
|
||||
if (executorIndex != -1) {
|
||||
JobExecutor oldExecutor = executors.get(executorIndex);
|
||||
if (!executor.getName().equals(oldExecutor.getName()) && getExecutor(executor.getName()) != null) {
|
||||
editor.getErrorContext(new PathElement.Named("executor"))
|
||||
.getErrorContext(new PathElement.Named("name"))
|
||||
.addError("This name has already been used by another job executor");
|
||||
}
|
||||
} else if (getExecutor(executor.getName()) != null) {
|
||||
editor.getErrorContext(new PathElement.Named("executor"))
|
||||
.getErrorContext(new PathElement.Named("name"))
|
||||
.addError("This name has already been used by another job executor");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onInitialize() {
|
||||
super.onInitialize();
|
||||
@ -73,7 +55,6 @@ abstract class JobExecutorEditPanel extends Panel {
|
||||
super.onSubmit(target, form);
|
||||
|
||||
JobExecutor executor = bean.getExecutor();
|
||||
checkNameDuplication(editor, executor);
|
||||
|
||||
if (!editor.hasErrors(true)) {
|
||||
if (executorIndex != -1) {
|
||||
@ -149,7 +130,6 @@ abstract class JobExecutorEditPanel extends Panel {
|
||||
|
||||
@Override
|
||||
protected void onSubmit(AjaxRequestTarget target, Form<?> form) {
|
||||
checkNameDuplication(editor, bean.getExecutor());
|
||||
if (!editor.hasErrors(true)) {
|
||||
if (testData != null) {
|
||||
new BeanEditModalPanel(target, testData) {
|
||||
@ -208,15 +188,6 @@ abstract class JobExecutorEditPanel extends Panel {
|
||||
setOutputMarkupId(true);
|
||||
}
|
||||
|
||||
@Nullable
|
||||
private JobExecutor getExecutor(String executorName) {
|
||||
for (JobExecutor executor: executors) {
|
||||
if (executorName.equals(executor.getName()))
|
||||
return executor;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
protected abstract void onSave(AjaxRequestTarget target);
|
||||
|
||||
protected abstract void onCancel(AjaxRequestTarget target);
|
||||
|
||||
@ -7,13 +7,6 @@
|
||||
<artifactId>server-plugin</artifactId>
|
||||
<version>3.0.1</version>
|
||||
</parent>
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>io.onedev</groupId>
|
||||
<artifactId>k8s-helper</artifactId>
|
||||
<version>1.0.0</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<properties>
|
||||
<moduleClass>io.onedev.server.plugin.kubernetes.KubernetesModule</moduleClass>
|
||||
</properties>
|
||||
|
||||
@ -13,11 +13,14 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import javax.validation.ConstraintValidatorContext;
|
||||
import org.hibernate.validator.constraints.NotEmpty;
|
||||
|
||||
import org.apache.commons.codec.Charsets;
|
||||
import org.hibernate.validator.constraints.NotEmpty;
|
||||
import org.apache.commons.io.FilenameUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.unbescape.json.JsonEscape;
|
||||
import org.yaml.snakeyaml.Yaml;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
@ -25,6 +28,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.google.common.base.Preconditions;
|
||||
import com.google.common.base.Splitter;
|
||||
import com.google.common.collect.Lists;
|
||||
import com.google.common.collect.Sets;
|
||||
|
||||
import io.onedev.commons.utils.ExceptionUtils;
|
||||
import io.onedev.commons.utils.FileUtils;
|
||||
@ -35,19 +39,22 @@ import io.onedev.commons.utils.command.LineConsumer;
|
||||
import io.onedev.k8shelper.KubernetesHelper;
|
||||
import io.onedev.server.OneDev;
|
||||
import io.onedev.server.OneException;
|
||||
import io.onedev.server.ci.job.JobContext;
|
||||
import io.onedev.server.entitymanager.SettingManager;
|
||||
import io.onedev.server.model.support.JobContext;
|
||||
import io.onedev.server.model.support.JobExecutor;
|
||||
import io.onedev.server.plugin.kubernetes.KubernetesExecutor.TestData;
|
||||
import io.onedev.server.util.JobLogger;
|
||||
import io.onedev.server.util.inputspec.SecretInput;
|
||||
import io.onedev.server.util.validation.Validatable;
|
||||
import io.onedev.server.util.validation.annotation.ClassValidating;
|
||||
import io.onedev.server.web.editable.annotation.Editable;
|
||||
import io.onedev.server.web.editable.annotation.NameOfEmptyValue;
|
||||
import io.onedev.server.web.editable.annotation.OmitName;
|
||||
import io.onedev.server.web.util.Testable;
|
||||
import jersey.repackaged.com.google.common.collect.Sets;
|
||||
|
||||
@Editable(order=300)
|
||||
public class KubernetesExecutor extends JobExecutor implements Testable<TestData> {
|
||||
@ClassValidating
|
||||
public class KubernetesExecutor extends JobExecutor implements Testable<TestData>, Validatable {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
@ -69,10 +76,13 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
|
||||
|
||||
private String memoryRequest = "128m";
|
||||
|
||||
private String cacheHome;
|
||||
|
||||
@Editable(name="Kubectl Config File", order=100, description=
|
||||
"Specify absolute path to the config file used by kubectl to access the "
|
||||
+ "cluster. Leave empty to have kubectl determining cluster access "
|
||||
+ "information automatically")
|
||||
@NameOfEmptyValue("Use default")
|
||||
public String getConfigFile() {
|
||||
return configFile;
|
||||
}
|
||||
@ -84,6 +94,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
|
||||
@Editable(name="Path to kubectl", order=200, description=
|
||||
"Specify absolute path to the kubectl utility, for instance: <i>/usr/bin/kubectl</i>. "
|
||||
+ "If left empty, OneDev will try to find the utility from system path")
|
||||
@NameOfEmptyValue("Use default")
|
||||
public String getKubeCtlPath() {
|
||||
return kubeCtlPath;
|
||||
}
|
||||
@ -91,7 +102,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
|
||||
public void setKubeCtlPath(String kubeCtlPath) {
|
||||
this.kubeCtlPath = kubeCtlPath;
|
||||
}
|
||||
|
||||
|
||||
@Editable(order=20000, group="More Settings", description="Optionally specify Kubernetes namespace "
|
||||
+ "used by this executor to place created Kubernetes resources (such as job pods)")
|
||||
@NotEmpty
|
||||
@@ -160,6 +171,27 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
		this.memoryRequest = memoryRequest;
	}

	@Editable(order=40000, group="More Settings", description="Optionally specify an absolute directory on Kubernetes "
			+ "working nodes to store job caches of this executor. If leave empty, OneDev will use directory "
			+ "<tt>/onedev-cache</tt> on Linux, and <tt>C:\onedev-cache</tt> on Windows")
	@NameOfEmptyValue("Use default")
	public String getCacheHome() {
		return cacheHome;
	}

	public void setCacheHome(String cacheHome) {
		this.cacheHome = cacheHome;
	}

	private String getEffectiveCacheHome(String osName) {
		if (getCacheHome() != null)
			return cacheHome;
		else if (osName.equalsIgnoreCase("linux"))
			return "/onedev-cache";
		else
			return "C:\onedev-cache";
	}

	@Override
	public void execute(String jobToken, JobContext jobContext) {
		execute(jobContext.getEnvironment(), jobToken, jobContext.getLogger(), jobContext);
@ -170,15 +202,6 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
|
||||
execute(testData.getDockerImage(), KubernetesResource.TEST_JOB_TOKEN, logger, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void checkCaches() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void cleanDir(File dir) {
|
||||
FileUtils.cleanDir(dir);
|
||||
}
|
||||
|
||||
private Commandline newKubeCtl() {
|
||||
String kubectl = getKubeCtlPath();
|
||||
if (kubectl == null)
|
||||
@ -337,7 +360,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
|
||||
logger.log(String.format("OS of working node is '%s'", osName));
|
||||
return osName;
|
||||
} else {
|
||||
throw new OneException("No applicable working nodes found for executor '" + getName() + "'");
|
||||
throw new OneException("No applicable working nodes found for executor '" + getEffectiveCacheHome(osName) + "'");
|
||||
}
|
||||
}
|
||||
|
||||
@ -372,22 +395,37 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
|
||||
"name", "main",
|
||||
"image", dockerImage);
|
||||
|
||||
Map<String, String> emptyDirMount = new LinkedHashMap<>();
|
||||
String classPath;
|
||||
String k8sHelperClassPath;
|
||||
String containerCIHome;
|
||||
String containerWorkspace;
|
||||
String containerCacheHome;
|
||||
if (osName.equalsIgnoreCase("linux")) {
|
||||
containerCIHome = "/onedev-ci";
|
||||
containerWorkspace = containerCIHome + "/workspace";
|
||||
k8sHelperClassPath = "/k8s-helper/*";
|
||||
containerCacheHome = "/onedev-cache";
|
||||
mainContainerSpec.put("command", Lists.newArrayList("sh"));
|
||||
mainContainerSpec.put("args", Lists.newArrayList(".onedev/job-commands-wrapper.sh"));
|
||||
emptyDirMount.put("mountPath", "/onedev-workspace");
|
||||
classPath = "/k8s-helper/*";
|
||||
mainContainerSpec.put("args", Lists.newArrayList(containerCIHome + "/commands.sh"));
|
||||
} else {
|
||||
containerCIHome = "C:\\onedev-ci";
|
||||
containerWorkspace = containerCIHome + "\\workspace";
|
||||
k8sHelperClassPath = "C:\\k8s-helper\\*";
|
||||
containerCacheHome = "C:\\onedev-cache";
|
||||
mainContainerSpec.put("command", Lists.newArrayList("cmd"));
|
||||
mainContainerSpec.put("args", Lists.newArrayList("/c", ".onedev\\job-commands-wrapper.bat"));
|
||||
emptyDirMount.put("mountPath", "C:\\onedev-workspace");
|
||||
classPath = "C:\\k8s-helper\\*";
|
||||
mainContainerSpec.put("args", Lists.newArrayList("/c", containerCIHome + "\\commands.bat"));
|
||||
}
|
||||
mainContainerSpec.put("workingDir", emptyDirMount.get("mountPath"));
|
||||
emptyDirMount.put("name", "workspace");
|
||||
mainContainerSpec.put("volumeMounts", Lists.<Object>newArrayList(emptyDirMount));
|
||||
|
||||
Map<String, String> ciPathMount = Maps.newLinkedHashMap(
|
||||
"name", "ci-home",
|
||||
"mountPath", containerCIHome);
|
||||
Map<String, String> cacheHomeMount = Maps.newLinkedHashMap(
|
||||
"name", "cache-home",
|
||||
"mountPath", containerCacheHome);
|
||||
|
||||
List<Object> volumeMounts = Lists.<Object>newArrayList(ciPathMount, cacheHomeMount);
|
||||
|
||||
mainContainerSpec.put("workingDir", containerWorkspace);
|
||||
mainContainerSpec.put("volumeMounts", volumeMounts);
|
||||
|
||||
mainContainerSpec.put("resources", Maps.newLinkedHashMap("requests", Maps.newLinkedHashMap(
|
||||
"cpu", getCpuRequest(),
|
||||
@ -400,8 +438,8 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
|
||||
envs.add(serverUrlEnv);
|
||||
envs.addAll(getSecretEnvs(secretName, secrets.keySet()));
|
||||
|
||||
List<String> sidecarArgs = Lists.newArrayList("-classpath", classPath, "io.onedev.k8shelper.SideCar");
|
||||
List<String> initArgs = Lists.newArrayList("-classpath", classPath, "io.onedev.k8shelper.Init");
|
||||
List<String> sidecarArgs = Lists.newArrayList("-classpath", k8sHelperClassPath, "io.onedev.k8shelper.SideCar");
|
||||
List<String> initArgs = Lists.newArrayList("-classpath", k8sHelperClassPath, "io.onedev.k8shelper.Init");
|
||||
if (jobContext == null) {
|
||||
sidecarArgs.add("test");
|
||||
initArgs.add("test");
|
||||
@ -412,7 +450,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
|
||||
"command", Lists.newArrayList("java"),
|
||||
"args", sidecarArgs,
|
||||
"env", envs,
|
||||
"volumeMounts", Lists.<Object>newArrayList(emptyDirMount));
|
||||
"volumeMounts", volumeMounts);
|
||||
|
||||
Map<Object, Object> initContainerSpec = Maps.newHashMap(
|
||||
"name", "init",
|
||||
@ -420,7 +458,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
|
||||
"command", Lists.newArrayList("java"),
|
||||
"args", initArgs,
|
||||
"env", envs,
|
||||
"volumeMounts", Lists.<Object>newArrayList(emptyDirMount));
|
||||
"volumeMounts", volumeMounts);
|
||||
|
||||
podSpec.put("containers", Lists.<Object>newArrayList(mainContainerSpec, sidecarContainerSpec));
|
||||
podSpec.put("initContainers", Lists.<Object>newArrayList(initContainerSpec));
@@ -434,9 +472,17 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
if (getServiceAccount() != null)
podSpec.put("serviceAccountName", getServiceAccount());
podSpec.put("restartPolicy", "Never");
podSpec.put("volumes", Lists.<Object>newArrayList(Maps.newLinkedHashMap(
"name", "workspace",
"emptyDir", Maps.newLinkedHashMap())));

Map<Object, Object> ciHomeVolume = Maps.newLinkedHashMap(
"name", "ci-home",
"emptyDir", Maps.newLinkedHashMap());
Map<Object, Object> cacheHomeVolume = Maps.newLinkedHashMap(
"name", "cache-home",
"hostPath", Maps.newLinkedHashMap(
"path", JsonEscape.escapeJson(getEffectiveCacheHome(osName)),
"type", "DirectoryOrCreate"));

podSpec.put("volumes", Lists.<Object>newArrayList(ciHomeVolume, cacheHomeVolume));
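// Serialized into the pod manifest, the volumes declared above correspond roughly to:
//   "volumes": [
//     {"name": "ci-home", "emptyDir": {}},
//     {"name": "cache-home", "hostPath": {"path": "<effective cache home on the node>", "type": "DirectoryOrCreate"}}
//   ]
// ci-home is per-pod scratch space shared by the init, main and sidecar containers, while the
// hostPath cache-home lets cache instances survive on the node across builds.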

Map<Object, Object> podDef = Maps.newLinkedHashMap(
"apiVersion", "v1",
@@ -449,6 +495,11 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
String podName = createResource(podDef, Sets.newHashSet(), logger);
try {
logger.log("Preparing job environment...");

KubernetesExecutor.logger.debug("Checking error events (pod: {})...", podName);
// Some errors are only reported via events
checkEventError(podName, logger);

KubernetesExecutor.logger.debug("Waiting for init container to start (pod: {})...", podName);
watchPod(podName, new StatusChecker() {

@@ -723,6 +774,73 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
}
}

private void checkEventError(String podName, JobLogger logger) {
Commandline kubectl = newKubeCtl();

ObjectMapper mapper = new ObjectMapper();

StringBuilder json = new StringBuilder();
kubectl.addArgs("get", "event", "-n", getNamespace(), "--field-selector",
"involvedObject.kind=Pod,involvedObject.name=" + podName, "--watch",
"-o", "json");
Thread thread = Thread.currentThread();
AtomicReference<StopWatch> stopWatchRef = new AtomicReference<>(null);
try {
kubectl.execute(new LineConsumer() {

@Override
public void consume(String line) {
if (line.startsWith("{")) {
json.append("{").append("\n");
} else if (line.startsWith("}")) {
json.append("}");
KubernetesExecutor.logger.trace("Watching event:\n" + json.toString());
try {
JsonNode eventNode = mapper.readTree(json.toString());
String type = eventNode.get("type").asText();
String reason = eventNode.get("reason").asText();
JsonNode messageNode = eventNode.get("message");
String message = messageNode!=null? messageNode.asText(): reason;
if (type.equals("Warning")) {
if (reason.equals("FailedScheduling"))
logger.log("Kubernetes: " + message);
else
stopWatchRef.set(new StopWatch(new OneException(message)));
} else if (type.equals("Normal") && reason.equals("Started")) {
stopWatchRef.set(new StopWatch(null));
}
if (stopWatchRef.get() != null)
thread.interrupt();
} catch (Exception e) {
KubernetesExecutor.logger.error("Error processing event watching record", e);
}
json.setLength(0);
} else {
json.append(line).append("\n");
}
}

}, new LineConsumer() {

@Override
public void consume(String line) {
logger.log("Kubernetes: " + line);
}

}).checkReturnCode();

throw new OneException("Unexpected end of pod watching");
} catch (Exception e) {
StopWatch stopWatch = stopWatchRef.get();
if (stopWatch != null) {
if (stopWatch.getException() != null)
throw stopWatch.getException();
} else {
throw ExceptionUtils.unchecked(e);
}
}
}
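// How checkEventError works: it tails "kubectl get event --watch -o json" for events of this pod,
// reassembles each pretty-printed JSON object by bracketing on lines that start with "{" and "}",
// and parses it with Jackson. A Warning event other than FailedScheduling aborts the build with the
// event message, a Normal/Started event ends the watch successfully, and in either case the
// executing thread is interrupted to break out of the blocking kubectl call, with the StopWatch
// holder carrying the outcome across that interruption. A record consumed here looks roughly like
// (fields abbreviated):
//   { "type": "Warning", "reason": "Failed", "message": "...", "involvedObject": {"kind": "Pod", "name": "<pod>"} }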

private void collectContainerLog(String podName, String containerName, JobLogger logger) {
Commandline kubectl = newKubeCtl();
kubectl.addArgs("logs", podName, "-c", containerName, "-n", getNamespace(), "--follow");
@@ -743,6 +861,21 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
}).checkReturnCode();
}

@Override
public boolean isValid(ConstraintValidatorContext context) {
boolean isValid = true;

if (getCacheHome() != null && FilenameUtils.getPrefixLength(getCacheHome()) == 0) {
isValid = false;
context.buildConstraintViolationWithTemplate("An absolute path is expected")
.addPropertyNode("cacheHome").addConstraintViolation();
}

if (!isValid)
context.disableDefaultConstraintViolation();
return isValid;
}

@Editable
public static class NodeSelectorEntry implements Serializable {


@@ -3,6 +3,7 @@ package io.onedev.server.plugin.kubernetes;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
@@ -21,12 +22,15 @@ import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;

import org.apache.commons.lang.SerializationUtils;

import com.google.common.collect.Lists;

import io.onedev.commons.utils.TarUtils;
import io.onedev.k8shelper.CacheAllocationRequest;
import io.onedev.server.OneException;
import io.onedev.server.ci.job.JobContext;
import io.onedev.server.ci.job.JobManager;
import io.onedev.server.model.support.JobContext;

@Path("/k8s")
@Consumes(MediaType.WILDCARD)
@@ -46,19 +50,29 @@ public class KubernetesResource {
}

@Path("/job-context")
@Produces(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_OCTET_STREAM)
@GET
public Map<String, Object> getJobContextAsMap() {
JobContext context = getJobContext();
public byte[] getJobContext() {
JobContext context = jobManager.getJobContext(getJobToken(), true);
Map<String, Object> contextMap = new HashMap<>();
contextMap.put("commands", context.getCommands());
contextMap.put("cloneSource", context.isCloneSource());
contextMap.put("retrieveSource", context.isRetrieveSource());
contextMap.put("projectName", context.getProjectName());
contextMap.put("commitHash", context.getCommitId().name());
contextMap.put("collectFiles.includes", context.getCollectFiles().getIncludes());
contextMap.put("collectFiles.excludes", context.getCollectFiles().getExcludes());

return contextMap;
return SerializationUtils.serialize((Serializable) contextMap);
}

@Path("/allocate-job-caches")
@Consumes(MediaType.APPLICATION_OCTET_STREAM)
@Produces(MediaType.APPLICATION_OCTET_STREAM)
@POST
public byte[] allocateJobCaches(byte[] cacheAllocationRequestBytes) {
CacheAllocationRequest allocationRequest = (CacheAllocationRequest) SerializationUtils
.deserialize(cacheAllocationRequestBytes);
return SerializationUtils.serialize((Serializable) jobManager.allocateJobCaches(
getJobToken(), allocationRequest.getCurrentTime(), allocationRequest.getInstances()));
}
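Both endpoints above exchange Java-serialized payloads over application/octet-stream rather than JSON. A minimal sketch of how a client inside the job pod could call allocate-job-caches, assuming a JAX-RS 2.x client such as Jersey is available; the rest/k8s mount prefix, the CacheAllocationRequest constructor shape, and the class and variable names here are illustrative assumptions:

import java.util.Date;
import java.util.Map;

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;

import org.apache.commons.lang.SerializationUtils;

import io.onedev.k8shelper.CacheAllocationRequest;
import io.onedev.k8shelper.CacheInstance;
import io.onedev.server.ci.job.JobManager;

public class CacheAllocationClientSketch {

    @SuppressWarnings("unchecked")
    public static Map<CacheInstance, String> allocate(String serverUrl, String jobToken,
            Map<CacheInstance, Date> localInstances) {
        Client client = ClientBuilder.newClient();
        try {
            // constructor shape assumed; the resource only tells us the request carries a time and the instances
            byte[] requestBytes = SerializationUtils.serialize(
                    new CacheAllocationRequest(new Date(), localInstances));
            byte[] responseBytes = client.target(serverUrl)
                    .path("rest/k8s/allocate-job-caches") // "rest" prefix is an assumed mount point
                    .request(MediaType.APPLICATION_OCTET_STREAM)
                    .header(JobManager.JOB_TOKEN_HTTP_HEADER, jobToken)
                    .post(Entity.entity(requestBytes, MediaType.APPLICATION_OCTET_STREAM), byte[].class);
            // the server replies with a serialized Map<CacheInstance, String> (instance -> cache path)
            return (Map<CacheInstance, String>) SerializationUtils.deserialize(responseBytes);
        } finally {
            client.close();
        }
    }
}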

@Path("/download-dependencies")
@@ -69,8 +83,9 @@ public class KubernetesResource {

@Override
public void write(OutputStream output) throws IOException {
JobContext context = getJobContext();
TarUtils.tar(context.getServerWorkspace(), Lists.newArrayList("**"), new ArrayList<>(), output);
JobContext context = jobManager.getJobContext(getJobToken(), true);
TarUtils.tar(context.getServerWorkspace(), Lists.newArrayList("**"),
new ArrayList<>(), output);
output.flush();
}

@@ -82,7 +97,7 @@ public class KubernetesResource {
@Path("/upload-outcomes")
@Consumes(MediaType.APPLICATION_OCTET_STREAM)
public Response uploadOutcomes(InputStream is) {
JobContext context = getJobContext();
JobContext context = jobManager.getJobContext(getJobToken(), true);
TarUtils.untar(is, context.getServerWorkspace());
return Response.ok().build();
}
@@ -97,14 +112,11 @@ public class KubernetesResource {
return Response.status(400).entity("Invalid or no job token").build();
}

private JobContext getJobContext() {
private String getJobToken() {
String jobToken = request.getHeader(JobManager.JOB_TOKEN_HTTP_HEADER);
if (jobToken == null)
throw new OneException("Http header '" + JobManager.JOB_TOKEN_HTTP_HEADER + "' is expected");
JobContext context = jobManager.getJobContext(jobToken);
if (context == null)
throw new OneException("No job context found for specified job token");
return context;
return jobToken;
}

}

@@ -13,8 +13,8 @@ import org.slf4j.LoggerFactory;

import io.onedev.server.ci.CISpec;
import io.onedev.server.ci.DefaultCISpecProvider;
import io.onedev.server.ci.job.CacheSpec;
import io.onedev.server.ci.job.Job;
import io.onedev.server.ci.job.cache.JobCache;
import io.onedev.server.ci.job.trigger.BranchUpdateTrigger;
import io.onedev.server.git.Blob;
import io.onedev.server.git.BlobIdent;
@@ -97,7 +97,7 @@ public class DefaultMavenCISpecProvider implements DefaultCISpecProvider {
* Cache Maven local repository in order not to download Maven dependencies all over again for
* subsequent builds
*/
JobCache cache = new JobCache();
CacheSpec cache = new CacheSpec();
cache.setKey("maven-local-repository");
cache.setPath("/root/.m2");
job.getCaches().add(cache);
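The same CacheSpec pattern would apply to any other per-job cache directory; a hypothetical second entry (key and path invented purely for illustration):

CacheSpec npmCache = new CacheSpec();
npmCache.setKey("npm-local-cache");
npmCache.setPath("/root/.npm");
job.getCaches().add(npmCache);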
@@ -6,13 +6,14 @@ import java.io.IOException;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.function.Consumer;

import javax.validation.ConstraintValidatorContext;

@@ -27,19 +28,20 @@ import com.google.common.base.Joiner;

import io.onedev.commons.launcher.bootstrap.Bootstrap;
import io.onedev.commons.utils.FileUtils;
import io.onedev.commons.utils.LockUtils;
import io.onedev.commons.utils.PathUtils;
import io.onedev.commons.utils.StringUtils;
import io.onedev.commons.utils.command.Commandline;
import io.onedev.commons.utils.command.LineConsumer;
import io.onedev.commons.utils.command.ProcessKiller;
import io.onedev.commons.utils.concurrent.CapacityRunner;
import io.onedev.server.ci.job.cache.CacheAllocation;
import io.onedev.server.ci.job.cache.CacheCallable;
import io.onedev.server.ci.job.cache.CacheRunner;
import io.onedev.server.ci.job.cache.JobCache;
import io.onedev.server.model.support.JobContext;
import io.onedev.k8shelper.CacheInstance;
import io.onedev.k8shelper.KubernetesHelper;
import io.onedev.server.OneDev;
import io.onedev.server.ci.job.JobContext;
import io.onedev.server.ci.job.JobManager;
import io.onedev.server.model.support.JobExecutor;
import io.onedev.server.plugin.serverdocker.ServerDockerExecutor.TestData;
import io.onedev.server.storage.StorageManager;
import io.onedev.server.util.JobLogger;
import io.onedev.server.util.OneContext;
import io.onedev.server.util.validation.Validatable;
@@ -72,8 +74,9 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa

private transient CapacityRunner capacityRunner;

@Editable(order=20000, group="More Settings", description="Optionally specify docker executable, for instance <i>/usr/local/bin/docker</i>. "
@Editable(order=100, description="Optionally specify docker executable, for instance <i>/usr/local/bin/docker</i>. "
+ "Leave empty to use docker executable in PATH")
@NameOfEmptyValue("Use default")
public String getDockerExecutable() {
return dockerExecutable;
}
@@ -155,134 +158,152 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
return capacityRunner;
}

private File getCacheHome() {
return new File(Bootstrap.getCacheDir(), getName());
}

@Override
public void execute(String jobToken, JobContext jobContext) {
JobLogger logger = jobContext.getLogger();

getCapacityRunner().call(new Callable<Void>() {

@Override
public Void call() {
return new CacheRunner(getCacheHome(), jobContext.getCaches()).call(new CacheCallable<Void>() {

@Override
public Void call(Collection<CacheAllocation> allocations) {
jobContext.notifyJobRunning();

login(logger);

logger.log("Pulling image...") ;
Commandline docker = getDocker();
docker.addArgs("pull", jobContext.getEnvironment());
docker.execute(newDebugLogger(), newJobLogger(logger)).checkReturnCode();

docker.clearArgs();
String jobInstance = UUID.randomUUID().toString();
docker.addArgs("run", "--rm", "--name", jobInstance);
for (Map.Entry<String, String> entry: jobContext.getEnvVars().entrySet())
docker.addArgs("--env", entry.getKey() + "=" + entry.getValue());
if (getRunOptions() != null)
docker.addArgs(StringUtils.parseQuoteTokens(getRunOptions()));

String imageOS = getImageOS(logger, jobContext.getEnvironment());
logger.log("Detected image OS: " + imageOS);

boolean windows = imageOS.equals("windows");

String dockerWorkspacePath;
if (windows)
dockerWorkspacePath = "C:\\" + WORKSPACE;
else
dockerWorkspacePath = "/" + WORKSPACE;

File workspaceCache = null;
for (CacheAllocation allocation: allocations) {
if (allocation.isWorkspace()) {
workspaceCache = allocation.getInstance();
break;
}
}

File effectiveWorkspace = workspaceCache != null? workspaceCache: jobContext.getServerWorkspace();

if (jobContext.isCloneSource()) {
logger.log("Cloning source code...");
jobContext.checkoutSource(effectiveWorkspace);
}

if (workspaceCache != null) {
try {
FileUtils.copyDirectory(jobContext.getServerWorkspace(), workspaceCache);
} catch (IOException e) {
throw new RuntimeException(e);
}
}

docker.addArgs("-v", effectiveWorkspace.getAbsolutePath() + ":" + dockerWorkspacePath);
for (CacheAllocation allocation: allocations) {
if (!allocation.isWorkspace())
docker.addArgs("-v", allocation.getInstance().getAbsolutePath() + ":" + allocation.resolvePath(dockerWorkspacePath));
}
docker.addArgs("-w", dockerWorkspacePath);

if (windows) {
File scriptFile = new File(effectiveWorkspace, "onedev-job-commands.bat");
try {
FileUtils.writeLines(scriptFile, jobContext.getCommands(), "\r\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
docker.addArgs(jobContext.getEnvironment());
docker.addArgs("cmd", "/c", dockerWorkspacePath + "\\onedev-job-commands.bat");
} else {
File scriptFile = new File(effectiveWorkspace, "onedev-job-commands.sh");
try {
FileUtils.writeLines(scriptFile, jobContext.getCommands(), "\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
docker.addArgs(jobContext.getEnvironment());
docker.addArgs("sh", dockerWorkspacePath + "/onedev-job-commands.sh");
}

logger.log("Running container to execute job...");

try {
docker.execute(newDebugLogger(), newJobLogger(logger), null, new ProcessKiller() {
File hostCIHome = FileUtils.createTempDir("onedev-ci");
try {
JobLogger logger = jobContext.getLogger();

getCapacityRunner().call(new Callable<Void>() {

@Override
public void kill(Process process) {
logger.log("Stopping container...");
Commandline cmd = getDocker();
cmd.addArgs("stop", jobInstance);
cmd.execute(newDebugLogger(), newJobLogger(logger)).checkReturnCode();
}

}).checkReturnCode();
@Override
public Void call() {
jobContext.notifyJobRunning();

File hostCacheHome = getCacheHome();

Map<CacheInstance, Date> cacheInstances = KubernetesHelper.getCacheInstances(hostCacheHome);
Map<CacheInstance, String> cacheAllocations = OneDev.getInstance(JobManager.class)
.allocateJobCaches(jobToken, new Date(), cacheInstances);
KubernetesHelper.preprocess(hostCacheHome, cacheAllocations, new Consumer<File>() {

@Override
public void accept(File directory) {
cleanDirAsRoot(directory);
}

});
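// Cache handling here mirrors the Kubernetes executor: the executor reports the cache instances
// already present under its cache home, JobManager.allocateJobCaches() decides which instance
// serves which cache path for this job (a returned path pointing at the current directory means
// the workspace itself), and KubernetesHelper.preprocess() prepares the allocated instances,
// handing any directory that must be cleared to the callback above so it can be wiped as root.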

login(logger);

logger.log("Pulling image...") ;
Commandline docker = getDocker();
docker.addArgs("pull", jobContext.getEnvironment());
docker.execute(newDebugLogger(), newJobLogger(logger)).checkReturnCode();

docker.clearArgs();
String dockerInstance = UUID.randomUUID().toString();
docker.addArgs("run", "--rm", "--name", dockerInstance);
for (Map.Entry<String, String> entry: jobContext.getEnvVars().entrySet())
docker.addArgs("--env", entry.getKey() + "=" + entry.getValue());
if (getRunOptions() != null)
docker.addArgs(StringUtils.parseQuoteTokens(getRunOptions()));

String imageOS = getImageOS(logger, jobContext.getEnvironment());
logger.log("Detected image OS: " + imageOS);

boolean isWindows = imageOS.equals("windows");

File workspaceCache = null;
for (Map.Entry<CacheInstance, String> entry: cacheAllocations.entrySet()) {
if (PathUtils.isCurrent(entry.getValue())) {
workspaceCache = entry.getKey().getDirectory(hostCacheHome);
break;
}
}

File hostWorkspace;
if (workspaceCache != null) {
hostWorkspace = workspaceCache;
} else {
hostWorkspace = new File(hostCIHome, "workspace");
FileUtils.createDir(hostWorkspace);
}

if (jobContext.isRetrieveSource()) {
logger.log("Retrieving source code...");
jobContext.retrieveSource(hostWorkspace);
}

try {
FileUtils.copyDirectory(jobContext.getServerWorkspace(), hostWorkspace);
} catch (IOException e) {
throw new RuntimeException(e);
}

String containerCIHome;
String containerWorkspace;
String[] containerCommand;
if (isWindows) {
containerCIHome = "C:\\onedev-ci";
containerWorkspace = "C:\\onedev-ci\\workspace";
containerCommand = new String[] {"cmd", "/c", "C:\\onedev-ci\\job-commands.bat"};

File scriptFile = new File(hostCIHome, "job-commands.bat");
try {
FileUtils.writeLines(scriptFile, jobContext.getCommands(), "\r\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
} else {
containerCIHome = "/onedev-ci";
containerWorkspace = "/onedev-ci/workspace";
containerCommand = new String[] {"sh", "/onedev-ci/job-commands.sh"};

File scriptFile = new File(hostCIHome, "job-commands.sh");
try {
FileUtils.writeLines(scriptFile, jobContext.getCommands(), "\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
}

docker.addArgs("-v", hostCIHome.getAbsolutePath() + ":" + containerCIHome);
if (workspaceCache != null)
docker.addArgs("-v", workspaceCache.getAbsolutePath() + ":" + containerWorkspace);
for (Map.Entry<CacheInstance, String> entry: cacheAllocations.entrySet()) {
if (!PathUtils.isCurrent(entry.getValue())) {
String hostCachePath = entry.getKey().getDirectory(hostCacheHome).getAbsolutePath();
String containerCachePath = PathUtils.resolve(containerWorkspace, entry.getValue());
docker.addArgs("-v", hostCachePath + ":" + containerCachePath);
}
}
docker.addArgs("-w", containerWorkspace, jobContext.getEnvironment());
docker.addArgs(containerCommand);
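// For a Linux image the docker invocation assembled above ends up roughly as:
//   docker run --rm --name <uuid> --env K=V ... \
//     -v <hostCIHome>:/onedev-ci [-v <workspace cache instance>:/onedev-ci/workspace] \
//     -v <cache instance dir>:<path resolved against /onedev-ci/workspace> ... \
//     -w /onedev-ci/workspace <image> sh /onedev-ci/job-commands.sh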

logger.log("Running container to execute job...");

try {
docker.execute(newJobLogger(logger), newJobLogger(logger), null, new ProcessKiller() {

@Override
public void kill(Process process) {
logger.log("Stopping container...");
Commandline cmd = getDocker();
cmd.addArgs("stop", dockerInstance);
cmd.execute(newDebugLogger(), newJobLogger(logger)).checkReturnCode();
}

return null;
} finally {
if (workspaceCache != null) {
int baseLen = workspaceCache.getAbsolutePath().length()+1;
for (File file: jobContext.getCollectFiles().listFiles(workspaceCache)) {
try {
FileUtils.copyFile(file, new File(jobContext.getServerWorkspace(), file.getAbsolutePath().substring(baseLen)));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}).checkReturnCode();
} finally {
int baseLen = hostWorkspace.getAbsolutePath().length()+1;
for (File file: jobContext.getCollectFiles().listFiles(hostWorkspace)) {
try {
FileUtils.copyFile(file, new File(jobContext.getServerWorkspace(), file.getAbsolutePath().substring(baseLen)));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}

});
}

});
return null;
}

});
} finally {
cleanDirAsRoot(hostCIHome);
FileUtils.deleteDir(hostCIHome);
}
}

private LineConsumer newJobLogger(JobLogger logger) {
@@ -344,36 +365,6 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
return false;
}

@Override
public void checkCaches() {
File cacheHome = getCacheHome();
if (cacheHome.exists()) {
for (File keyDir: cacheHome.listFiles()) {
for (File cacheInstance: keyDir.listFiles()) {
if (System.currentTimeMillis() - cacheInstance.lastModified() > getCacheTTL() * 24L * 3600L * 1000L) {
File lockFile = new File(cacheInstance, JobCache.LOCK_FILE);
try {
if (lockFile.createNewFile()) {
LockUtils.call(keyDir.getAbsolutePath(), new Callable<Void>() {

@Override
public Void call() throws Exception {
cleanDir(cacheInstance);
FileUtils.deleteDir(cacheInstance);
return null;
}

});
}
} catch (IOException e) {
logger.error("Error removing cache '" + cacheInstance.getAbsolutePath() + "'", e);
}
}
}
}
}
}
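// Cache eviction: an instance not touched for more than getCacheTTL() days
// (TTL * 24 * 3600 * 1000 ms) is deleted. The JobCache.LOCK_FILE marker together with the per-key
// lock taken via LockUtils is presumably what keeps this cleanup from racing with a job that is
// still using the instance; the exact locking contract lives in JobCache and the executors.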

@Override
public boolean isValid(ConstraintValidatorContext context) {
boolean isValid = true;
@@ -408,6 +399,10 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
return isValid;
}

private File getCacheHome() {
return OneDev.getInstance(StorageManager.class).getJobCacheDir();
}

@Override
public void test(TestData testData, JobLogger logger) {
login(logger);
@@ -421,13 +416,11 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
boolean windows = getImageOS(logger, testData.getDockerImage()).equals("windows");

logger.log("Running container...");
File cacheHome = getCacheHome();
boolean cacheHomeExists = cacheHome.exists();
File workspaceDir = null;
File cacheDir = null;
try {
workspaceDir = Bootstrap.createTempDir("workspace");
cacheDir = new File(cacheHome, UUID.randomUUID().toString());
cacheDir = new File(getCacheHome(), UUID.randomUUID().toString());
FileUtils.createDir(cacheDir);

cmd.clearArgs();
@@ -435,13 +428,13 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
if (getRunOptions() != null)
cmd.addArgs(StringUtils.parseQuoteTokens(getRunOptions()));
String containerWorkspacePath;
String containerCachePath = "$onedev-cache-test$";
String containerCachePath;
if (windows) {
containerWorkspacePath = "C:\\" + WORKSPACE;
containerCachePath = "C:\\" + containerCachePath;
containerWorkspacePath = "C:\\onedev-ci\\workspace";
containerCachePath = "C:\\onedev-cache";
} else {
containerWorkspacePath = "/" + WORKSPACE;
containerCachePath = "/" + containerCachePath;
containerWorkspacePath = "/onedev-ci/workspace";
containerCachePath = "/onedev-cache";
}
cmd.addArgs("-v", workspaceDir.getAbsolutePath() + ":" + containerWorkspacePath);
cmd.addArgs("-v", cacheDir.getAbsolutePath() + ":" + containerCachePath);
@@ -460,8 +453,6 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
FileUtils.deleteDir(workspaceDir);
if (cacheDir != null)
FileUtils.deleteDir(cacheDir);
if (!cacheHomeExists)
FileUtils.deleteDir(cacheHome);
}

if (!SystemUtils.IS_OS_WINDOWS) {
@@ -472,13 +463,12 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
}
}

@Override
public void cleanDir(File dir) {
public void cleanDirAsRoot(File dir) {
if (SystemUtils.IS_OS_WINDOWS) {
FileUtils.cleanDir(dir);
} else {
Commandline cmd = getDocker();
String containerPath = "/onedev_dir_to_clean";
String containerPath = "/dir-to-clean";
cmd.addArgs("run", "-v", dir.getAbsolutePath() + ":" + containerPath, "--rm",
"busybox", "sh", "-c", "rm -rf " + containerPath + "/*");
cmd.execute(new LineConsumer() {