Fix issue #1056 - Limiting overall memory usage and/or number of concurrent jobs

Robin Shen 2022-12-29 20:09:54 +08:00
parent 58e48ce493
commit d694a5e036
73 changed files with 4138 additions and 4585 deletions
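The direction of the change is visible throughout the diff: per-job and per-service CPU/memory requirement settings are removed, agents report a single cpus count instead of separate cpu and memory figures, and ResourceAllocator moves to a simpler allocation model. As a rough standalone illustration of capping concurrent jobs (not the code in this commit; all names here are hypothetical):

import java.util.concurrent.Semaphore;

// Hypothetical illustration: cap concurrently running jobs with a counting
// semaphore sized by the executor's configured concurrency.
public class JobSlotLimiter {
    private final Semaphore slots;

    public JobSlotLimiter(int concurrency) {
        this.slots = new Semaphore(concurrency);
    }

    public void runJob(Runnable job) throws InterruptedException {
        slots.acquire(); // blocks while all slots are taken
        try {
            job.run();
        } finally {
            slots.release();
        }
    }
}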

View File

@@ -9,7 +9,7 @@
<version>1.2.0</version>
</parent>
<artifactId>server</artifactId>
-<version>7.8.17</version>
+<version>7.9.0</version>
<packaging>pom</packaging>
<build>
<finalName>${project.groupId}.${project.artifactId}-${project.version}</finalName>
@@ -620,7 +620,7 @@
</repositories>
<properties>
<commons.version>2.7.3</commons.version>
-<agent.version>1.7.12</agent.version>
+<agent.version>1.7.14</agent.version>
<slf4j.version>1.7.36</slf4j.version>
<logback.version>1.2.11</logback.version>
<antlr.version>4.7.2</antlr.version>

View File

@@ -7,7 +7,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server</artifactId>
-<version>7.8.17</version>
+<version>7.9.0</version>
</parent>
<build>
<plugins>

View File

@@ -34,8 +34,4 @@ public interface ServerConfig {
int getClusterPort();
-int getServerCpu();
-int getServerMemory();
}

View File

@@ -19,7 +19,7 @@ import io.onedev.agent.AgentData;
import io.onedev.agent.CallData;
import io.onedev.agent.Message;
import io.onedev.agent.MessageTypes;
-import io.onedev.agent.WaitingForAgentResourceToBeReleased;
+import io.onedev.agent.WantToDisconnectAgent;
import io.onedev.agent.WebsocketUtils;
import io.onedev.commons.utils.ExplicitException;
import io.onedev.commons.utils.StringUtils;
@@ -188,9 +188,9 @@ public class ServerSocket {
private Serializable service(Serializable request) {
try {
-if (request instanceof WaitingForAgentResourceToBeReleased) {
+if (request instanceof WantToDisconnectAgent) {
if (agentId != null)
-OneDev.getInstance(ResourceAllocator.class).waitingForAgentResourceToBeReleased(agentId);
+OneDev.getInstance(ResourceAllocator.class).wantToDisconnectAgent(agentId);
return null;
} else {
throw new ExplicitException("Unknown request: " + request.getClass());
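Both ends of this websocket protocol dispatch on the concrete type of the serialized request, so renaming the request class is a wire-protocol change and server and agent must be upgraded together (hence the agent.version bump in the root pom). A minimal standalone sketch of the dispatch pattern, with a hypothetical request type:

import java.io.Serializable;

// Minimal sketch of the instanceof-based request dispatch shown above;
// PingRequest is a hypothetical request type, not part of the OneDev agent API.
public class RequestDispatcher {
    static class PingRequest implements Serializable {}

    Serializable service(Serializable request) {
        if (request instanceof PingRequest)
            return "pong";
        throw new IllegalArgumentException("Unknown request: " + request.getClass());
    }
}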

View File

@@ -1,148 +1,123 @@
package io.onedev.server.buildspec;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.validation.constraints.NotEmpty;
import io.onedev.commons.codeassist.InputCompletion;
import io.onedev.commons.codeassist.InputStatus;
import io.onedev.commons.codeassist.InputSuggestion;
import io.onedev.server.buildspec.job.EnvVar;
import io.onedev.server.util.validation.annotation.DnsName;
import io.onedev.server.web.editable.annotation.Editable;
import io.onedev.server.web.editable.annotation.Interpolative;
import io.onedev.server.web.editable.annotation.SuggestionProvider;
@Editable
public class Service implements NamedElement, Serializable {
private static final long serialVersionUID = 1L;
private String name;
private String image;
private String arguments;
private List<EnvVar> envVars = new ArrayList<>();
private String readinessCheckCommand;
-private int cpuRequirement = 250;
-private int memoryRequirement = 256;
@Editable(order=100, description="Specify name of the service, which will be used as host name to access the service")
@SuggestionProvider("getNameSuggestions")
@DnsName
@NotEmpty
@Override
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@SuppressWarnings("unused")
private static List<InputCompletion> getNameSuggestions(InputStatus status) {
BuildSpec buildSpec = BuildSpec.get();
if (buildSpec != null) {
List<String> candidates = new ArrayList<>(buildSpec.getServiceMap().keySet());
buildSpec.getServices().forEach(it->candidates.remove(it.getName()));
return BuildSpec.suggestOverrides(candidates, status);
}
return new ArrayList<>();
}
@Editable(order=200, description="Specify docker image of the service")
@Interpolative(variableSuggester="suggestVariables")
@NotEmpty
public String getImage() {
return image;
}
public void setImage(String image) {
this.image = image;
}
@Editable(order=220, description="Optionally specify arguments to run above image")
@Interpolative(variableSuggester="suggestVariables")
public String getArguments() {
return arguments;
}
public void setArguments(String arguments) {
this.arguments = arguments;
}
@Editable(order=300, name="Environment Variables", description="Optionally specify environment variables of "
+ "the service")
public List<EnvVar> getEnvVars() {
return envVars;
}
public void setEnvVars(List<EnvVar> envVars) {
this.envVars = envVars;
}
@Editable(order=400, description="Specify command to check readiness of the service. This command will "
+ "be interpretated by cmd.exe on Windows images, and by shell on Linux images. It will be "
+ "executed repeatedly until a zero code is returned to indicate service ready")
@Interpolative(variableSuggester="suggestVariables")
@NotEmpty
public String getReadinessCheckCommand() {
return readinessCheckCommand;
}
public void setReadinessCheckCommand(String readinessCheckCommand) {
this.readinessCheckCommand = readinessCheckCommand;
}
-@Editable(order=10000, name="CPU Requirement", group="More Settings", description="Specify CPU requirement of the service in millis. "
-+ "1000 millis means a single CPU core")
-public int getCpuRequirement() {
-return cpuRequirement;
-}
-public void setCpuRequirement(int cpuRequirement) {
-this.cpuRequirement = cpuRequirement;
-}
-@Editable(order=10100, group="More Settings", description="Specify memory requirement of the service in mega bytes")
-public int getMemoryRequirement() {
-return memoryRequirement;
-}
-public void setMemoryRequirement(int memoryRequirement) {
-this.memoryRequirement = memoryRequirement;
-}
@SuppressWarnings("unused")
private static List<InputSuggestion> suggestVariables(String matchWith) {
return BuildSpec.suggestVariables(matchWith, false, false, false);
}
public Map<String, Serializable> toMap() {
Map<String, Serializable> serviceMap = new HashMap<>();
serviceMap.put("name", getName());
serviceMap.put("image", getImage());
serviceMap.put("readinessCheckCommand", getReadinessCheckCommand());
serviceMap.put("arguments", getArguments());
Map<String, String> envVars = new HashMap<>();
for (EnvVar var: getEnvVars())
envVars.put(var.getName(), var.getValue());
serviceMap.put("envVars", (Serializable) envVars);
return serviceMap;
}
}
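The readiness-check semantics documented above (run the command repeatedly until it exits with code zero) can be pictured with a small polling loop. This is an illustration of the described behavior, not the executor code itself:

import java.util.concurrent.TimeUnit;

// Illustration of the documented readiness-check semantics: run the command
// repeatedly until it returns exit code 0. The shell choice mirrors the
// description (cmd.exe on Windows images, shell on Linux images).
public class ReadinessCheck {
    public static void waitUntilReady(String command) throws Exception {
        boolean windows = System.getProperty("os.name").toLowerCase().contains("win");
        String[] shell = windows
                ? new String[] {"cmd.exe", "/c", command}
                : new String[] {"sh", "-c", command};
        while (true) {
            Process process = new ProcessBuilder(shell).inheritIO().start();
            if (process.waitFor() == 0)
                return; // service is ready
            TimeUnit.SECONDS.sleep(1); // retry until a zero exit code
        }
    }
}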

View File

@@ -1,496 +1,473 @@
package io.onedev.server.buildspec.job;
import static io.onedev.server.model.Build.NAME_BRANCH;
import static io.onedev.server.model.Build.NAME_COMMIT;
import static io.onedev.server.model.Build.NAME_JOB;
import static io.onedev.server.model.Build.NAME_PULL_REQUEST;
import static io.onedev.server.model.Build.NAME_TAG;
import static io.onedev.server.search.entity.build.BuildQuery.getRuleName;
import static io.onedev.server.search.entity.build.BuildQueryLexer.And;
import static io.onedev.server.search.entity.build.BuildQueryLexer.InPipelineOf;
import static io.onedev.server.search.entity.build.BuildQueryLexer.Is;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import javax.validation.ConstraintValidatorContext;
import javax.validation.Valid;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotEmpty;
import org.apache.wicket.Component;
import org.eclipse.jgit.lib.ObjectId;
import io.onedev.commons.codeassist.InputCompletion;
import io.onedev.commons.codeassist.InputStatus;
import io.onedev.commons.codeassist.InputSuggestion;
import io.onedev.server.OneDev;
import io.onedev.server.buildspec.BuildSpec;
import io.onedev.server.buildspec.BuildSpecAware;
import io.onedev.server.buildspec.NamedElement;
import io.onedev.server.buildspec.job.action.PostBuildAction;
import io.onedev.server.buildspec.job.projectdependency.ProjectDependency;
import io.onedev.server.buildspec.job.trigger.JobTrigger;
import io.onedev.server.buildspec.param.ParamUtils;
import io.onedev.server.buildspec.param.spec.ParamSpec;
import io.onedev.server.buildspec.step.Step;
import io.onedev.server.entitymanager.SettingManager;
import io.onedev.server.event.project.ProjectEvent;
import io.onedev.server.git.GitUtils;
import io.onedev.server.job.authorization.JobAuthorization;
import io.onedev.server.job.authorization.JobAuthorization.Context;
import io.onedev.server.model.Build;
import io.onedev.server.model.PullRequest;
import io.onedev.server.model.support.administration.jobexecutor.JobExecutor;
import io.onedev.server.util.ComponentContext;
import io.onedev.server.util.EditContext;
import io.onedev.server.util.criteria.Criteria;
import io.onedev.server.util.validation.Validatable;
import io.onedev.server.util.validation.annotation.ClassValidating;
import io.onedev.server.web.editable.annotation.ChoiceProvider;
import io.onedev.server.web.editable.annotation.Editable;
import io.onedev.server.web.editable.annotation.Interpolative;
import io.onedev.server.web.editable.annotation.RetryCondition;
import io.onedev.server.web.editable.annotation.SuggestionProvider;
import io.onedev.server.web.page.project.blob.ProjectBlobPage;
import io.onedev.server.web.util.SuggestionUtils;
import io.onedev.server.web.util.WicketUtils;
@Editable
@ClassValidating
public class Job implements NamedElement, Serializable, Validatable {
private static final long serialVersionUID = 1L;
public static final String SELECTION_PREFIX = "jobs/";
public static final String PROP_NAME = "name";
public static final String PROP_JOB_DEPENDENCIES = "jobDependencies";
public static final String PROP_REQUIRED_SERVICES = "requiredServices";
public static final String PROP_TRIGGERS = "triggers";
public static final String PROP_STEPS = "steps";
public static final String PROP_RETRY_CONDITION = "retryCondition";
public static final String PROP_POST_BUILD_ACTIONS = "postBuildActions";
private String name;
private String jobExecutor;
private List<Step> steps = new ArrayList<>();
private List<ParamSpec> paramSpecs = new ArrayList<>();
private List<JobDependency> jobDependencies = new ArrayList<>();
private List<ProjectDependency> projectDependencies = new ArrayList<>();
private List<String> requiredServices = new ArrayList<>();
private List<JobTrigger> triggers = new ArrayList<>();
private List<CacheSpec> caches = new ArrayList<>();
private long timeout = 3600;
private List<PostBuildAction> postBuildActions = new ArrayList<>();
private String retryCondition = "never";
private int maxRetries = 3;
private int retryDelay = 30;
private transient Map<String, ParamSpec> paramSpecMap;
@Editable(order=100, description="Specify name of the job")
@SuggestionProvider("getNameSuggestions")
@NotEmpty
@Override
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@SuppressWarnings("unused")
private static List<InputCompletion> getNameSuggestions(InputStatus status) {
BuildSpec buildSpec = BuildSpec.get();
if (buildSpec != null) {
List<String> candidates = new ArrayList<>(buildSpec.getJobMap().keySet());
buildSpec.getJobs().forEach(it->candidates.remove(it.getName()));
return BuildSpec.suggestOverrides(candidates, status);
}
return new ArrayList<>();
}
@Editable(order=200, placeholder="Use Any Applicable Executor", description="Optionally specify authorized executor "
+ "for this job. Leave empty to use first authorized executor")
@Interpolative(literalSuggester="suggestJobExecutors", variableSuggester="suggestVariables")
public String getJobExecutor() {
return jobExecutor;
}
public void setJobExecutor(String jobExecutor) {
this.jobExecutor = jobExecutor;
}
@SuppressWarnings("unused")
private static List<InputSuggestion> suggestJobExecutors(String matchWith) {
List<String> applicableJobExecutors = new ArrayList<>();
ProjectBlobPage page = (ProjectBlobPage) WicketUtils.getPage();
String jobName = (String) EditContext.get().getInputValue(PROP_NAME);
if (jobName != null) {
Context context = new Context(page.getProject(), page.getBlobIdent().revision, jobName);
for (JobExecutor executor: OneDev.getInstance(SettingManager.class).getJobExecutors()) {
if (executor.isEnabled()) {
if (executor.getJobAuthorization() == null) {
applicableJobExecutors.add(executor.getName());
} else {
if (JobAuthorization.parse(executor.getJobAuthorization()).matches(context))
applicableJobExecutors.add(executor.getName());
}
}
}
}
return SuggestionUtils.suggest(applicableJobExecutors, matchWith);
}
@Editable(order=200, description="Steps will be executed serially on same node, sharing the same <a href='https://docs.onedev.io/concepts#job-workspace'>job workspace</a>")
public List<Step> getSteps() {
return steps;
}
public void setSteps(List<Step> steps) {
this.steps = steps;
}
@Editable(order=400, name="Parameter Specs", group="Params & Triggers", description="Optionally define parameter specifications of the job")
@Valid
public List<ParamSpec> getParamSpecs() {
return paramSpecs;
}
public void setParamSpecs(List<ParamSpec> paramSpecs) {
this.paramSpecs = paramSpecs;
}
@Editable(order=500, group="Params & Triggers", description="Use triggers to run the job automatically under certain conditions")
@Valid
public List<JobTrigger> getTriggers() {
return triggers;
}
public void setTriggers(List<JobTrigger> triggers) {
this.triggers = triggers;
}
@Editable(name="Job Dependencies", order=9110, group="Dependencies & Services", description="Job dependencies determines the order and "
+ "concurrency when run different jobs. You may also specify artifacts to retrieve from upstream jobs")
@Valid
public List<JobDependency> getJobDependencies() {
return jobDependencies;
}
public void setJobDependencies(List<JobDependency> jobDependencies) {
this.jobDependencies = jobDependencies;
}
@Editable(name="Project Dependencies", order=9112, group="Dependencies & Services", description="Use project dependency to retrieve "
+ "artifacts from other projects")
@Valid
public List<ProjectDependency> getProjectDependencies() {
return projectDependencies;
}
public void setProjectDependencies(List<ProjectDependency> projectDependencies) {
this.projectDependencies = projectDependencies;
}
@Editable(order=9114, group="Dependencies & Services", placeholder="No required services",
description="Optionally specify services required by this job. "
+ "<b class='text-warning'>NOTE:</b> Services are only supported by docker aware executors "
+ "(server docker executor, remote docker executor, or kubernetes executor)")
@ChoiceProvider("getServiceChoices")
public List<String> getRequiredServices() {
return requiredServices;
}
public void setRequiredServices(List<String> requiredServices) {
this.requiredServices = requiredServices;
}
@SuppressWarnings("unused")
private static List<String> getServiceChoices() {
List<String> choices = new ArrayList<>();
Component component = ComponentContext.get().getComponent();
BuildSpecAware buildSpecAware = WicketUtils.findInnermost(component, BuildSpecAware.class);
if (buildSpecAware != null) {
BuildSpec buildSpec = buildSpecAware.getBuildSpec();
if (buildSpec != null) {
choices.addAll(buildSpec.getServiceMap().values().stream()
.map(it->it.getName()).collect(Collectors.toList()));
}
}
return choices;
}
@Editable(order=9400, group="More Settings", description="Specify condition to retry build upon failure")
@NotEmpty
@RetryCondition
public String getRetryCondition() {
return retryCondition;
}
public void setRetryCondition(String retryCondition) {
this.retryCondition = retryCondition;
}
@Editable(order=9410, group="More Settings", description="Maximum number of retries before giving up")
@Min(value=1, message="This value should not be less than 1")
public int getMaxRetries() {
return maxRetries;
}
public void setMaxRetries(int maxRetries) {
this.maxRetries = maxRetries;
}
@Editable(order=9420, group="More Settings", description="Delay for the first retry in seconds. "
+ "Delay of subsequent retries will be calculated using an exponential back-off "
+ "based on this delay")
@Min(value=1, message="This value should not be less than 1")
public int getRetryDelay() {
return retryDelay;
}
public void setRetryDelay(int retryDelay) {
this.retryDelay = retryDelay;
}
-@Editable(order=10050, name="CPU Requirement", group="More Settings", description="Specify CPU requirement of the job in millis. "
-+ "1000 millis means a single CPU core")
-public int getCpuRequirement() {
-return cpuRequirement;
-}
-public void setCpuRequirement(int cpuRequirement) {
-this.cpuRequirement = cpuRequirement;
-}
-@Editable(order=10060, group="More Settings", description="Specify memory requirement of the job in mega bytes")
-public int getMemoryRequirement() {
-return memoryRequirement;
-}
-public void setMemoryRequirement(int memoryRequirement) {
-this.memoryRequirement = memoryRequirement;
-}
@Editable(order=10100, group="More Settings", description="Cache specific paths to speed up job execution. "
+ "For instance for Java Maven projects executed by various docker executors, you may cache folder "
+ "<tt>/root/.m2/repository</tt> to avoid downloading dependencies for subsequent executions.<br>"
+ "<b class='text-danger'>WARNING</b>: When using cache, malicious jobs running with same job executor "
+ "can read or even pollute the cache intentionally using same cache key as yours. To avoid this "
+ "issue, make sure job executor executing your job can only be used by trusted jobs via job "
+ "authorization setting</b>")
@Valid
public List<CacheSpec> getCaches() {
return caches;
}
public void setCaches(List<CacheSpec> caches) {
this.caches = caches;
}
@Editable(order=10500, group="More Settings", description="Specify timeout in seconds")
public long getTimeout() {
return timeout;
}
public void setTimeout(long timeout) {
this.timeout = timeout;
}
@Editable(order=10600, name="Post Build Actions", group="More Settings")
@Valid
public List<PostBuildAction> getPostBuildActions() {
return postBuildActions;
}
public void setPostBuildActions(List<PostBuildAction> postBuildActions) {
this.postBuildActions = postBuildActions;
}
@Nullable
public JobTriggerMatch getTriggerMatch(ProjectEvent event) {
for (JobTrigger trigger: getTriggers()) {
SubmitReason reason = trigger.matches(event, this);
if (reason != null)
return new JobTriggerMatch(trigger, reason);
}
return null;
}
@Override
public boolean isValid(ConstraintValidatorContext context) {
boolean isValid = true;
Set<String> keys = new HashSet<>();
Set<String> paths = new HashSet<>();
for (CacheSpec cache: caches) {
if (!keys.add(cache.getKey())) {
isValid = false;
context.buildConstraintViolationWithTemplate("Duplicate key (" + cache.getKey() + ")")
.addPropertyNode("caches").addConstraintViolation();
}
if (!paths.add(cache.getPath())) {
isValid = false;
context.buildConstraintViolationWithTemplate("Duplicate path (" + cache.getPath() + ")")
.addPropertyNode("caches").addConstraintViolation();
}
}
Set<String> dependencyJobNames = new HashSet<>();
for (JobDependency dependency: jobDependencies) {
if (!dependencyJobNames.add(dependency.getJobName())) {
isValid = false;
context.buildConstraintViolationWithTemplate("Duplicate dependency (" + dependency.getJobName() + ")")
.addPropertyNode("jobDependencies").addConstraintViolation();
}
}
Set<String> dependencyProjectPaths = new HashSet<>();
for (ProjectDependency dependency: projectDependencies) {
if (!dependencyProjectPaths.add(dependency.getProjectPath())) {
isValid = false;
context.buildConstraintViolationWithTemplate("Duplicate dependency (" + dependency.getProjectPath() + ")")
.addPropertyNode("projectDependencies").addConstraintViolation();
}
}
Set<String> paramSpecNames = new HashSet<>();
for (ParamSpec paramSpec: paramSpecs) {
if (!paramSpecNames.add(paramSpec.getName())) {
isValid = false;
context.buildConstraintViolationWithTemplate("Duplicate parameter spec (" + paramSpec.getName() + ")")
.addPropertyNode("paramSpecs").addConstraintViolation();
}
}
if (getRetryCondition() != null) {
try {
io.onedev.server.buildspec.job.retrycondition.RetryCondition.parse(this, getRetryCondition());
} catch (Exception e) {
String message = e.getMessage();
if (message == null)
message = "Malformed retry condition";
context.buildConstraintViolationWithTemplate(message)
.addPropertyNode(PROP_RETRY_CONDITION)
.addConstraintViolation();
isValid = false;
}
}
if (isValid) {
for (int triggerIndex=0; triggerIndex<getTriggers().size(); triggerIndex++) {
JobTrigger trigger = getTriggers().get(triggerIndex);
try {
ParamUtils.validateParams(getParamSpecs(), trigger.getParams());
} catch (Exception e) {
String errorMessage = String.format("Error validating job parameters (item: #%s, error message: %s)",
(triggerIndex+1), e.getMessage());
context.buildConstraintViolationWithTemplate(errorMessage)
.addPropertyNode(PROP_TRIGGERS)
.addConstraintViolation();
isValid = false;
}
}
}
if (!isValid)
context.disableDefaultConstraintViolation();
return isValid;
}
public Map<String, ParamSpec> getParamSpecMap() {
if (paramSpecMap == null)
paramSpecMap = ParamUtils.getParamSpecMap(paramSpecs);
return paramSpecMap;
}
public static String getBuildQuery(ObjectId commitId, String jobName,
@Nullable Build pipelineOf, @Nullable String refName, @Nullable PullRequest request) {
String query = ""
+ Criteria.quote(NAME_COMMIT) + " " + getRuleName(Is) + " " + Criteria.quote(commitId.name())
+ " " + getRuleName(And) + " "
+ Criteria.quote(NAME_JOB) + " " + getRuleName(Is) + " " + Criteria.quote(jobName);
if (pipelineOf != null)
query = query + " " + getRuleName(And) + " " + getRuleName(InPipelineOf) + " " + Criteria.quote("#" + pipelineOf.getNumber());
if (request != null) {
query = query
+ " " + getRuleName(And) + " "
+ Criteria.quote(NAME_PULL_REQUEST) + " " + getRuleName(Is) + " " + Criteria.quote("#" + request.getNumber());
}
if (refName != null) {
String branch = GitUtils.ref2branch(refName);
if (branch != null) {
query = query
+ " " + getRuleName(And) + " "
+ Criteria.quote(NAME_BRANCH) + " " + getRuleName(Is) + " " + Criteria.quote(branch);
}
String tag = GitUtils.ref2tag(refName);
if (tag != null) {
query = query
+ " " + getRuleName(And) + " "
+ Criteria.quote(NAME_TAG) + " " + getRuleName(Is) + " " + Criteria.quote(tag);
}
}
return query;
}
public static List<String> getChoices() {
List<String> choices = new ArrayList<>();
Component component = ComponentContext.get().getComponent();
BuildSpecAware buildSpecAware = WicketUtils.findInnermost(component, BuildSpecAware.class);
if (buildSpecAware != null) {
BuildSpec buildSpec = buildSpecAware.getBuildSpec();
if (buildSpec != null) {
choices.addAll(buildSpec.getJobMap().values().stream()
.map(it->it.getName()).collect(Collectors.toList()));
}
JobAware jobAware = WicketUtils.findInnermost(component, JobAware.class);
if (jobAware != null) {
Job job = jobAware.getJob();
if (job != null)
choices.remove(job.getName());
}
}
return choices;
}
@SuppressWarnings("unused")
private static List<InputSuggestion> suggestVariables(String matchWith) {
return BuildSpec.suggestVariables(matchWith, false, false, false);
}
}
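The retry settings above say the first retry waits retryDelay seconds and later retries back off exponentially. A sketch of that schedule under the natural reading (delay doubling each attempt; the exact factor is an assumption, the description only says "exponential back-off"):

// Sketch of the documented retry schedule: first retry after retryDelay
// seconds, subsequent delays growing exponentially. The doubling factor is
// an assumption.
public class RetrySchedule {
    static long delaySeconds(int retryDelay, int retried) {
        return retryDelay * (1L << retried); // retried = 0 for the first retry
    }

    public static void main(String[] args) {
        int maxRetries = 3, retryDelay = 30; // the defaults declared above
        for (int retried = 0; retried < maxRetries; retried++)
            System.out.println("retry " + (retried + 1) + " after "
                    + delaySeconds(retryDelay, retried) + "s");
        // prints 30s, 60s, 120s with the default settings
    }
}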

View File

@ -29,9 +29,9 @@ public interface AgentManager extends EntityManager<Agent> {
Map<Long, UUID> getAgentServers();
List<String> getOsNames();
Collection<String> getOsNames();
List<String> getOsArchs();
Collection<String> getOsArchs();
List<Agent> query(EntityQuery<Agent> agentQuery, int firstResult, int maxResults);

View File

@@ -1,39 +1,9 @@
package io.onedev.server.entitymanager.impl;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.ObjectStreamException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.TimeoutException;
-import java.util.stream.Collectors;
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import javax.persistence.criteria.CriteriaBuilder;
-import javax.persistence.criteria.CriteriaQuery;
-import javax.persistence.criteria.Root;
-import org.apache.commons.lang.SerializationUtils;
-import org.apache.commons.lang3.StringUtils;
-import org.eclipse.jetty.websocket.api.Session;
-import org.hibernate.criterion.Restrictions;
-import org.hibernate.query.Query;
-import com.google.common.base.Splitter;
-import com.hazelcast.cluster.MembershipEvent;
-import com.hazelcast.cluster.MembershipListener;
-import com.hazelcast.core.HazelcastInstance;
-import edu.emory.mathcs.backport.java.util.Collections;
import io.onedev.agent.AgentData;
import io.onedev.agent.Message;
import io.onedev.agent.MessageTypes;
@@ -68,6 +38,25 @@ import io.onedev.server.search.entity.EntitySort.Direction;
import io.onedev.server.search.entity.agent.AgentQuery;
import io.onedev.server.util.criteria.Criteria;
import io.onedev.server.util.validation.AttributeNameValidator;
+import org.apache.commons.lang.SerializationUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.eclipse.jetty.websocket.api.Session;
+import org.hibernate.criterion.Restrictions;
+import org.hibernate.query.Query;
+import javax.inject.Inject;
+import javax.inject.Singleton;
+import javax.persistence.criteria.CriteriaBuilder;
+import javax.persistence.criteria.CriteriaQuery;
+import javax.persistence.criteria.Root;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.ObjectStreamException;
+import java.io.Serializable;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeoutException;
+import java.util.stream.Collectors;
@Singleton
public class DefaultAgentManager extends BaseEntityManager<Agent> implements AgentManager, Serializable {
@ -200,8 +189,7 @@ public class DefaultAgentManager extends BaseEntityManager<Agent> implements Age
agent.setOsVersion(data.getOsInfo().getOsVersion());
agent.setOsArch(data.getOsInfo().getOsArch());
agent.setName(data.getName());
agent.setCpu(data.getCpu());
agent.setMemory(data.getMemory());
agent.setCpus(data.getCpus());
agent.setTemporal(data.isTemporal());
agent.setIpAddress(data.getIpAddress());
save(agent);
@ -221,8 +209,7 @@ public class DefaultAgentManager extends BaseEntityManager<Agent> implements Age
agent.setOsVersion(data.getOsInfo().getOsVersion());
agent.setOsArch(data.getOsInfo().getOsArch());
agent.setIpAddress(data.getIpAddress());
agent.setCpu(data.getCpu());
agent.setMemory(data.getMemory());
agent.setCpus(data.getCpus());
agent.setTemporal(data.isTemporal());
save(agent);
attributeManager.syncAttributes(agent, data.getAttributes());
@ -290,23 +277,19 @@ public class DefaultAgentManager extends BaseEntityManager<Agent> implements Age
@Override
public Map<Long, UUID> getAgentServers() {
return new HashMap<>(agentServers);
return agentServers;
}
@Sessional
@Override
public List<String> getOsNames() {
List<String> osNames = new ArrayList<>(this.osNames.keySet());
Collections.sort(osNames);
return osNames;
public Collection<String> getOsNames() {
return osNames.keySet();
}
@Sessional
@Override
public List<String> getOsArchs() {
List<String> osArchs = new ArrayList<>(this.osArchs.keySet());
Collections.sort(osArchs);
return osArchs;
public Collection<String> getOsArchs() {
return osArchs.keySet();
}
private CriteriaQuery<Agent> buildCriteriaQuery(org.hibernate.Session session, EntityQuery<Agent> agentQuery) {

View File

@@ -1,33 +0,0 @@
-package io.onedev.server.job;
-import org.eclipse.jetty.websocket.api.Session;
-import io.onedev.agent.AgentData;
-public class AgentInfo {
-private final Long id;
-private final AgentData data;
-private final Session session;
-public AgentInfo(Long id, AgentData data, Session session) {
-this.id = id;
-this.data = data;
-this.session = session;
-}
-public Long getId() {
-return id;
-}
-public AgentData getData() {
-return data;
-}
-public Session getSession() {
-return session;
-}
-}

View File

@@ -0,0 +1,9 @@
+package io.onedev.server.job;
+import java.io.Serializable;
+public interface AgentRunnable extends Serializable {
+void run(Long agentId);
+}
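Because AgentRunnable extends Serializable and has a single abstract method, a lambda implementing it can be serialized and shipped to whichever cluster node holds the agent connection (the routing machinery is not shown in this diff). A hypothetical usage sketch:

// Hypothetical usage of the new interface; in the real system the task
// would be routed to the server holding the agent's session (assumption).
public class AgentRunnableDemo {
    public static void main(String[] args) {
        AgentRunnable task = agentId -> System.out.println("running on agent " + agentId);
        task.run(1L);
    }
}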

View File

@@ -2,13 +2,13 @@ package io.onedev.server.job;
import javax.annotation.Nullable;
-public class CancellerAwareCancellationException extends java.util.concurrent.CancellationException {
+public class CancellationException extends java.util.concurrent.CancellationException {
private static final long serialVersionUID = 1L;
private final Long cancellerId;
-public CancellerAwareCancellationException(@Nullable Long cancellerId) {
+public CancellationException(@Nullable Long cancellerId) {
this.cancellerId = cancellerId;
}

View File

@@ -1,87 +1,18 @@
package io.onedev.server.job;
-import static io.onedev.k8shelper.KubernetesHelper.BUILD_VERSION;
-import static io.onedev.k8shelper.KubernetesHelper.replacePlaceholders;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.ObjectStreamException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Comparator;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.UUID;
-import java.util.concurrent.Callable;
-import java.util.concurrent.CancellationException;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeoutException;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.concurrent.locks.Lock;
-import java.util.function.Consumer;
-import java.util.function.Function;
-import javax.annotation.Nullable;
-import javax.inject.Inject;
-import javax.inject.Singleton;
-import javax.servlet.http.HttpServletRequest;
-import javax.validation.ConstraintViolation;
-import javax.validation.Validator;
-import javax.ws.rs.client.Client;
-import javax.ws.rs.client.ClientBuilder;
-import javax.ws.rs.client.Invocation;
-import javax.ws.rs.client.WebTarget;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.Response;
-import org.apache.shiro.authz.UnauthorizedException;
-import org.apache.shiro.subject.Subject;
-import org.eclipse.jgit.lib.ObjectId;
-import org.quartz.CronScheduleBuilder;
-import org.quartz.ScheduleBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
import io.onedev.agent.job.FailedException;
import io.onedev.commons.loader.ManagedSerializedForm;
-import io.onedev.commons.utils.ExceptionUtils;
-import io.onedev.commons.utils.ExplicitException;
-import io.onedev.commons.utils.FileUtils;
-import io.onedev.commons.utils.LockUtils;
-import io.onedev.commons.utils.StringUtils;
-import io.onedev.commons.utils.TaskLogger;
-import io.onedev.k8shelper.Action;
-import io.onedev.k8shelper.CacheAllocationRequest;
-import io.onedev.k8shelper.CacheInstance;
-import io.onedev.k8shelper.KubernetesHelper;
-import io.onedev.k8shelper.LeafFacade;
-import io.onedev.k8shelper.ServerSideFacade;
+import io.onedev.commons.utils.*;
+import io.onedev.k8shelper.*;
import io.onedev.server.OneDev;
import io.onedev.server.buildspec.BuildSpec;
import io.onedev.server.buildspec.BuildSpecParseException;
import io.onedev.server.buildspec.Service;
-import io.onedev.server.buildspec.job.CacheSpec;
-import io.onedev.server.buildspec.job.Job;
-import io.onedev.server.buildspec.job.JobDependency;
-import io.onedev.server.buildspec.job.JobExecutorDiscoverer;
-import io.onedev.server.buildspec.job.JobTriggerMatch;
-import io.onedev.server.buildspec.job.SubmitReason;
+import io.onedev.server.buildspec.job.*;
import io.onedev.server.buildspec.job.action.PostBuildAction;
import io.onedev.server.buildspec.job.action.condition.ActionCondition;
import io.onedev.server.buildspec.job.projectdependency.ProjectDependency;
@@ -95,27 +26,16 @@ import io.onedev.server.buildspec.param.spec.SecretParam;
import io.onedev.server.buildspec.step.ServerSideStep;
import io.onedev.server.buildspec.step.Step;
import io.onedev.server.cluster.ClusterManager;
import io.onedev.server.cluster.ClusterRunnable;
import io.onedev.server.cluster.ClusterTask;
-import io.onedev.server.entitymanager.AgentManager;
-import io.onedev.server.entitymanager.BuildManager;
-import io.onedev.server.entitymanager.BuildParamManager;
-import io.onedev.server.entitymanager.ProjectManager;
-import io.onedev.server.entitymanager.PullRequestManager;
-import io.onedev.server.entitymanager.SettingManager;
-import io.onedev.server.entitymanager.UserManager;
+import io.onedev.server.entitymanager.*;
import io.onedev.server.event.Listen;
import io.onedev.server.event.ListenerRegistry;
import io.onedev.server.event.project.DefaultBranchChanged;
import io.onedev.server.event.project.ProjectEvent;
import io.onedev.server.event.project.RefUpdated;
import io.onedev.server.event.project.ScheduledTimeReaches;
-import io.onedev.server.event.project.build.BuildEvent;
-import io.onedev.server.event.project.build.BuildFinished;
-import io.onedev.server.event.project.build.BuildPending;
-import io.onedev.server.event.project.build.BuildRetrying;
-import io.onedev.server.event.project.build.BuildRunning;
-import io.onedev.server.event.project.build.BuildSubmitted;
-import io.onedev.server.event.project.build.BuildUpdated;
+import io.onedev.server.event.project.build.*;
import io.onedev.server.event.project.pullrequest.PullRequestEvent;
import io.onedev.server.event.system.SystemStarted;
import io.onedev.server.event.system.SystemStopping;
@@ -127,13 +47,8 @@ import io.onedev.server.job.authorization.JobAuthorization;
import io.onedev.server.job.authorization.JobAuthorization.Context;
import io.onedev.server.job.log.LogManager;
import io.onedev.server.job.log.LogTask;
-import io.onedev.server.model.Build;
+import io.onedev.server.model.*;
import io.onedev.server.model.Build.Status;
-import io.onedev.server.model.BuildDependence;
-import io.onedev.server.model.BuildParam;
-import io.onedev.server.model.Project;
-import io.onedev.server.model.PullRequest;
-import io.onedev.server.model.User;
import io.onedev.server.model.support.administration.jobexecutor.JobExecutor;
import io.onedev.server.persistence.SessionManager;
import io.onedev.server.persistence.TransactionManager;
@@ -161,6 +76,37 @@ import io.onedev.server.util.script.identity.ScriptIdentity;
import io.onedev.server.web.editable.EditableStringTransformer;
import io.onedev.server.web.editable.EditableUtils;
import io.onedev.server.web.editable.annotation.Interpolative;
+import org.apache.shiro.authz.UnauthorizedException;
+import org.apache.shiro.subject.Subject;
+import org.eclipse.jgit.lib.ObjectId;
+import org.quartz.CronScheduleBuilder;
+import org.quartz.ScheduleBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import javax.annotation.Nullable;
+import javax.inject.Inject;
+import javax.inject.Singleton;
+import javax.servlet.http.HttpServletRequest;
+import javax.validation.ConstraintViolation;
+import javax.validation.Validator;
+import javax.ws.rs.client.Client;
+import javax.ws.rs.client.ClientBuilder;
+import javax.ws.rs.client.Invocation;
+import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import java.io.*;
+import java.util.*;
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.Lock;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import static io.onedev.k8shelper.KubernetesHelper.BUILD_VERSION;
+import static io.onedev.k8shelper.KubernetesHelper.replacePlaceholders;
@Singleton
public class DefaultJobManager implements JobManager, Runnable, CodePullAuthorizationSource, Serializable {
@@ -175,7 +121,7 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthorizationSource, Serializable {
private final Map<String, List<Action>> jobActions = new ConcurrentHashMap<>();
private final Map<String, JobExecutor> jobExecutors = new ConcurrentHashMap<>();
private final Map<String, JobRunnable> jobRunnables = new ConcurrentHashMap<>();
private final Map<Long, Collection<String>> scheduledTasks = new ConcurrentHashMap<>();
@ -557,163 +503,142 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz
String jobExecutorName = interpolator.interpolate(build.getJob().getJobExecutor());
JobExecutor jobExecutor = getJobExecutor(build, jobExecutorName, jobLogger);
if (jobExecutor != null) {
Long projectId = build.getProject().getId();
String projectPath = build.getProject().getPath();
String projectGitDir = storageManager.getProjectGitDir(build.getProject().getId()).getAbsolutePath();
Long buildId = build.getId();
Long buildNumber = build.getNumber();
String refName = build.getRefName();
ObjectId commitId = ObjectId.fromString(build.getCommitHash());
BuildSpec buildSpec = build.getSpec();
int cpuRequirement = 0, memoryRequirement = 0;
AtomicInteger maxRetries = new AtomicInteger(0);
AtomicInteger retryDelay = new AtomicInteger(0);
List<CacheSpec> caches = new ArrayList<>();
List<Service> services = new ArrayList<>();
List<Action> actions = new ArrayList<>();
Job job;
JobSecretAuthorizationContext.push(build.getJobSecretAuthorizationContext());
Build.push(build);
try {
	job = build.getJob();
	cpuRequirement = job.getCpuRequirement();
	memoryRequirement = job.getMemoryRequirement();
	for (Step step: job.getSteps()) {
		step = interpolator.interpolateProperties(step);
		actions.add(step.getAction(build, jobToken, build.getParamCombination()));
	}
	for (CacheSpec cache: job.getCaches())
		caches.add(interpolator.interpolateProperties(cache));
	for (String serviceName: job.getRequiredServices()) {
		Service service = buildSpec.getServiceMap().get(serviceName);
		if (service != null) {
			cpuRequirement += service.getCpuRequirement();
			memoryRequirement += service.getMemoryRequirement();
		}
		services.add(interpolator.interpolateProperties(service));
	}
	maxRetries.set(job.getMaxRetries());
	retryDelay.set(job.getRetryDelay());
} finally {
	Build.pop();
	JobSecretAuthorizationContext.pop();
}
Map<String, Integer> resourceRequirements = new HashMap<>();
resourceRequirements.put(ResourceAllocator.CPU, cpuRequirement);
resourceRequirements.put(ResourceAllocator.MEMORY, memoryRequirement);
AtomicReference<JobExecution> executionRef = new AtomicReference<>(null);
executionRef.set(new JobExecution(executorService.submit(new Runnable() {

	@Override
	public void run() {
		AtomicInteger retried = new AtomicInteger(0);
		while (true) {
			JobContext jobContext = new JobContext(jobToken, jobExecutor, projectId, projectPath,
					projectGitDir, buildId, buildNumber, actions, refName, commitId, caches, services,
					resourceRequirements, retried.get());
			// Store original job actions as the copy in job context will be fetched from cluster and
			// some transient fields (such as step object in ServerSideFacade) will not be preserved
			jobActions.put(jobToken, actions);
			jobContexts.put(jobToken, jobContext);
			logManager.addJobLogger(jobToken, jobLogger);
			serverStepThreads.put(jobToken, new ArrayList<>());
			try {
				jobLogger.log("Waiting for resources...");
				resourceAllocator.run(
						new JobRunnable(jobToken), jobExecutor.getAgentRequirement(), resourceRequirements);
				break;
			} catch (Throwable e) {
				if (retried.getAndIncrement() < maxRetries.get() && sessionManager.call(new Callable<Boolean>() {

					@Override
					public Boolean call() {
						RetryCondition retryCondition = RetryCondition.parse(job, job.getRetryCondition());
						AtomicReference<String> errorMessage = new AtomicReference<>(null);
						log(e, new TaskLogger() {

							@Override
							public void log(String message, String sessionId) {
								errorMessage.set(message);
							}

						});
						return retryCondition.matches(new RetryContext(buildManager.load(buildId), errorMessage.get()));
					}

				})) {
					log(e, jobLogger);
					jobLogger.warning("Job will be retried after a while...");
					transactionManager.run(new Runnable() {

						@Override
						public void run() {
							Build build = buildManager.load(buildId);
							build.setRunningDate(null);
							build.setPendingDate(null);
							build.setRetryDate(new Date());
							build.setStatus(Build.Status.WAITING);
							listenerRegistry.post(new BuildRetrying(build));
							buildManager.save(build);
						}

					});
					try {
						Thread.sleep(retryDelay.get() * (long)(Math.pow(2, retried.get())) * 1000L);
					} catch (InterruptedException e2) {
						throw new RuntimeException(e2);
					}
					transactionManager.run(new Runnable() {

						@Override
						public void run() {
							JobExecution execution = executionRef.get();
							if (execution != null)
								execution.updateBeginTime();
							Build build = buildManager.load(buildId);
							build.setPendingDate(new Date());
							build.setStatus(Build.Status.PENDING);
							listenerRegistry.post(new BuildPending(build));
							buildManager.save(build);
						}

					});
				} else {
					throw ExceptionUtils.unchecked(e);
				}
			} finally {
				Collection<Thread> threads = serverStepThreads.remove(jobToken);
				synchronized (threads) {
					for (Thread thread: threads)
						thread.interrupt();
				}
				logManager.removeJobLogger(jobToken);
				jobContexts.remove(jobToken);
				jobActions.remove(jobToken);
			}
		}
	}

}), job.getTimeout()*1000L));
return executionRef.get();
Long projectId = build.getProject().getId();
String projectPath = build.getProject().getPath();
String projectGitDir = storageManager.getProjectGitDir(build.getProject().getId()).getAbsolutePath();
Long buildId = build.getId();
Long buildNumber = build.getNumber();
String refName = build.getRefName();
ObjectId commitId = ObjectId.fromString(build.getCommitHash());
BuildSpec buildSpec = build.getSpec();
AtomicInteger maxRetries = new AtomicInteger(0);
AtomicInteger retryDelay = new AtomicInteger(0);
List<CacheSpec> caches = new ArrayList<>();
List<Service> services = new ArrayList<>();
List<Action> actions = new ArrayList<>();
Job job;
JobSecretAuthorizationContext.push(build.getJobSecretAuthorizationContext());
Build.push(build);
try {
	job = build.getJob();
	for (Step step: job.getSteps()) {
		step = interpolator.interpolateProperties(step);
		actions.add(step.getAction(build, jobToken, build.getParamCombination()));
	}
	for (CacheSpec cache: job.getCaches())
		caches.add(interpolator.interpolateProperties(cache));
	for (String serviceName: job.getRequiredServices()) {
		Service service = buildSpec.getServiceMap().get(serviceName);
		services.add(interpolator.interpolateProperties(service));
	}
	maxRetries.set(job.getMaxRetries());
	retryDelay.set(job.getRetryDelay());
} finally {
	Build.pop();
	JobSecretAuthorizationContext.pop();
}
AtomicReference<JobExecution> executionRef = new AtomicReference<>(null);
executionRef.set(new JobExecution(executorService.submit(new Runnable() {

	@Override
	public void run() {
		AtomicInteger retried = new AtomicInteger(0);
		while (true) {
			JobContext jobContext = new JobContext(jobToken, jobExecutor, projectId, projectPath,
					projectGitDir, buildId, buildNumber, actions, refName, commitId, caches, services,
					retried.get());
			// Store original job actions as the copy in job context will be fetched from cluster and
			// some transient fields (such as step object in ServerSideFacade) will not be preserved
			jobActions.put(jobToken, actions);
			jobContexts.put(jobToken, jobContext);
			logManager.addJobLogger(jobToken, jobLogger);
			serverStepThreads.put(jobToken, new ArrayList<>());
			try {
				jobExecutor.execute(jobContext);
				break;
			} catch (Throwable e) {
				if (retried.getAndIncrement() < maxRetries.get() && sessionManager.call(new Callable<Boolean>() {

					@Override
					public Boolean call() {
						RetryCondition retryCondition = RetryCondition.parse(job, job.getRetryCondition());
						AtomicReference<String> errorMessage = new AtomicReference<>(null);
						log(e, new TaskLogger() {

							@Override
							public void log(String message, String sessionId) {
								errorMessage.set(message);
							}

						});
						return retryCondition.matches(new RetryContext(buildManager.load(buildId), errorMessage.get()));
					}

				})) {
					log(e, jobLogger);
					jobLogger.warning("Job will be retried after a while...");
					transactionManager.run(new Runnable() {

						@Override
						public void run() {
							Build build = buildManager.load(buildId);
							build.setRunningDate(null);
							build.setPendingDate(null);
							build.setRetryDate(new Date());
							build.setStatus(Status.WAITING);
							listenerRegistry.post(new BuildRetrying(build));
							buildManager.save(build);
						}

					});
					try {
						Thread.sleep(retryDelay.get() * (long)(Math.pow(2, retried.get())) * 1000L);
					} catch (InterruptedException e2) {
						throw new RuntimeException(e2);
					}
					transactionManager.run(new Runnable() {

						@Override
						public void run() {
							JobExecution execution = executionRef.get();
							if (execution != null)
								execution.updateBeginTime();
							Build build = buildManager.load(buildId);
							build.setPendingDate(new Date());
							build.setStatus(Status.PENDING);
							listenerRegistry.post(new BuildPending(build));
							buildManager.save(build);
						}

					});
				} else {
					throw ExceptionUtils.unchecked(e);
				}
			} finally {
				Collection<Thread> threads = serverStepThreads.remove(jobToken);
				synchronized (threads) {
					for (Thread thread: threads)
						thread.interrupt();
				}
				logManager.removeJobLogger(jobToken);
				jobContexts.remove(jobToken);
				jobActions.remove(jobToken);
			}
		}
	}

}), job.getTimeout()*1000L));
return executionRef.get();
} else {
	throw new ExplicitException("No applicable job executor");
}
}
private void log(Throwable e, TaskLogger logger) {
@ -896,9 +821,9 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz
public Void call() throws Exception {
JobContext jobContext = getJobContext(buildId);
if (jobContext != null) {
JobExecutor jobExecutor = jobExecutors.get(jobContext.getJobToken());
if (jobExecutor != null)
jobExecutor.resume(jobContext);
JobRunnable jobRunnable = jobRunnables.get(jobContext.getJobToken());
if (jobRunnable != null)
jobRunnable.resume(jobContext);
}
return null;
}
@ -925,9 +850,9 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz
@Override
public Void call() throws Exception {
JobContext jobContext = getJobContext(jobToken, true);
JobExecutor jobExecutor = jobExecutors.get(jobContext.getJobToken());
if (jobExecutor != null) {
Shell shell = jobExecutor.openShell(jobContext, terminal);
JobRunnable jobRunnable = jobRunnables.get(jobContext.getJobToken());
if (jobRunnable != null) {
Shell shell = jobRunnable.openShell(jobContext, terminal);
jobShells.put(terminal.getSessionId(), shell);
} else {
throw new ExplicitException("Job shell not ready");
@ -1181,7 +1106,7 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz
} catch (InterruptedException e) {
}
}
scheduledTasks.values().stream().forEach(it1->it1.stream().forEach(it2->taskScheduler.unschedule(it2)));
scheduledTasks.values().forEach(it1-> it1.forEach(taskScheduler::unschedule));
scheduledTasks.clear();
}
@ -1294,9 +1219,9 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz
jobLogger.log("Job finished");
} catch (TimeoutException e) {
build.setStatus(Build.Status.TIMED_OUT);
} catch (CancellationException e) {
if (e instanceof CancellerAwareCancellationException) {
Long cancellerId = ((CancellerAwareCancellationException) e).getCancellerId();
} catch (java.util.concurrent.CancellationException e) {
if (e instanceof CancellationException) {
Long cancellerId = ((CancellationException) e).getCancellerId();
if (cancellerId != null)
build.setCanceller(userManager.load(cancellerId));
}
@ -1308,7 +1233,7 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz
jobLogger.error(explicitException.getMessage());
else if (ExceptionUtils.find(e, FailedException.class) == null)
jobLogger.error("Error running job", e);
} catch (InterruptedException e) {
} catch (InterruptedException ignored) {
} finally {
build.setFinishDate(new Date());
buildManager.save(build);
@ -1449,28 +1374,51 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz
});
}
private void runJobLocal(String jobToken, AgentInfo agentInfo) {
	JobContext jobContext = getJobContext(jobToken, true);
	Long buildId = jobContext.getBuildId();
	transactionManager.run(new Runnable() {

		@Override
		public void run() {
			Build build = buildManager.load(buildId);
			build.setStatus(Build.Status.RUNNING);
			build.setRunningDate(new Date());
			if (agentInfo != null)
				build.setAgent(agentManager.load(agentInfo.getId()));
			buildManager.save(build);
			listenerRegistry.post(new BuildRunning(build));
		}

	});
	JobExecutor jobExecutor = jobContext.getJobExecutor();
	jobExecutors.put(jobToken, jobExecutor);
@Override
public void runJob(UUID serverUUID, ClusterRunnable runnable) {
	Future<?> future = null;
	try {
		future = clusterManager.submitToServer(serverUUID, new ClusterTask<Void>() {

			private static final long serialVersionUID = 1L;

			@Override
			public Void call() throws Exception {
				runnable.run();
				return null;
			}

		});
		// future.get() here does not respond to thread interruption
		while (!future.isDone())
			Thread.sleep(1000);
		future.get(); // call get() to throw possible execution exceptions
	} catch (InterruptedException e) {
		if (future != null)
			future.cancel(true);
		throw new RuntimeException(e);
	} catch (ExecutionException e) {
		throw new RuntimeException(e);
	}
}
@Override
public void runJobLocal(JobContext jobContext, JobRunnable runnable) {
	while (thread == null) {
		try {
			Thread.sleep(100);
		} catch (InterruptedException e) {
			throw new RuntimeException(e);
		}
	}
	Long buildId = jobContext.getBuildId();
	String jobToken = jobContext.getJobToken();
	jobServers.put(jobToken, clusterManager.getLocalServerUUID());
	jobRunnables.put(jobToken, runnable);
try {
TaskLogger jobLogger = logManager.getJobLogger(jobToken);
if (jobLogger == null) {
@ -1478,21 +1426,21 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz
@Override
public void log(String message, String sessionId) {
projectManager.runOnProjectServer(jobContext.getProjectId(), new LogTask(jobToken, message, sessionId));
projectManager.runOnProjectServer(jobContext.getProjectId(), new LogTask(jobToken, message, sessionId));
}
};
logManager.addJobLogger(jobToken, jobLogger);
try {
jobExecutor.execute(jobContext, jobLogger, agentInfo);
runnable.run(jobLogger);
} finally {
logManager.removeJobLogger(jobToken);
}
} else {
jobExecutor.execute(jobContext, jobLogger, agentInfo);
runnable.run(jobLogger);
}
} finally {
jobExecutors.remove(jobToken);
jobRunnables.remove(jobToken);
jobServers.remove(jobToken);
}
}
@ -1639,21 +1587,4 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz
}
}
private static class JobRunnable implements ResourceRunnable {
private static final long serialVersionUID = 1L;
private final String jobToken;
public JobRunnable(String jobToken) {
this.jobToken = jobToken;
}
@Override
public void run(AgentInfo agentInfo) {
OneDev.getInstance(DefaultJobManager.class).runJobLocal(jobToken, agentInfo);
}
}
}

View File

@ -1,210 +1,139 @@
package io.onedev.server.job;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.eclipse.jetty.websocket.api.Session;
import org.hibernate.query.Query;
import com.hazelcast.cluster.Member;
import com.hazelcast.cluster.MembershipEvent;
import com.hazelcast.cluster.MembershipListener;
import com.hazelcast.core.EntryEvent;
import com.hazelcast.core.EntryListener;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.map.IMap;
import com.hazelcast.map.MapEvent;
import com.hazelcast.replicatedmap.ReplicatedMap;
import io.onedev.agent.AgentData;
import io.onedev.commons.loader.ManagedSerializedForm;
import io.onedev.commons.utils.ExplicitException;
import io.onedev.server.OneDev;
import io.onedev.server.ServerConfig;
import io.onedev.server.cluster.ClusterManager;
import io.onedev.server.cluster.ClusterRunnable;
import io.onedev.server.cluster.ClusterTask;
import io.onedev.server.entitymanager.AgentManager;
import io.onedev.server.event.Listen;
import io.onedev.server.event.agent.AgentConnected;
import io.onedev.server.event.agent.AgentDisconnected;
import io.onedev.server.event.entity.EntityPersisted;
import io.onedev.server.event.entity.EntityRemoved;
import io.onedev.server.event.system.SystemStarted;
import io.onedev.server.model.AbstractEntity;
import io.onedev.server.model.Agent;
import io.onedev.server.persistence.SessionManager;
import io.onedev.server.persistence.TransactionManager;
import io.onedev.server.persistence.annotation.Sessional;
import io.onedev.server.persistence.annotation.Transactional;
import io.onedev.server.persistence.dao.Dao;
import io.onedev.server.search.entity.agent.AgentQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.SystemInfo;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.util.*;
import java.util.stream.Collectors;
@Singleton
public class DefaultResourceAllocator implements ResourceAllocator, Serializable {
private static final Logger logger = LoggerFactory.getLogger(DefaultResourceAllocator.class);
private final AgentManager agentManager;
private final ClusterManager clusterManager;
private final ExecutorService executorService;
private final ServerConfig serverConfig;
private final Map<String, QueryCache> queryCaches = new HashMap<>();
private final SessionManager sessionManager;
private final TransactionManager transactionManager;
private final Dao dao;
private final JobManager jobManager;
private volatile ReplicatedMap<UUID, Map<String, Integer>> serverResourceQuotas;
private final Object serverAllocSync = new Object();
private volatile IMap<UUID, Map<String, Integer>> serverResourceUsages;
private final Object agentAllocSync = new Object();
private volatile ReplicatedMap<Long, Map<String, Integer>> agentResourceQuotas;
private volatile ReplicatedMap<UUID, Integer> serverCpus;
private volatile ReplicatedMap<Long, Integer> agentCpus;
private volatile IMap<Long, Map<String, Integer>> agentResourceUsages;
private volatile IMap<String, Integer> serverUsed;
private volatile IMap<Long, Boolean> agentPaused;
private volatile IMap<String, Integer> agentUsed;
private volatile IMap<Long, Long> agentDisconnecting;
@Inject
public DefaultResourceAllocator(Dao dao, AgentManager agentManager,
		SessionManager sessionManager, TransactionManager transactionManager,
		ClusterManager clusterManager, ServerConfig serverConfig,
		ExecutorService executorService) {
	this.dao = dao;
	this.agentManager = agentManager;
	this.sessionManager = sessionManager;
	this.transactionManager = transactionManager;
	this.clusterManager = clusterManager;
	this.serverConfig = serverConfig;
	this.executorService = executorService;
}
@Inject
public DefaultResourceAllocator(AgentManager agentManager, TransactionManager transactionManager,
		ClusterManager clusterManager, JobManager jobManager) {
	this.agentManager = agentManager;
	this.transactionManager = transactionManager;
	this.clusterManager = clusterManager;
	this.jobManager = jobManager;
}
public Object writeReplace() throws ObjectStreamException {
return new ManagedSerializedForm(ResourceAllocator.class);
}
private <K, V> EntryListener<K, V> newResourceChangeListener() {
return new EntryListener<K, V>() {
private void notifyResourceChange() {
synchronized (DefaultResourceAllocator.this) {
DefaultResourceAllocator.this.notifyAll();
}
}
@Override
public void entryAdded(EntryEvent<K, V> event) {
notifyResourceChange();
}
@Override
public void entryUpdated(EntryEvent<K, V> event) {
notifyResourceChange();
}
@Override
public void entryRemoved(EntryEvent<K, V> event) {
notifyResourceChange();
}
@Override
public void entryEvicted(EntryEvent<K, V> event) {
notifyResourceChange();
}
@Override
public void entryExpired(EntryEvent<K, V> event) {
notifyResourceChange();
}
@Override
public void mapCleared(MapEvent event) {
notifyResourceChange();
}
@Override
public void mapEvicted(MapEvent event) {
notifyResourceChange();
}
};
}
@SuppressWarnings("unchecked")
@Transactional
@Listen(10)
public void on(SystemStarted event) {
HazelcastInstance hazelcastInstance = clusterManager.getHazelcastInstance();
serverResourceQuotas = hazelcastInstance.getReplicatedMap("serverResourceQuotas");
serverResourceQuotas.addEntryListener(newResourceChangeListener());
Map<String, Integer> resourceCounts = new HashMap<>();
resourceCounts.put(CPU, serverConfig.getServerCpu());
resourceCounts.put(MEMORY, serverConfig.getServerMemory());
UUID localServerUUID = clusterManager.getLocalServerUUID();
serverResourceQuotas.put(localServerUUID, resourceCounts);
serverResourceUsages = hazelcastInstance.getMap("serverResourceUsages");
serverResourceUsages.put(localServerUUID, new HashMap<>());
serverResourceUsages.addEntryListener(newResourceChangeListener(), false);
agentResourceQuotas = hazelcastInstance.getReplicatedMap("agentResourceQuotas");
agentResourceQuotas.addEntryListener(newResourceChangeListener());
agentResourceUsages = hazelcastInstance.getMap("agentResourceUsages");
agentResourceUsages.addEntryListener(newResourceChangeListener(), false);
agentPaused = hazelcastInstance.getMap("agentPaused");
agentPaused.addEntryListener(newResourceChangeListener(), false);
if (clusterManager.isLeaderServer()) {
	Query<?> query = dao.getSession().createQuery(String.format("select id, %s from Agent", Agent.PROP_PAUSED));
	for (Object[] fields: (List<Object[]>)query.list())
		agentPaused.put((Long)fields[0], (Boolean)fields[1]);
}
serverCpus = hazelcastInstance.getReplicatedMap("serverCpus");
UUID localServerUUID = clusterManager.getLocalServerUUID();
try {
	serverCpus.put(
			localServerUUID,
			new SystemInfo().getHardware().getProcessor().getLogicalProcessorCount());
} catch (Exception e) {
	logger.debug("Error calling oshi", e);
	serverCpus.put(localServerUUID, 4);
}
agentCpus = hazelcastInstance.getReplicatedMap("agentCpus");
serverUsed = hazelcastInstance.getMap("serverUsed");
agentUsed = hazelcastInstance.getMap("agentUsed");
agentDisconnecting = hazelcastInstance.getMap("agentDisconnecting");
hazelcastInstance.getCluster().addMembershipListener(new MembershipListener() {
@Override
public void memberRemoved(MembershipEvent membershipEvent) {
if (clusterManager.isLeaderServer()) {
serverResourceQuotas.remove(membershipEvent.getMember().getUuid());
serverResourceUsages.remove(membershipEvent.getMember().getUuid());
Set<Long> agentIdsToRemove = new HashSet<>();
for (var entry: agentManager.getAgentServers().entrySet()) {
	if (entry.getValue().equals(membershipEvent.getMember().getUuid()))
		agentIdsToRemove.add(entry.getKey());
}
for (Long agentId: agentIdsToRemove) {
	agentResourceQuotas.remove(agentId);
	agentResourceUsages.remove(agentId);
}
UUID removedServerUUID = membershipEvent.getMember().getUuid();
serverCpus.remove(removedServerUUID);
Set<String> keysToRemove = new HashSet<>();
for (var key: serverUsed.keySet()) {
	if (key.startsWith(removedServerUUID.toString() + ":"))
		keysToRemove.add(key);
}
for (var keyToRemove: keysToRemove)
	serverUsed.remove(keyToRemove);
Set<Long> agentIdsToRemove = new HashSet<>();
for (var entry: agentManager.getAgentServers().entrySet()) {
	if (entry.getValue().equals(removedServerUUID))
		agentIdsToRemove.add(entry.getKey());
}
keysToRemove.clear();
for (var agentId: agentIdsToRemove) {
	agentCpus.remove(agentId);
	for (var key: agentUsed.keySet()) {
		if (key.startsWith(agentId + ":"))
			keysToRemove.add(key);
	}
}
for (var keyToRemove: keysToRemove)
	agentUsed.remove(keyToRemove);
}
}
@Override
public void memberAdded(MembershipEvent membershipEvent) {
notifyServerAlloc();
}
});
@ -215,27 +144,46 @@ public class DefaultResourceAllocator implements ResourceAllocator, Serializable
@Listen
public void on(AgentConnected event) {
Long agentId = event.getAgent().getId();
sessionManager.runAsyncAfterCommit(new Runnable() {

	@Override
	public void run() {
		// Synchronize at very start of the method to make sure it is not possible for db connection
		// to wait for synchronization block
		synchronized (DefaultResourceAllocator.this) {
			Agent agent = agentManager.load(agentId);
			agentResourceQuotas.put(agentId, agent.getResources());
			agentResourceUsages.put(agentId, new HashMap<>());
			for (QueryCache cache: queryCaches.values()) {
				if (cache.query.matches(agent))
					cache.result.add(agentId);
			}
		}
	}

});
Integer agentCpus = event.getAgent().getCpus();
transactionManager.runAfterCommit(new Runnable() {

	@Override
	public void run() {
		DefaultResourceAllocator.this.agentCpus.put(agentId, agentCpus);
		agentDisconnecting.remove(agentId);
		notifyAgentAlloc();
	}

});
}
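// The notify* helpers below broadcast to every cluster member so that any thread
// blocked inside runServerJob/runAgentJob wakes up and retries its allocation loop.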
private void notifyServerAlloc() {
clusterManager.submitToAllServers((ClusterTask<Void>) () -> {
synchronized (serverAllocSync) {
serverAllocSync.notifyAll();
}
return null;
});
}
private void notifyAgentAlloc() {
clusterManager.submitToAllServers((ClusterTask<Void>) () -> {
synchronized (agentAllocSync) {
agentAllocSync.notifyAll();
}
return null;
});
}
@Listen
public void on(EntityPersisted event) {
if (event.getEntity() instanceof Agent) {
Agent agent = (Agent) event.getEntity();
if (!agent.isPaused())
notifyAgentAlloc();
}
}
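// Note on the listener above: saving an Agent with paused == false (for example an
// un-pause) may free capacity, hence the notifyAgentAlloc() call.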
@Transactional
@Listen
public void on(AgentDisconnected event) {
@ -244,178 +192,25 @@ public class DefaultResourceAllocator implements ResourceAllocator, Serializable
@Override
public void run() {
// Run in a separate thread to make sure it is not possible for db connection to
// wait for synchronization block
executorService.execute(new Runnable() {
@Override
public void run() {
agentResourceQuotas.remove(agentId);
agentResourceUsages.remove(agentId);
synchronized (DefaultResourceAllocator.this) {
for (QueryCache cache: queryCaches.values())
cache.result.remove(agentId);
}
}
});
agentCpus.remove(agentId);
Set<String> keysToRemove = new HashSet<>();
for (var key: agentUsed.keySet()) {
if (key.startsWith(agentId + ":"))
keysToRemove.add(key);
}
for (var keyToRemove: keysToRemove)
agentUsed.remove(keyToRemove);
}
});
}
@Transactional
@Listen
public void on(EntityPersisted event) {
if (event.getEntity() instanceof Agent) {
Long agentId = event.getEntity().getId();
boolean paused = ((Agent)event.getEntity()).isPaused();
transactionManager.runAfterCommit(new Runnable() {
@Override
public void run() {
agentPaused.put(agentId, paused);
}
});
}
}
@Transactional
@Listen
public void on(EntityRemoved event) {
if (event.getEntity() instanceof Agent) {
Long agentId = event.getEntity().getId();
transactionManager.runAfterCommit(new Runnable() {
@Override
public void run() {
synchronized (DefaultResourceAllocator.this) {
agentPaused.remove(agentId);
}
}
});
}
}
private int getAllocationScore(Map<String, Integer> resourceQuotas, Map<String, Integer> resourceUsages,
Map<String, Integer> resourceRequirements) {
for (Map.Entry<String, Integer> entry: resourceRequirements.entrySet()) {
Integer totalCount = resourceQuotas.get(entry.getKey());
if (totalCount == null)
totalCount = 0;
Integer usedCount = resourceUsages.get(entry.getKey());
if (usedCount == null)
usedCount = 0;
if (usedCount + entry.getValue() > totalCount)
return 0;
}
Integer cpuTotal = resourceQuotas.get(CPU);
if (cpuTotal == null)
cpuTotal = 0;
Integer memoryTotal = resourceQuotas.get(MEMORY);
if (memoryTotal == null)
memoryTotal = 0;
Integer cpuUsed = resourceUsages.get(CPU);
if (cpuUsed == null)
cpuUsed = 0;
Integer cpuRequired = resourceRequirements.get(CPU);
if (cpuRequired == null)
cpuRequired = 0;
cpuUsed += cpuRequired;
if (cpuUsed == 0)
cpuUsed = 1;
Integer memoryUsed = resourceUsages.get(CPU);
if (memoryUsed == null)
memoryUsed = 0;
Integer memoryRequired = resourceRequirements.get(CPU);
if (memoryRequired == null)
memoryRequired = 0;
memoryUsed += memoryRequired;
if (memoryUsed == 0)
memoryUsed = 1;
int score = cpuTotal*400/cpuUsed + memoryTotal*100/memoryUsed;
if (score <= 0)
score = 1;
return score;
}
private UUID allocateServer(Map<String, Integer> resourceRequirements) {
UUID allocatedServerUUID = null;
synchronized(this) {
while (true) {
int maxScore = 0;
for (Member server: clusterManager.getHazelcastInstance().getCluster().getMembers()) {
var totalResourceCounts = serverResourceQuotas.get(server.getUuid());
if (totalResourceCounts != null) {
var usedResourceCounts = serverResourceUsages.get(server.getUuid());
if (usedResourceCounts == null)
usedResourceCounts = new HashMap<>();
int score = getAllocationScore(totalResourceCounts, usedResourceCounts, resourceRequirements);
if (score > maxScore) {
allocatedServerUUID = server.getUuid();
maxScore = score;
}
}
}
if (allocatedServerUUID != null)
break;
try {
wait();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
return allocatedServerUUID;
}
private Long allocateAgent(AgentQuery agentQuery, Map<String, Integer> resourceRequirements) {
Set<Long> agentIds = agentManager.query(agentQuery, 0, Integer.MAX_VALUE)
.stream().map(it->it.getId()).collect(Collectors.toSet());
Long allocatedAgentId = 0L;
synchronized(this) {
String uuid = UUID.randomUUID().toString();
queryCaches.put(uuid, new QueryCache(agentQuery, agentIds));
try {
while (true) {
int maxScore = 0;
for (Long agentId: agentIds) {
Map<String, Integer> totalResourceCounts = agentResourceQuotas.get(agentId);
Boolean paused = agentPaused.get(agentId);
if (totalResourceCounts != null && paused != null && !paused) {
var usedResourceCounts = agentResourceUsages.get(agentId);
if (usedResourceCounts == null)
usedResourceCounts = new HashMap<>();
int score = getAllocationScore(totalResourceCounts, usedResourceCounts, resourceRequirements);
if (score > maxScore) {
allocatedAgentId = agentId;
maxScore = score;
}
}
}
if (allocatedAgentId != 0)
break;
try {
wait();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
} finally {
queryCaches.remove(uuid);
}
}
	return allocatedAgentId;
}

private int getAllocationScore(int total, int used, int required) {
if (used + required <= total)
return total * 100 / (used + required);
else
return 0;
}
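// Worked example of the score above (values are illustrative, not from this commit):
// an 8-CPU node already running 3 jobs of a holder scores 8 * 100 / (3 + 1) = 200 for
// one more job, while a 2-CPU node running 2 jobs scores 0 (2 + 1 > 2) and is skipped.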
@Transactional
@ -423,139 +218,167 @@ public class DefaultResourceAllocator implements ResourceAllocator, Serializable
agentManager.load(agentId).setLastUsedDate(new Date());
}
private static class QueryCache {
AgentQuery query;
Collection<Long> result;
QueryCache(AgentQuery query, Collection<Long> result) {
this.query = query;
this.result = result;
}
}
@Override
public void waitingForAgentResourceToBeReleased(Long agentId) {
synchronized (this) {
Map<String, Integer> usedResourceCounts = agentResourceUsages.remove(agentId);
if (usedResourceCounts != null) {
while (usedResourceCounts.values().stream().anyMatch(it->it>0)) {
try {
wait();
} catch (InterruptedException e) {
}
public void wantToDisconnectAgent(Long agentId) {
agentDisconnecting.put(agentId, agentId);
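// once marked here, runAgentJob below skips this agent, so its usage can only drain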
while (true) {
boolean idle = true;
for (var entry : agentUsed.entrySet()) {
if (entry.getKey().startsWith(agentId + ":") && entry.getValue() > 0) {
idle = false;
break;
}
}
if (idle) {
break;
} else {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
}
private void increaseResourceCounts(Map<String, Integer> resourceCounts, Map<String, Integer> increment) {
for (Map.Entry<String, Integer> entry: increment.entrySet()) {
Integer count = resourceCounts.get(entry.getKey());
Integer newCount = (count != null? count + entry.getValue(): entry.getValue());
resourceCounts.put(entry.getKey(), newCount >= 0? newCount: 0);
}
}
private synchronized void increaseResourceUsages(UUID serverUUID, Map<String, Integer> increment) {
Map<String, Integer> resourceCounts = serverResourceUsages.get(serverUUID);
if (resourceCounts == null)
resourceCounts = new HashMap<>();
increaseResourceCounts(resourceCounts, increment);
serverResourceUsages.put(serverUUID, resourceCounts);
}
private synchronized void increaseResourceUsages(Long agentId, Map<String, Integer> increment) {
Map<String, Integer> resourceCounts = agentResourceUsages.get(agentId);
if (resourceCounts == null)
resourceCounts = new HashMap<>();
increaseResourceCounts(resourceCounts, increment);
agentResourceUsages.put(agentId, resourceCounts);
}
private Map<String, Integer> makeNegative(Map<String, Integer> map) {
Map<String, Integer> negative = new HashMap<>();
for (Map.Entry<String, Integer> entry: map.entrySet())
negative.put(entry.getKey(), entry.getValue() * -1);
return negative;
}
private AgentManager getAgentManager() {
return OneDev.getInstance(AgentManager.class);
}
@Sessional
protected AgentData getAgentData(Long agentId) {
	return getAgentManager().load(agentId).getAgentData();
}

private <T> int getEffectiveTotal(Map<T, Integer> cpuMap, T key, int total) {
Integer effectiveTotal = total;
if (effectiveTotal == 0)
effectiveTotal = cpuMap.get(key);
if (effectiveTotal == null)
effectiveTotal = 0;
return effectiveTotal;
}
@Override
public void run(ResourceRunnable runnable, AgentQuery agentQuery, Map<String, Integer> resourceRequirements) {
	Future<?> future = null;
	try {
		if (agentQuery != null) {
			Long agentId = allocateAgent(agentQuery, resourceRequirements);
			UUID serverUUID = getAgentManager().getAgentServers().get(agentId);
			if (serverUUID == null)
				throw new ExplicitException("Can not find server managing allocated agent, please retry later");
			future = clusterManager.submitToServer(serverUUID, new ClusterTask<Void>() {

				private static final long serialVersionUID = 1L;

				@Override
				public Void call() throws Exception {
					updateLastUsedDate(agentId);
					AgentData agentData = getAgentData(agentId);
					Session agentSession = getAgentManager().getAgentSession(agentId);
					if (agentSession == null)
						throw new ExplicitException("Allocated agent not connected to current server, please retry later");
					increaseResourceUsages(agentId, resourceRequirements);
					try {
						runnable.run(new AgentInfo(agentId, agentData, agentSession));
					} finally {
						increaseResourceUsages(agentId, makeNegative(resourceRequirements));
					}
					return null;
				}

			});
		} else {
			UUID serverUUID = allocateServer(resourceRequirements);
			future = clusterManager.submitToServer(serverUUID, new ClusterTask<Void>() {

				private static final long serialVersionUID = 1L;

				@Override
				public Void call() throws Exception {
					UUID localServerUUID = clusterManager.getLocalServerUUID();
					increaseResourceUsages(localServerUUID, resourceRequirements);
					try {
						runnable.run(null);
					} finally {
						increaseResourceUsages(localServerUUID, makeNegative(resourceRequirements));
					}
					return null;
				}

			});
		}
		// future.get() here does not respond to thread interruption
		while (!future.isDone())
			Thread.sleep(1000);
		future.get(); // call get() to throw possible execution exceptions
	} catch (InterruptedException e) {
		if (future != null)
			future.cancel(true);
		throw new RuntimeException(e);
	} catch (ExecutionException e) {
		throw new RuntimeException(e);
	}
}
private <T> T allocate(Collection<T> pool, Map<T, Integer> cpuMap, Map<String, Integer> usedMap,
		String resourceHolder, int total, int required) {
	T allocated = null;
	int maxScore = 0;
	for (var each: pool) {
		int effectiveTotal = getEffectiveTotal(cpuMap, each, total);
		Integer used = usedMap.get(each + ":" + resourceHolder);
		if (used == null)
			used = 0;
		int score = getAllocationScore(effectiveTotal, used, required);
		if (score > maxScore) {
			allocated = each;
			maxScore = score;
		}
	}
	return allocated;
}
private boolean acquire(IMap<String, Integer> used, String key, int total, int required) {
	while (true) {
		Integer prevValue = used.get(key);
		if (prevValue != null) {
			if (prevValue + required <= total) {
				if (used.replace(key, prevValue, prevValue + required))
					return true;
			} else {
				return false;
			}
		} else {
			if (required <= total) {
				if (used.putIfAbsent(key, required) == null)
					return true;
			} else {
				return false;
			}
		}
		try {
			Thread.sleep(100);
		} catch (InterruptedException e) {
			throw new RuntimeException(e);
		}
	}
}
private void release(IMap<String, Integer> used, String key, int required) {
while (true) {
int prevValue = used.get(key);
if (used.replace(key, prevValue, prevValue - required))
break;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
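The acquire/release pair above implements an optimistic per-"node:holder" counter on a Hazelcast IMap via compare-and-swap. A minimal usage sketch, mirroring runServerJob below (the holder name and slot numbers are made up):
void acquireReleaseSketch(UUID serverUUID, Runnable job) {
	String key = serverUUID + ":" + "my-executor"; // hypothetical resource holder
	if (acquire(serverUsed, key, 4 /* total slots */, 1 /* slots for this job */)) {
		try {
			job.run();
		} finally {
			release(serverUsed, key, 1);
		}
	}
}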
@Override
public void runServerJob(String resourceHolder, int total, int required,
ClusterRunnable runnable) {
UUID serverUUID;
synchronized (serverAllocSync) {
while (true) {
Collection<UUID> serverUUIDs = clusterManager.getHazelcastInstance().getCluster().getMembers()
.stream().map(Member::getUuid).collect(Collectors.toSet());
serverUUID = allocate(serverUUIDs, serverCpus, serverUsed,
resourceHolder, total, required);
if (serverUUID != null) {
int effectiveTotal = getEffectiveTotal(serverCpus, serverUUID, total);
if (acquire(serverUsed, serverUUID + ":" + resourceHolder, effectiveTotal, required))
break;
}
try {
serverAllocSync.wait();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
try {
jobManager.runJob(serverUUID, runnable);
} finally {
release(serverUsed, serverUUID + ":" + resourceHolder, required);
notifyServerAlloc();
}
}
@Override
public void runAgentJob(AgentQuery agentQuery, String resourceHolder,
int total, int required, AgentRunnable runnable) {
Long agentId;
synchronized (agentAllocSync) {
while (true) {
Collection<Long> agentIds = agentManager.query(agentQuery, 0, Integer.MAX_VALUE)
.stream().filter(it-> it.isOnline() && !it.isPaused())
.map(AbstractEntity::getId)
.collect(Collectors.toSet());
agentIds.removeAll(agentDisconnecting.keySet());
agentId = allocate(agentIds, agentCpus, agentUsed, resourceHolder, total, required);
if (agentId != null) {
int effectiveTotal = getEffectiveTotal(agentCpus, agentId, total);
if (acquire(agentUsed, agentId + ":" + resourceHolder, effectiveTotal, required))
break;
}
try {
agentAllocSync.wait();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
try {
updateLastUsedDate(agentId);
UUID serverUUID = getAgentManager().getAgentServers().get(agentId);
if (serverUUID == null)
throw new ExplicitException("Can not find server managing allocated agent, please retry later");
Long finalAgentId = agentId;
jobManager.runJob(serverUUID, () -> runnable.run(finalAgentId));
} finally {
release(agentUsed, agentId + ":" + resourceHolder, required);
notifyAgentAlloc();
}
}
}

View File

@ -1,17 +1,15 @@
package io.onedev.server.job;
import java.io.Serializable;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.eclipse.jgit.lib.ObjectId;
import io.onedev.k8shelper.Action;
import io.onedev.k8shelper.LeafFacade;
import io.onedev.server.buildspec.Service;
import io.onedev.server.buildspec.job.CacheSpec;
import io.onedev.server.model.support.administration.jobexecutor.JobExecutor;
import org.eclipse.jgit.lib.ObjectId;
import java.io.Serializable;
import java.util.Collection;
import java.util.List;
public class JobContext implements Serializable {
@ -41,14 +39,12 @@ public class JobContext implements Serializable {
private final List<Service> services;
private final Map<String, Integer> resourceRequirements;
private final int retried;
public JobContext(String jobToken, JobExecutor jobExecutor, Long projectId, String projectPath,
String projectGitDir, Long buildId, Long buildNumber, List<Action> actions,
String refName, ObjectId commitId, Collection<CacheSpec> caches,
List<Service> services, Map<String, Integer> resourceRequirements, int retried) {
List<Service> services, int retried) {
this.jobToken = jobToken;
this.jobExecutor = jobExecutor;
this.projectId = projectId;
@ -61,7 +57,6 @@ public class JobContext implements Serializable {
this.commitId = commitId;
this.cacheSpecs = caches;
this.services = services;
this.resourceRequirements = resourceRequirements;
this.retried = retried;
}
@ -97,10 +92,6 @@ public class JobContext implements Serializable {
return services;
}
public Map<String, Integer> getResourceRequirements() {
return resourceRequirements;
}
public Long getProjectId() {
return projectId;
}

View File

@ -47,7 +47,7 @@ public class JobExecution {
if (isTimedout())
throw new TimeoutException();
else if (cancellerId != null)
throw new CancellerAwareCancellationException(cancellerId);
throw new CancellationException(cancellerId);
else
future.get();
}
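For reference, the renamed exception is assumed to look roughly like this, inferred from its usage here and in DefaultJobManager above; the actual class is defined elsewhere in this commit:
public class CancellationException extends java.util.concurrent.CancellationException {

	private static final long serialVersionUID = 1L;

	private final Long cancellerId;

	public CancellationException(@Nullable Long cancellerId) {
		this.cancellerId = cancellerId;
	}

	@Nullable
	public Long getCancellerId() {
		return cancellerId;
	}

}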

View File

@ -3,9 +3,12 @@ package io.onedev.server.job;
import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import javax.annotation.Nullable;
import io.onedev.server.cluster.ClusterRunnable;
import io.onedev.server.model.support.administration.jobexecutor.JobExecutor;
import org.eclipse.jgit.lib.ObjectId;
import io.onedev.commons.utils.TaskLogger;
@ -34,6 +37,10 @@ public interface JobManager {
void cancel(Build build);
void resume(Build build);
void runJob(UUID serverUUID, ClusterRunnable runnable);
void runJobLocal(JobContext jobContext, JobRunnable runnable);
WebShell openShell(Long buildId, Terminal terminal);

View File

@ -0,0 +1,17 @@
package io.onedev.server.job;
import io.onedev.commons.utils.TaskLogger;
import io.onedev.server.terminal.Shell;
import io.onedev.server.terminal.Terminal;
import java.io.Serializable;
public interface JobRunnable extends Serializable {
void run(TaskLogger jobLogger);
void resume(JobContext jobContext);
Shell openShell(JobContext jobContext, Terminal terminal);
}
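A minimal sketch of implementing this interface (class name, log text and no-op bodies are illustrative only):
public class EchoJobRunnable implements JobRunnable {

	private static final long serialVersionUID = 1L;

	@Override
	public void run(TaskLogger jobLogger) {
		jobLogger.log("Running steps of the allocated job...");
	}

	@Override
	public void resume(JobContext jobContext) {
		// nothing to resume in this sketch
	}

	@Override
	public Shell openShell(JobContext jobContext, Terminal terminal) {
		throw new UnsupportedOperationException("no shell in this sketch");
	}

}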

View File

@ -1,20 +1,15 @@
package io.onedev.server.job;
import java.util.Map;
import javax.annotation.Nullable;
import io.onedev.server.search.entity.agent.AgentQuery;
public interface ResourceAllocator {
static final String CPU = "cpu";
static final String MEMORY = "memory";
void run(ResourceRunnable runnable, @Nullable AgentQuery agentQuery,
Map<String, Integer> resourceRequirements);
void waitingForAgentResourceToBeReleased(Long agentId);
}
package io.onedev.server.job;
import io.onedev.server.cluster.ClusterRunnable;
import io.onedev.server.search.entity.agent.AgentQuery;
public interface ResourceAllocator {
void runServerJob(String resourceHolder, int total, int required, ClusterRunnable runnable);
void runAgentJob(AgentQuery agentQuery, String resourceHolder, int total, int required,
AgentRunnable runnable);
void wantToDisconnectAgent(Long agentId);
}
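A hedged sketch of how a job executor might drive the new allocator; the executor names, concurrency values and the jobContext/jobRunnable/agentQuery variables are assumptions, not part of this commit:
void executeSketch(ResourceAllocator resourceAllocator, JobManager jobManager,
		JobContext jobContext, JobRunnable jobRunnable, AgentQuery agentQuery) {
	// server-side execution: total == 0 makes the allocator fall back to the
	// node's CPU count (see getEffectiveTotal in DefaultResourceAllocator)
	resourceAllocator.runServerJob("server-docker-executor", 0, 1,
			() -> jobManager.runJobLocal(jobContext, jobRunnable));
	// agent execution: only online, non-paused, non-disconnecting agents
	// matching agentQuery are considered
	resourceAllocator.runAgentJob(agentQuery, "remote-docker-executor", 0, 1,
			agentId -> jobManager.runJobLocal(jobContext, jobRunnable));
}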

View File

@ -1,11 +0,0 @@
package io.onedev.server.job;
import java.io.Serializable;
import javax.annotation.Nullable;
public interface ResourceRunnable extends Serializable {
void run(@Nullable AgentInfo agentInfo);
}

View File

@ -4580,7 +4580,21 @@ public class DataMigrator {
"io.onedev.server.git.location.");
curlConfigElement.addAttribute("class", clazz);
}
} else if (key.equals("PERFORMANCE")) {
Element valueElement = element.element("value");
if (valueElement != null) {
int cpuIntensiveTaskConcurrency;
try {
HardwareAbstractionLayer hardware = new SystemInfo().getHardware();
cpuIntensiveTaskConcurrency = hardware.getProcessor().getLogicalProcessorCount();
} catch (Exception e) {
cpuIntensiveTaskConcurrency = 4;
}
valueElement.addElement("cpuIntensiveTaskConcurrency")
.setText(String.valueOf(cpuIntensiveTaskConcurrency));
}
}
}
dom.writeToFile(file, false);
} else if (file.getName().startsWith("Projects.xml")) {
@ -4598,6 +4612,15 @@ public class DataMigrator {
}
}
dom.writeToFile(file, false);
} else if (file.getName().startsWith("Agents.xml")) {
VersionedXmlDoc dom = VersionedXmlDoc.fromFile(file);
for (Element element : dom.getRootElement().elements()) {
element.element("memory").detach();
Element cpuElement = element.element("cpu");
cpuElement.setName("cpus");
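// the stored value was in milli-CPUs, so e.g. "4000" becomes 4 logical CPUs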
cpuElement.setText(String.valueOf(Integer.parseInt(cpuElement.getTextTrim())/1000));
}
dom.writeToFile(file, false);
}
}
}

View File

@ -1,52 +1,25 @@
package io.onedev.server.model;
import static io.onedev.server.model.Agent.PROP_CPU;
import static io.onedev.server.model.Agent.PROP_IP_ADDRESS;
import static io.onedev.server.model.Agent.PROP_LAST_USED_DATE;
import static io.onedev.server.model.Agent.PROP_MEMORY;
import static io.onedev.server.model.Agent.PROP_NAME;
import static io.onedev.server.model.Agent.PROP_OS_ARCH;
import static io.onedev.server.model.Agent.PROP_OS_NAME;
import static io.onedev.server.model.Agent.PROP_OS_VERSION;
import static io.onedev.server.model.Agent.PROP_PAUSED;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import io.onedev.agent.AgentData;
import io.onedev.k8shelper.OsInfo;
import io.onedev.server.OneDev;
import io.onedev.server.entitymanager.AgentManager;
import io.onedev.server.job.ResourceAllocator;
import io.onedev.server.util.CollectionUtils;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import javax.persistence.*;
import java.util.*;
import static io.onedev.server.model.Agent.*;
@Entity
@Table(indexes={
@Index(columnList="o_token_id"), @Index(columnList=PROP_IP_ADDRESS),
@Index(columnList=PROP_PAUSED), @Index(columnList=PROP_NAME),
@Index(columnList=PROP_OS_NAME), @Index(columnList=PROP_OS_VERSION),
@Index(columnList=PROP_CPU), @Index(columnList=PROP_MEMORY),
@Index(columnList=PROP_OS_ARCH), @Index(columnList=PROP_LAST_USED_DATE)})
@Cache(usage=CacheConcurrencyStrategy.READ_WRITE)
public class Agent extends AbstractEntity {
@ -61,8 +34,6 @@ public class Agent extends AbstractEntity {
public static final String PROP_BUILDS = "builds";
public static final String PROP_ATTRIBUTES = "attributes";
public static final String NAME_IP_ADDRESS = "Ip Address";
public static final String PROP_IP_ADDRESS = "ipAddress";
@ -79,30 +50,20 @@ public class Agent extends AbstractEntity {
public static final String PROP_OS_ARCH = "osArch";
public static final String NAME_CPU = "CPU";
public static final String PROP_CPU = "cpu";
public static final String NAME_MEMORY = "Memory";
public static final String PROP_MEMORY = "memory";
public static final String PROP_LAST_USED_DATE = "lastUsedDate";
public static final Set<String> ALL_FIELDS = Sets.newHashSet(
NAME_NAME, NAME_IP_ADDRESS, NAME_OS_NAME, NAME_OS_VERSION, NAME_OS_ARCH, NAME_CPU, NAME_MEMORY);
NAME_NAME, NAME_IP_ADDRESS, NAME_OS_NAME, NAME_OS_VERSION, NAME_OS_ARCH);
public static final List<String> QUERY_FIELDS = Lists.newArrayList(
NAME_NAME, NAME_IP_ADDRESS, NAME_OS_NAME, NAME_OS_VERSION, NAME_OS_ARCH, NAME_CPU, NAME_MEMORY);
NAME_NAME, NAME_IP_ADDRESS, NAME_OS_NAME, NAME_OS_VERSION, NAME_OS_ARCH);
public static final Map<String, String> ORDER_FIELDS = CollectionUtils.newLinkedHashMap(
NAME_NAME, PROP_NAME,
NAME_IP_ADDRESS, PROP_IP_ADDRESS,
NAME_OS_NAME, PROP_OS_NAME,
NAME_OS_VERSION, PROP_OS_VERSION,
NAME_OS_ARCH, PROP_OS_ARCH,
NAME_CPU, PROP_CPU,
NAME_MEMORY, PROP_MEMORY);
NAME_OS_ARCH, PROP_OS_ARCH);
@ManyToOne(fetch=FetchType.LAZY)
@JoinColumn(nullable=false)
@ -129,9 +90,7 @@ public class Agent extends AbstractEntity {
@Column(nullable=false)
private String osArch;
private int cpu;
private int memory;
private int cpus;
private boolean temporal;
@ -197,20 +156,12 @@ public class Agent extends AbstractEntity {
this.paused = paused;
}
public int getCpu() {
	return cpu;
}

public void setCpu(int cpu) {
	this.cpu = cpu;
}

public int getMemory() {
	return memory;
}

public void setMemory(int memory) {
	this.memory = memory;
}
public int getCpus() {
	return cpus;
}

public void setCpus(int cpus) {
	this.cpus = cpus;
}
public boolean isTemporal() {
@ -250,16 +201,9 @@ public class Agent extends AbstractEntity {
return online;
}
public Map<String, Integer> getResources() {
Map<String, Integer> resources = new HashMap<>();
resources.put(ResourceAllocator.CPU, cpu);
resources.put(ResourceAllocator.MEMORY, memory);
return resources;
}
public AgentData getAgentData() {
return new AgentData(getToken().getValue(), new OsInfo(osName, osVersion, osArch),
name, ipAddress, cpu, memory, temporal, getAttributeMap());
name, ipAddress, cpus, temporal, getAttributeMap());
}
}

View File

@ -2,19 +2,48 @@ package io.onedev.server.model.support.administration;
import java.io.Serializable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.onedev.server.web.editable.annotation.Editable;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
@Editable
public class PerformanceSetting implements Serializable {
private static final long serialVersionUID = 1;
private static final Logger logger = LoggerFactory.getLogger(PerformanceSetting.class);
private int cpuIntensiveTaskConcurrency;
private int maxGitLFSFileSize = 4096;
private int maxUploadFileSize = 20;
private int maxCodeSearchEntries = 100;
public PerformanceSetting() {
try {
HardwareAbstractionLayer hardware = new SystemInfo().getHardware();
cpuIntensiveTaskConcurrency = hardware.getProcessor().getLogicalProcessorCount();
} catch (Exception e) {
logger.debug("Error calling oshi", e);
cpuIntensiveTaskConcurrency = 4;
}
}
@Editable(order=100, name="CPU Intensive Task Concurrency", description="Specify max concurrent CPU intensive "
+ "tasks, such as Git repository pull/push, repository index, etc.")
public int getCpuIntensiveTaskConcurrency() {
return cpuIntensiveTaskConcurrency;
}
public void setCpuIntensiveTaskConcurrency(int cpuIntensiveTaskConcurrency) {
this.cpuIntensiveTaskConcurrency = cpuIntensiveTaskConcurrency;
}
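For context, a setting like this is typically enforced with a counting semaphore; a minimal sketch under that assumption (not the actual wiring in this commit):
import java.util.concurrent.Semaphore;

public class CpuIntensiveGate {

	private final Semaphore permits;

	public CpuIntensiveGate(PerformanceSetting setting) {
		permits = new Semaphore(setting.getCpuIntensiveTaskConcurrency());
	}

	public void run(Runnable task) throws InterruptedException {
		permits.acquire(); // blocks while the configured concurrency is saturated
		try {
			task.run();
		} finally {
			permits.release();
		}
	}

}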
@Editable(order=600, name="Max Git LFS File Size (MB)", description="Specify max git LFS file size in mega bytes")
public int getMaxGitLFSFileSize() {
return maxGitLFSFileSize;

View File

@ -1,207 +1,213 @@
package io.onedev.server.model.support.administration.jobexecutor;
import static java.nio.charset.StandardCharsets.UTF_8;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import javax.validation.constraints.NotEmpty;
import javax.ws.rs.core.Response;
import com.google.common.base.Splitter;
import com.google.common.base.Throwables;
import io.onedev.commons.loader.ExtensionPoint;
import io.onedev.commons.utils.FileUtils;
import io.onedev.commons.utils.TaskLogger;
import io.onedev.server.OneDev;
import io.onedev.server.ServerConfig;
import io.onedev.server.job.AgentInfo;
import io.onedev.server.job.JobContext;
import io.onedev.server.search.entity.agent.AgentQuery;
import io.onedev.server.terminal.Shell;
import io.onedev.server.terminal.Terminal;
import io.onedev.server.terminal.TerminalManager;
import io.onedev.server.util.ExceptionUtils;
import io.onedev.server.util.PKCS12CertExtractor;
import io.onedev.server.util.usage.Usage;
import io.onedev.server.util.validation.annotation.DnsName;
import io.onedev.server.web.editable.annotation.Editable;
import io.onedev.server.web.editable.annotation.JobAuthorization;
import io.onedev.server.web.editable.annotation.ShowCondition;
@ExtensionPoint
@Editable
public abstract class JobExecutor implements Serializable {
private static final long serialVersionUID = 1L;
private boolean enabled = true;
private String name;
private String jobAuthorization;
private boolean shellAccessEnabled;
private boolean sitePublishEnabled;
private int cacheTTL = 7;
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
@Editable(order=10)
@DnsName //this name may be used as namespace/network prefixes, so put a strict constraint
@NotEmpty
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Editable(order=20, description="Enable this to allow project managers to open web terminal to running builds. "
+ "<b class='text-danger'>WARNING</b>: Users with shell access can take control of the node used by "
+ "the executor. You should configure job authorization below to make sure the executor can only be "
+ "used by trusted jobs if this option is enabled")
@ShowCondition("isTerminalSupported")
public boolean isShellAccessEnabled() {
return shellAccessEnabled;
}
public void setShellAccessEnabled(boolean shellAccessEnabled) {
this.shellAccessEnabled = shellAccessEnabled;
}
@Editable(order=30, description="Enable this to allow to run site publish step. OneDev will serve project "
+ "site files as is. To avoid XSS attack, make sure this executor can only be used by trusted jobs")
public boolean isSitePublishEnabled() {
return sitePublishEnabled;
}
public void setSitePublishEnabled(boolean sitePublishEnabled) {
this.sitePublishEnabled = sitePublishEnabled;
}
@SuppressWarnings("unused")
private static boolean isTerminalSupported() {
return OneDev.getInstance(TerminalManager.class).isTerminalSupported();
}
@Editable(order=10000, placeholder="Can be used by any jobs",
description="Optionally specify jobs authorized to use this executor")
@JobAuthorization
@Nullable
public String getJobAuthorization() {
return jobAuthorization;
}
public void setJobAuthorization(String jobAuthorization) {
this.jobAuthorization = jobAuthorization;
}
@Editable(order=50000, group="More Settings", description="Specify job cache TTL (time to live) by days. "
+ "OneDev may create multiple job caches even for same cache key to avoid cache conflicts when "
+ "running jobs concurrently. This setting tells OneDev to remove caches inactive for specified "
+ "time period to save disk space")
public int getCacheTTL() {
return cacheTTL;
}
public void setCacheTTL(int cacheTTL) {
this.cacheTTL = cacheTTL;
}
@Nullable
public abstract AgentQuery getAgentRequirement();
public abstract void execute(JobContext jobContext, TaskLogger jobLogger, @Nullable AgentInfo agentInfo);
public abstract void resume(JobContext jobContext);
public abstract Shell openShell(JobContext jobContext, Terminal terminal);
public boolean isPlaceholderAllowed() {
return true;
}
public Usage onDeleteProject(String projectPath) {
Usage usage = new Usage();
if (jobAuthorization != null
&& io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization).isUsingProject(projectPath)) {
usage.add("job requirement" );
}
return usage;
}
public void onMoveProject(String oldPath, String newPath) {
if (jobAuthorization != null) {
io.onedev.server.job.authorization.JobAuthorization parsedJobAuthorization =
io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization);
parsedJobAuthorization.onMoveProject(oldPath, newPath);
jobAuthorization = parsedJobAuthorization.toString();
}
}
public Usage onDeleteUser(String userName) {
Usage usage = new Usage();
if (jobAuthorization != null
&& io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization).isUsingUser(userName)) {
usage.add("job authorization" );
}
return usage;
}
public void onRenameUser(String oldName, String newName) {
if (jobAuthorization != null) {
io.onedev.server.job.authorization.JobAuthorization parsedJobAuthorization =
io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization);
parsedJobAuthorization.onRenameUser(oldName, newName);
jobAuthorization = parsedJobAuthorization.toString();
}
}
protected List<String> getTrustCertContent() {
List<String> trustCertContent = new ArrayList<>();
ServerConfig serverConfig = OneDev.getInstance(ServerConfig.class);
File keystoreFile = serverConfig.getKeystoreFile();
if (keystoreFile != null) {
String password = serverConfig.getKeystorePassword();
for (Map.Entry<String, String> entry: new PKCS12CertExtractor(keystoreFile, password).extact().entrySet())
trustCertContent.addAll(Splitter.on('\n').trimResults().splitToList(entry.getValue()));
}
if (serverConfig.getTrustCertsDir() != null) {
for (File file: serverConfig.getTrustCertsDir().listFiles()) {
if (file.isFile()) {
try {
trustCertContent.addAll(FileUtils.readLines(file, UTF_8));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
}
return trustCertContent;
}
protected String getErrorMessage(Exception exception) {
Response response = ExceptionUtils.buildResponse(exception);
if (response != null)
return response.getEntity().toString();
else
return Throwables.getStackTraceAsString(exception);
}
}
package io.onedev.server.model.support.administration.jobexecutor;
import com.google.common.base.Splitter;
import com.google.common.base.Throwables;
import io.onedev.commons.loader.ExtensionPoint;
import io.onedev.commons.utils.FileUtils;
import io.onedev.server.OneDev;
import io.onedev.server.ServerConfig;
import io.onedev.server.entitymanager.AgentManager;
import io.onedev.server.entitymanager.BuildManager;
import io.onedev.server.event.ListenerRegistry;
import io.onedev.server.event.project.build.BuildRunning;
import io.onedev.server.job.JobContext;
import io.onedev.server.model.Build;
import io.onedev.server.persistence.TransactionManager;
import io.onedev.server.terminal.TerminalManager;
import io.onedev.server.util.ExceptionUtils;
import io.onedev.server.util.PKCS12CertExtractor;
import io.onedev.server.util.usage.Usage;
import io.onedev.server.util.validation.annotation.DnsName;
import io.onedev.server.web.editable.annotation.Editable;
import io.onedev.server.web.editable.annotation.JobAuthorization;
import io.onedev.server.web.editable.annotation.ShowCondition;
import javax.annotation.Nullable;
import javax.validation.constraints.NotEmpty;
import javax.ws.rs.core.Response;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import static java.nio.charset.StandardCharsets.UTF_8;
@ExtensionPoint
@Editable
public abstract class JobExecutor implements Serializable {
private static final long serialVersionUID = 1L;
private boolean enabled = true;
private String name;
private String jobAuthorization;
private boolean shellAccessEnabled;
private boolean sitePublishEnabled;
private int cacheTTL = 7;
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
@Editable(order=10)
@DnsName //this name may be used as namespace/network prefixes, so put a strict constraint
@NotEmpty
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Editable(order=20, description="Enable this to allow project managers to open web terminal to running builds. "
+ "<b class='text-danger'>WARNING</b>: Users with shell access can take control of the node used by "
+ "the executor. You should configure job authorization below to make sure the executor can only be "
+ "used by trusted jobs if this option is enabled")
@ShowCondition("isTerminalSupported")
public boolean isShellAccessEnabled() {
return shellAccessEnabled;
}
public void setShellAccessEnabled(boolean shellAccessEnabled) {
this.shellAccessEnabled = shellAccessEnabled;
}
@Editable(order=30, description="Enable this to allow to run site publish step. OneDev will serve project "
+ "site files as is. To avoid XSS attack, make sure this executor can only be used by trusted jobs")
public boolean isSitePublishEnabled() {
return sitePublishEnabled;
}
public void setSitePublishEnabled(boolean sitePublishEnabled) {
this.sitePublishEnabled = sitePublishEnabled;
}
@SuppressWarnings("unused")
private static boolean isTerminalSupported() {
return OneDev.getInstance(TerminalManager.class).isTerminalSupported();
}
@Editable(order=10000, placeholder="Can be used by any jobs",
description="Optionally specify jobs authorized to use this executor")
@JobAuthorization
@Nullable
public String getJobAuthorization() {
return jobAuthorization;
}
public void setJobAuthorization(String jobAuthorization) {
this.jobAuthorization = jobAuthorization;
}
@Editable(order=50000, group="More Settings", description="Specify job cache TTL (time to live) by days. "
+ "OneDev may create multiple job caches even for same cache key to avoid cache conflicts when "
+ "running jobs concurrently. This setting tells OneDev to remove caches inactive for specified "
+ "time period to save disk space")
public int getCacheTTL() {
return cacheTTL;
}
public void setCacheTTL(int cacheTTL) {
this.cacheTTL = cacheTTL;
}
public abstract void execute(JobContext jobContext);
public boolean isPlaceholderAllowed() {
return true;
}
public Usage onDeleteProject(String projectPath) {
Usage usage = new Usage();
if (jobAuthorization != null
&& io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization).isUsingProject(projectPath)) {
usage.add("job requirement" );
}
return usage;
}
public void onMoveProject(String oldPath, String newPath) {
if (jobAuthorization != null) {
io.onedev.server.job.authorization.JobAuthorization parsedJobAuthorization =
io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization);
parsedJobAuthorization.onMoveProject(oldPath, newPath);
jobAuthorization = parsedJobAuthorization.toString();
}
}
public Usage onDeleteUser(String userName) {
Usage usage = new Usage();
if (jobAuthorization != null
&& io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization).isUsingUser(userName)) {
usage.add("job authorization" );
}
return usage;
}
public void onRenameUser(String oldName, String newName) {
if (jobAuthorization != null) {
io.onedev.server.job.authorization.JobAuthorization parsedJobAuthorization =
io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization);
parsedJobAuthorization.onRenameUser(oldName, newName);
jobAuthorization = parsedJobAuthorization.toString();
}
}
protected void notifyJobRunning(Long buildId, @Nullable Long agentId) {
OneDev.getInstance(TransactionManager.class).run(() -> {
BuildManager buildManager = OneDev.getInstance(BuildManager.class);
Build build = buildManager.load(buildId);
build.setStatus(Build.Status.RUNNING);
build.setRunningDate(new Date());
if (agentId != null)
build.setAgent(OneDev.getInstance(AgentManager.class).load(agentId));
buildManager.save(build);
OneDev.getInstance(ListenerRegistry.class).post(new BuildRunning(build));
});
}
protected List<String> getTrustCertContent() {
List<String> trustCertContent = new ArrayList<>();
ServerConfig serverConfig = OneDev.getInstance(ServerConfig.class);
File keystoreFile = serverConfig.getKeystoreFile();
if (keystoreFile != null) {
String password = serverConfig.getKeystorePassword();
for (Map.Entry<String, String> entry: new PKCS12CertExtractor(keystoreFile, password).extact().entrySet())
trustCertContent.addAll(Splitter.on('\n').trimResults().splitToList(entry.getValue()));
}
if (serverConfig.getTrustCertsDir() != null) {
for (File file: serverConfig.getTrustCertsDir().listFiles()) {
if (file.isFile()) {
try {
trustCertContent.addAll(FileUtils.readLines(file, UTF_8));
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
}
return trustCertContent;
}
protected String getErrorMessage(Exception exception) {
Response response = ExceptionUtils.buildResponse(exception);
if (response != null)
return response.getEntity().toString();
else
return Throwables.getStackTraceAsString(exception);
}
}
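For orientation, the contract above boils down to a single abstract execute(JobContext) method; resource allocation and dispatch now live inside each executor. A minimal illustrative subclass (NoOpExecutor is hypothetical, not part of this commit) could look like:
import io.onedev.server.job.JobContext;
import io.onedev.server.web.editable.annotation.Editable;

@Editable(name="No-op Executor")
public class NoOpExecutor extends JobExecutor {

    private static final long serialVersionUID = 1L;

    @Override
    public void execute(JobContext jobContext) {
        // a real executor would first acquire resources (see the
        // ResourceAllocator.runAgentJob calls in the remote executors
        // further down) and then hand the job to JobManager.runJobLocal
    }
}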

View File

@ -1,29 +1,5 @@
package io.onedev.server.search.entity.agent;
import static io.onedev.server.model.Agent.NAME_CPU;
import static io.onedev.server.model.Agent.NAME_IP_ADDRESS;
import static io.onedev.server.model.Agent.NAME_MEMORY;
import static io.onedev.server.model.Agent.NAME_NAME;
import static io.onedev.server.model.Agent.NAME_OS_NAME;
import static io.onedev.server.model.Agent.NAME_OS_ARCH;
import static io.onedev.server.model.Agent.NAME_OS_VERSION;
import static io.onedev.server.model.Agent.ORDER_FIELDS;
import static io.onedev.server.model.Agent.QUERY_FIELDS;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;
import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;
import io.onedev.commons.codeassist.AntlrUtils;
import io.onedev.commons.utils.ExplicitException;
import io.onedev.server.OneDev;
@ -32,20 +8,19 @@ import io.onedev.server.model.Agent;
import io.onedev.server.search.entity.EntityQuery;
import io.onedev.server.search.entity.EntitySort;
import io.onedev.server.search.entity.EntitySort.Direction;
import io.onedev.server.search.entity.agent.AgentQueryParser.AndCriteriaContext;
import io.onedev.server.search.entity.agent.AgentQueryParser.CriteriaContext;
import io.onedev.server.search.entity.agent.AgentQueryParser.FieldOperatorValueCriteriaContext;
import io.onedev.server.search.entity.agent.AgentQueryParser.NotCriteriaContext;
import io.onedev.server.search.entity.agent.AgentQueryParser.OperatorCriteriaContext;
import io.onedev.server.search.entity.agent.AgentQueryParser.OperatorValueCriteriaContext;
import io.onedev.server.search.entity.agent.AgentQueryParser.OrCriteriaContext;
import io.onedev.server.search.entity.agent.AgentQueryParser.OrderContext;
import io.onedev.server.search.entity.agent.AgentQueryParser.ParensCriteriaContext;
import io.onedev.server.search.entity.agent.AgentQueryParser.QueryContext;
import io.onedev.server.search.entity.agent.AgentQueryParser.*;
import io.onedev.server.util.criteria.AndCriteria;
import io.onedev.server.util.criteria.Criteria;
import io.onedev.server.util.criteria.NotCriteria;
import io.onedev.server.util.criteria.OrCriteria;
import org.antlr.v4.runtime.*;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static io.onedev.server.model.Agent.*;
public class AgentQuery extends EntityQuery<Agent> {
@ -154,19 +129,9 @@ public class AgentQuery extends EntityQuery<Agent> {
return new OsArchCriteria(value);
case NAME_IP_ADDRESS:
return new IpAddressCriteria(value);
case NAME_CPU:
return new CpuCriteria(value, operator);
case NAME_MEMORY:
return new MemoryCriteria(value, operator);
default:
return new AttributeCriteria(fieldName, value);
}
case AgentQueryLexer.IsGreaterThan:
case AgentQueryLexer.IsLessThan:
if (fieldName.equals(NAME_CPU))
return new CpuCriteria(value, operator);
else
return new MemoryCriteria(value, operator);
default:
throw new IllegalStateException();
}
@ -227,15 +192,6 @@ public class AgentQuery extends EntityQuery<Agent> {
Collection<String> attributeNames = OneDev.getInstance(AgentAttributeManager.class).getAttributeNames();
if (!QUERY_FIELDS.contains(fieldName) && !attributeNames.contains(fieldName))
throw new ExplicitException("Attribute not found: " + fieldName);
switch (operator) {
case AgentQueryLexer.IsGreaterThan:
case AgentQueryLexer.IsLessThan:
if (!fieldName.equals(NAME_CPU)
&& !fieldName.equals(NAME_MEMORY)) {
throw newOperatorException(fieldName, operator);
}
break;
}
}
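With the CpuCriteria/MemoryCriteria classes deleted below, IsGreaterThan/IsLessThan no longer pass field validation, and agent queries are built purely from name, OS and attribute fields. An illustrative parse (the field and value here are made up; the second argument mirrors the AgentQuery.parse(agentQuery, true) calls elsewhere in this commit):
// "OS Name" is one of the remaining built-in query fields
AgentQuery query = AgentQuery.parse("\"OS Name\" is \"Linux\"", true);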
public static String getRuleName(int rule) {

View File

@ -1,54 +0,0 @@
package io.onedev.server.search.entity.agent;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.From;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Predicate;
import io.onedev.server.model.Agent;
import io.onedev.server.search.entity.EntityQuery;
import io.onedev.server.util.criteria.Criteria;
public class CpuCriteria extends Criteria<Agent> {
private static final long serialVersionUID = 1L;
private int value;
private int operator;
public CpuCriteria(String value, int operator) {
this.value = EntityQuery.getIntValue(value);
this.operator = operator;
}
@Override
public Predicate getPredicate(CriteriaQuery<?> query, From<Agent, Agent> from, CriteriaBuilder builder) {
Path<Integer> attribute = from.get(Agent.PROP_CPU);
if (operator == AgentQueryLexer.IsGreaterThan)
return builder.greaterThan(attribute, value);
else if (operator == AgentQueryLexer.IsLessThan)
return builder.lessThan(attribute, value);
else
return builder.equal(attribute, value);
}
@Override
public boolean matches(Agent agent) {
if (operator == AgentQueryLexer.IsGreaterThan)
return agent.getCpu() > value;
else if (operator == AgentQueryLexer.IsLessThan)
return agent.getCpu() < value;
else
return agent.getCpu() == value;
}
@Override
public String toStringWithoutParens() {
return quote(Agent.NAME_CPU) + " "
+ AgentQuery.getRuleName(operator) + " "
+ quote(String.valueOf(value));
}
}

View File

@ -1,54 +0,0 @@
package io.onedev.server.search.entity.agent;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.From;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Predicate;
import io.onedev.server.model.Agent;
import io.onedev.server.search.entity.EntityQuery;
import io.onedev.server.util.criteria.Criteria;
public class MemoryCriteria extends Criteria<Agent> {
private static final long serialVersionUID = 1L;
private int value;
private int operator;
public MemoryCriteria(String value, int operator) {
this.value = EntityQuery.getIntValue(value);
this.operator = operator;
}
@Override
public Predicate getPredicate(CriteriaQuery<?> query, From<Agent, Agent> from, CriteriaBuilder builder) {
Path<Integer> attribute = from.get(Agent.PROP_MEMORY);
if (operator == AgentQueryLexer.IsGreaterThan)
return builder.greaterThan(attribute, value);
else if (operator == AgentQueryLexer.IsLessThan)
return builder.lessThan(attribute, value);
else
return builder.equal(attribute, value);
}
@Override
public boolean matches(Agent agent) {
if (operator == AgentQueryLexer.IsGreaterThan)
return agent.getMemory() > value;
else if (operator == AgentQueryLexer.IsLessThan)
return agent.getMemory() < value;
else
return agent.getMemory() == value;
}
@Override
public String toStringWithoutParens() {
return quote(Agent.NAME_MEMORY) + " "
+ AgentQuery.getRuleName(operator) + " "
+ quote(String.valueOf(value));
}
}

View File

@ -19,27 +19,32 @@ import javax.inject.Inject;
import javax.inject.Singleton;
import io.onedev.server.ServerConfig;
import io.onedev.server.entitymanager.SettingManager;
import io.onedev.server.security.SecurityUtils;
@Singleton
public class DefaultWorkExecutor implements WorkExecutor {
private final SettingManager settingManager;
private final ExecutorService executorService;
private final Map<String, Collection<PrioritizedCallable<?>>> runnings = new HashMap<>();
private final Map<String, Collection<WorkFuture<?>>> waitings = new HashMap<>();
private final int concurrency;
@Inject
public DefaultWorkExecutor(ExecutorService executorService, ServerConfig serverConfig) {
public DefaultWorkExecutor(ExecutorService executorService, SettingManager settingManager) {
this.executorService = executorService;
concurrency = serverConfig.getServerCpu() / 1000;
this.settingManager = settingManager;
}
private int getConcurrency() {
return settingManager.getPerformanceSetting().getCpuIntensiveTaskConcurrency();
}
private synchronized void check() {
if (concurrency > runnings.size()) {
if (getConcurrency() > runnings.size()) {
Map<String, Integer> averagePriorities = new HashMap<>();
for (Map.Entry<String, Collection<WorkFuture<?>>> entry: waitings.entrySet()) {
int totalPriorities = 0;
@ -64,7 +69,7 @@ public class DefaultWorkExecutor implements WorkExecutor {
runningsOfGroup.add(future.callable);
}
runnings.put(groupId, runningsOfGroup);
if (runnings.size() == concurrency)
if (runnings.size() == getConcurrency())
break;
}
}
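Concurrency is now re-read on every check, so administrators can tune it at runtime instead of it being fixed at startup from server CPU. A plausible minimal shape of the setting consulted above (assumed here; the real PerformanceSetting class is defined elsewhere in this commit):
import java.io.Serializable;

public class PerformanceSetting implements Serializable {

    private static final long serialVersionUID = 1L;

    // default to one CPU-intensive task per available processor
    private int cpuIntensiveTaskConcurrency = Runtime.getRuntime().availableProcessors();

    public int getCpuIntensiveTaskConcurrency() {
        return cpuIntensiveTaskConcurrency;
    }

    public void setCpuIntensiveTaskConcurrency(int cpuIntensiveTaskConcurrency) {
        this.cpuIntensiveTaskConcurrency = cpuIntensiveTaskConcurrency;
    }
}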

View File

@ -1,13 +1,7 @@
package io.onedev.server.web.behavior;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import io.onedev.commons.codeassist.FenceAware;
import io.onedev.commons.codeassist.InputSuggestion;
import io.onedev.commons.codeassist.grammar.LexerRuleRefElementSpec;
@ -26,6 +20,11 @@ import io.onedev.server.util.DateUtils;
import io.onedev.server.web.behavior.inputassist.ANTLRAssistBehavior;
import io.onedev.server.web.behavior.inputassist.InputAssistBehavior;
import io.onedev.server.web.util.SuggestionUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@SuppressWarnings("serial")
public class AgentQueryBehavior extends ANTLRAssistBehavior {
@ -46,6 +45,7 @@ public class AgentQueryBehavior extends ANTLRAssistBehavior {
@Override
protected List<InputSuggestion> match(String matchWith) {
AgentManager agentManager = OneDev.getInstance(AgentManager.class);
AgentAttributeManager attributeManager = OneDev.getInstance(AgentAttributeManager.class);
if ("criteriaField".equals(spec.getLabel())) {
List<String> fields = new ArrayList<>(Agent.QUERY_FIELDS);
@ -73,14 +73,19 @@ public class AgentQueryBehavior extends ANTLRAssistBehavior {
String fieldName = AgentQuery.getValue(fieldElements.get(0).getMatchedText());
try {
AgentQuery.checkField(fieldName, operator);
if (fieldName.equals(Agent.NAME_OS_NAME))
return SuggestionUtils.suggest(OneDev.getInstance(AgentManager.class).getOsNames(), matchWith);
else if (fieldName.equals(Agent.NAME_NAME))
if (fieldName.equals(Agent.NAME_OS_NAME)) {
var osNames = new ArrayList<>(agentManager.getOsNames());
Collections.sort(osNames);
return SuggestionUtils.suggest(osNames, matchWith);
} else if (fieldName.equals(Agent.NAME_NAME)) {
return SuggestionUtils.suggestAgents(matchWith);
else if (fieldName.equals(Agent.NAME_OS_ARCH))
return SuggestionUtils.suggest(OneDev.getInstance(AgentManager.class).getOsArchs(), matchWith);
else
} else if (fieldName.equals(Agent.NAME_OS_ARCH)) {
var osArchs = new ArrayList<>(agentManager.getOsArchs());
Collections.sort(osArchs);
return SuggestionUtils.suggest(osArchs, matchWith);
} else {
return null;
}
} catch (ExplicitException ex) {
}
}

View File

@ -846,46 +846,6 @@ class AgentListPanel extends Panel {
});
columns.add(new AbstractColumn<Agent, Void>(Model.of("CPU")) {
@Override
public void populateItem(Item<ICellPopulator<Agent>> cellItem, String componentId, IModel<Agent> rowModel) {
Agent agent = rowModel.getObject();
cellItem.add(new Label(componentId, agent.getCpu()));
}
@Override
public String getCssClass() {
return "d-none d-xl-table-cell";
}
@Override
public Component getHeader(String componentId) {
return new Fragment(componentId, "cpuHeaderFrag", AgentListPanel.this);
}
});
columns.add(new AbstractColumn<Agent, Void>(Model.of("Memory")) {
@Override
public void populateItem(Item<ICellPopulator<Agent>> cellItem, String componentId, IModel<Agent> rowModel) {
Agent agent = rowModel.getObject();
cellItem.add(new Label(componentId, agent.getMemory()));
}
@Override
public String getCssClass() {
return "d-none d-xl-table-cell";
}
@Override
public Component getHeader(String componentId) {
return new Fragment(componentId, "memoryHeaderFrag", AgentListPanel.this);
}
});
columns.add(new AbstractColumn<Agent, Void>(Model.of("Temporal")) {
@Override

View File

@ -10,8 +10,6 @@
<th class="d-none d-xl-table-cell">IP Address</th>
<th class="d-none d-xl-table-cell">OS Version</th>
<th class="d-none d-xl-table-cell">OS Arch</th>
<th class="d-none d-xl-table-cell"><span class="mr-1">CPU</span> <a title="CPU capability in millis. This is normally (CPU cores)*1000"><wicket:svg href="question-o" class="icon icon-sm"/></a></th>
<th class="d-none d-xl-table-cell"><span class="mr-1">Memory</span> <a title="Physical memory in mega bytes"><wicket:svg href="question-o" class="icon icon-sm"/></a></th>
<th><span class="mr-1">Temporal</span> <a title="Temporal agent will be removed as soon as it goes offline"><wicket:svg href="question-o" class="icon icon-sm"/></a></th>
<th>Status</th>
</thead>
@ -32,12 +30,6 @@
<td class="d-none d-xl-table-cell">
<span wicket:id="osArch"></span>
</td>
<td class="d-none d-xl-table-cell">
<span wicket:id="cpu"></span>
</td>
<td class="d-none d-xl-table-cell">
<span wicket:id="memory"></span>
</td>
<td class="d-none d-xl-table-cell">
<span wicket:id="temporal"></span>
</td>

View File

@ -94,8 +94,6 @@ public class AgentOverviewPage extends AgentDetailPage {
add(new Label("ipAddress", getAgent().getIpAddress()));
add(new Label("osVersion", getAgent().getOsVersion()));
add(new Label("osArch", getAgent().getOsArch()));
add(new Label("cpu", getAgent().getCpu()));
add(new Label("memory", getAgent().getMemory()));
add(new Label("temporal", getAgent().isTemporal()));
add(new AgentStatusBadge("status", agentModel));

View File

@ -6,7 +6,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<build>
<resources>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.authenticator.ldap.LdapModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.buildspec.gradle.GradleModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.buildspec.maven.MavenModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.buildspec.node.NodePluginModule</moduleClass>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.executor.kubernetes.KubernetesModule</moduleClass>

View File

@ -1,45 +1,5 @@
package io.onedev.server.plugin.executor.kubernetes;
import static io.onedev.k8shelper.KubernetesHelper.ENV_JOB_TOKEN;
import static io.onedev.k8shelper.KubernetesHelper.ENV_OS_INFO;
import static io.onedev.k8shelper.KubernetesHelper.ENV_SERVER_URL;
import static io.onedev.k8shelper.KubernetesHelper.IMAGE_REPO_PREFIX;
import static io.onedev.k8shelper.KubernetesHelper.LOG_END_MESSAGE;
import static io.onedev.k8shelper.KubernetesHelper.parseStepPosition;
import static io.onedev.k8shelper.KubernetesHelper.stringifyStepPosition;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
import javax.validation.constraints.NotEmpty;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.lang.SerializationUtils;
import org.apache.commons.lang3.SystemUtils;
import org.apache.commons.text.WordUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
@ -47,44 +7,27 @@ import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import io.onedev.agent.job.FailedException;
import io.onedev.commons.utils.ExceptionUtils;
import io.onedev.commons.utils.ExplicitException;
import io.onedev.commons.utils.FileUtils;
import io.onedev.commons.utils.StringUtils;
import io.onedev.commons.utils.TaskLogger;
import io.onedev.commons.utils.*;
import io.onedev.commons.utils.command.Commandline;
import io.onedev.commons.utils.command.ExecutionResult;
import io.onedev.commons.utils.command.LineConsumer;
import io.onedev.k8shelper.Action;
import io.onedev.k8shelper.BuildImageFacade;
import io.onedev.k8shelper.CommandFacade;
import io.onedev.k8shelper.CompositeFacade;
import io.onedev.k8shelper.ExecuteCondition;
import io.onedev.k8shelper.KubernetesHelper;
import io.onedev.k8shelper.LeafFacade;
import io.onedev.k8shelper.LeafVisitor;
import io.onedev.k8shelper.OsContainer;
import io.onedev.k8shelper.OsExecution;
import io.onedev.k8shelper.OsInfo;
import io.onedev.k8shelper.RegistryLoginFacade;
import io.onedev.k8shelper.RunContainerFacade;
import io.onedev.k8shelper.*;
import io.onedev.server.OneDev;
import io.onedev.server.ServerConfig;
import io.onedev.server.buildspec.Service;
import io.onedev.server.buildspec.job.EnvVar;
import io.onedev.server.cluster.ClusterManager;
import io.onedev.server.entitymanager.SettingManager;
import io.onedev.server.job.AgentInfo;
import io.onedev.server.job.JobContext;
import io.onedev.server.job.ResourceAllocator;
import io.onedev.server.job.JobManager;
import io.onedev.server.job.JobRunnable;
import io.onedev.server.model.support.RegistryLogin;
import io.onedev.server.model.support.administration.jobexecutor.JobExecutor;
import io.onedev.server.model.support.administration.jobexecutor.NodeSelectorEntry;
import io.onedev.server.model.support.administration.jobexecutor.ServiceLocator;
import io.onedev.server.model.support.inputspec.SecretInput;
import io.onedev.server.plugin.executor.kubernetes.KubernetesExecutor.TestData;
import io.onedev.server.search.entity.agent.AgentQuery;
import io.onedev.server.terminal.CommandlineShell;
import io.onedev.server.terminal.Shell;
import io.onedev.server.terminal.Terminal;
@ -94,6 +37,32 @@ import io.onedev.server.web.editable.annotation.Editable;
import io.onedev.server.web.editable.annotation.Horizontal;
import io.onedev.server.web.editable.annotation.OmitName;
import io.onedev.server.web.util.Testable;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.lang.SerializationUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.SystemUtils;
import org.apache.commons.text.WordUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;
import javax.annotation.Nullable;
import javax.validation.constraints.NotEmpty;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.*;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static io.onedev.k8shelper.KubernetesHelper.*;
@Editable(order=KubernetesExecutor.ORDER, description="This executor runs build jobs as pods in a kubernetes cluster. "
+ "No any agents are required."
@ -106,8 +75,6 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
static final int ORDER = 40;
private static final int POD_WATCH_TIMEOUT = 60;
private static final Logger logger = LoggerFactory.getLogger(KubernetesExecutor.class);
private static final long NAMESPACE_DELETION_TIMEOUT = 120;
@ -128,6 +95,14 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
private boolean mountContainerSock;
private String cpuRequest = "250m";
private String memoryRequest = "256Mi";
private String cpuLimit;
private String memoryLimit;
private transient volatile OsInfo osInfo;
private transient volatile String containerName;
@ -173,10 +148,54 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
return mountContainerSock;
}
@Editable(order=400, description = "Specify cpu request for jobs using this executor. " +
"Check <a href='https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' target='_blank'>Kubernetes resource management</a> for details")
@NotEmpty
public String getCpuRequest() {
return cpuRequest;
}
public void setCpuRequest(String cpuRequest) {
this.cpuRequest = cpuRequest;
}
@Editable(order=500, description = "Specify memory request for jobs using this executor. " +
"Check <a href='https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' target='_blank'>Kubernetes resource management</a> for details")
@NotEmpty
public String getMemoryRequest() {
return memoryRequest;
}
public void setMemoryRequest(String memoryRequest) {
this.memoryRequest = memoryRequest;
}
public void setMountContainerSock(boolean mountContainerSock) {
this.mountContainerSock = mountContainerSock;
}
@Editable(order=24990, group="More Settings", placeholder = "No limit", description = "" +
"Optionally specify cpu limit for jobs using this executor. " +
"Check <a href='https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' target='_blank'>Kubernetes resource management</a> for details")
public String getCpuLimit() {
return cpuLimit;
}
public void setCpuLimit(String cpuLimit) {
this.cpuLimit = cpuLimit;
}
@Editable(order=24995, group="More Settings", placeholder = "No limit", description = "" +
"Optionally specify memory limit for jobs using this executor. " +
"Check <a href='https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/' target='_blank'>Kubernetes resource management</a> for details")
public String getMemoryLimit() {
return memoryLimit;
}
public void setMemoryLimit(String memoryLimit) {
this.memoryLimit = memoryLimit;
}
@Editable(order=25000, group="More Settings", description="Optionally specify where to run service pods "
+ "specified in job. The first matching locator will be used. If no any locators are found, "
+ "node selector of the executor will be used")
@ -212,13 +231,103 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
}
@Override
public AgentQuery getAgentRequirement() {
return null;
}
public void execute(JobContext jobContext) {
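// pick a random cluster member to dispatch this job, spreading
// Kubernetes-bound jobs across servers instead of always running
// them from the node that scheduled the build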
var servers = new ArrayList<>(OneDev.getInstance(ClusterManager.class)
.getHazelcastInstance().getCluster().getMembers());
var serverUUID = servers.get(RandomUtils.nextInt(0, servers.size())).getUuid();
getJobManager().runJob(serverUUID, ()-> {
getJobManager().runJobLocal(jobContext, new JobRunnable() {
@Override
public void execute(JobContext jobContext, TaskLogger jobLogger, AgentInfo agentInfo) {
execute(jobLogger, jobContext);
private static final long serialVersionUID = 1L;
@Override
public void run(TaskLogger jobLogger) {
execute(jobLogger, jobContext);
}
@Override
public void resume(JobContext jobContext) {
if (osInfo != null) {
Commandline kubectl = newKubeCtl();
kubectl.addArgs("exec", "job", "--container", "sidecar", "--namespace", getNamespace(jobContext), "--");
if (osInfo.isLinux())
kubectl.addArgs("touch", "/onedev-build/continue");
else
kubectl.addArgs("cmd", "-c", "copy", "NUL", "C:\\onedev-build\\continue");
kubectl.execute(new LineConsumer() {
@Override
public void consume(String line) {
logger.debug(line);
}
}, new LineConsumer() {
@Override
public void consume(String line) {
logger.error("Kubernetes: " + line);
}
}).checkReturnCode();
}
}
@Override
public Shell openShell(JobContext jobContext, Terminal terminal) {
String containerNameCopy = containerName;
if (osInfo != null && containerNameCopy != null) {
Commandline kubectl = newKubeCtl();
kubectl.addArgs("exec", "-it", POD_NAME, "-c", containerNameCopy,
"--namespace", getNamespace(jobContext), "--");
String workingDir;
if (containerNameCopy.startsWith("step-")) {
List<Integer> stepPosition = parseStepPosition(containerNameCopy.substring("step-".length()));
LeafFacade step = Preconditions.checkNotNull(jobContext.getStep(stepPosition));
if (step instanceof RunContainerFacade)
workingDir = ((RunContainerFacade)step).getContainer(osInfo).getWorkingDir();
else if (osInfo.isLinux())
workingDir = "/onedev-build/workspace";
else
workingDir = "C:\\onedev-build\\workspace";
} else if (osInfo.isLinux()) {
workingDir = "/onedev-build/workspace";
} else {
workingDir = "C:\\onedev-build\\workspace";
}
String[] shell = null;
if (containerNameCopy.startsWith("step-")) {
List<Integer> stepPosition = parseStepPosition(containerNameCopy.substring("step-".length()));
LeafFacade step = Preconditions.checkNotNull(jobContext.getStep(stepPosition));
if (step instanceof CommandFacade)
shell = ((CommandFacade)step).getShell(osInfo.isWindows(), workingDir);
}
if (shell == null) {
if (workingDir != null) {
if (osInfo.isLinux())
shell = new String[]{"sh", "-c", String.format("cd '%s' && sh", workingDir)};
else
shell = new String[]{"cmd", "/c", String.format("cd %s && cmd", workingDir)};
} else if (osInfo.isLinux()) {
shell = new String[]{"sh"};
} else {
shell = new String[]{"cmd"};
}
}
kubectl.addArgs(shell);
return new CommandlineShell(terminal, kubectl);
} else {
throw new ExplicitException("Shell not ready");
}
}
});
});
}
private JobManager getJobManager() {
return OneDev.getInstance(JobManager.class);
}
private String getNamespace(@Nullable JobContext jobContext) {
@ -230,83 +339,6 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
}
}
@Override
public void resume(JobContext jobContext) {
if (osInfo != null) {
Commandline kubectl = newKubeCtl();
kubectl.addArgs("exec", "job", "--container", "sidecar", "--namespace", getNamespace(jobContext), "--");
if (osInfo.isLinux())
kubectl.addArgs("touch", "/onedev-build/continue");
else
kubectl.addArgs("cmd", "-c", "copy", "NUL", "C:\\onedev-build\\continue");
kubectl.execute(new LineConsumer() {
@Override
public void consume(String line) {
logger.debug(line);
}
}, new LineConsumer() {
@Override
public void consume(String line) {
logger.error("Kubernetes: " + line);
}
}).checkReturnCode();
}
}
@Override
public Shell openShell(JobContext jobContext, Terminal terminal) {
String containerNameCopy = containerName;
if (osInfo != null && containerNameCopy != null) {
Commandline kubectl = newKubeCtl();
kubectl.addArgs("exec", "-it", POD_NAME, "-c", containerNameCopy,
"--namespace", getNamespace(jobContext), "--");
String workingDir;
if (containerNameCopy.startsWith("step-")) {
List<Integer> stepPosition = parseStepPosition(containerNameCopy.substring("step-".length()));
LeafFacade step = Preconditions.checkNotNull(jobContext.getStep(stepPosition));
if (step instanceof RunContainerFacade)
workingDir = ((RunContainerFacade)step).getContainer(osInfo).getWorkingDir();
else if (osInfo.isLinux())
workingDir = "/onedev-build/workspace";
else
workingDir = "C:\\onedev-build\\workspace";
} else if (osInfo.isLinux()) {
workingDir = "/onedev-build/workspace";
} else {
workingDir = "C:\\onedev-build\\workspace";
}
String[] shell = null;
if (containerNameCopy.startsWith("step-")) {
List<Integer> stepPosition = parseStepPosition(containerNameCopy.substring("step-".length()));
LeafFacade step = Preconditions.checkNotNull(jobContext.getStep(stepPosition));
if (step instanceof CommandFacade)
shell = ((CommandFacade)step).getShell(osInfo.isWindows(), workingDir);
}
if (shell == null) {
if (workingDir != null) {
if (osInfo.isLinux())
shell = new String[]{"sh", "-c", String.format("cd '%s' && sh", workingDir)};
else
shell = new String[]{"cmd", "/c", String.format("cd %s && cmd", workingDir)};
} else if (osInfo.isLinux()) {
shell = new String[]{"sh"};
} else {
shell = new String[]{"cmd"};
}
}
kubectl.addArgs(shell);
return new CommandlineShell(terminal, kubectl);
} else {
throw new ExplicitException("Shell not ready");
}
}
@Override
public boolean isPlaceholderAllowed() {
return false;
@ -419,7 +451,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
}).checkReturnCode();
}
private String createNamespace(String namespace, @Nullable JobContext jobContext, TaskLogger jobLogger) {
private void createNamespace(String namespace, @Nullable JobContext jobContext, TaskLogger jobLogger) {
AtomicBoolean namespaceExists = new AtomicBoolean(false);
Commandline kubectl = newKubeCtl();
kubectl.addArgs("get", "namespaces", "--field-selector", "metadata.name=" + namespace,
@ -460,8 +492,6 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
}
}).checkReturnCode();
return namespace;
}
private OsInfo getBaselineOsInfo(Collection<NodeSelectorEntry> nodeSelector, TaskLogger jobLogger) {
@ -590,8 +620,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
File keystoreFile = serverConfig.getKeystoreFile();
if (keystoreFile != null) {
String password = serverConfig.getKeystorePassword();
for (Map.Entry<String, String> entry: new PKCS12CertExtractor(keystoreFile, password).extact().entrySet())
configMapData.put(entry.getKey(), entry.getValue());
configMapData.putAll(new PKCS12CertExtractor(keystoreFile, password).extact());
}
File trustCertsDir = serverConfig.getTrustCertsDir();
if (trustCertsDir != null) {
@ -633,12 +662,21 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
}
Map<String, Object> podSpec = new LinkedHashMap<>();
Map<Object, Object> containerSpec = CollectionUtils.newHashMap(
Map<Object, Object> containerSpec = CollectionUtils.newLinkedHashMap(
"name", "default",
"image", jobService.getImage());
containerSpec.put("resources", CollectionUtils.newLinkedHashMap("requests", CollectionUtils.newLinkedHashMap(
"cpu", jobService.getCpuRequirement() + "m",
"memory", jobService.getMemoryRequirement() + "Mi")));
Map<Object, Object> resourcesSpec = CollectionUtils.newLinkedHashMap(
"requests", CollectionUtils.newLinkedHashMap(
"cpu", getCpuRequest(),
"memory", getMemoryRequest()));
Map<Object, Object> limitsSpec = new LinkedHashMap<>();
if (getCpuLimit() != null)
limitsSpec.put("cpu", getCpuLimit());
if (getMemoryLimit() != null)
limitsSpec.put("memory", getMemoryLimit());
if (!limitsSpec.isEmpty())
resourcesSpec.put("limits", limitsSpec);
containerSpec.put("resources", resourcesSpec);
List<Map<Object, Object>> envs = new ArrayList<>();
for (EnvVar envVar: jobService.getEnvVars()) {
envs.add(CollectionUtils.newLinkedHashMap(
@ -804,7 +842,6 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
}).checkReturnCode();
String namespace = getNamespace(jobContext);
if (getClusterRole() != null)
createClusterRoleBinding(namespace, jobLogger);
@ -812,7 +849,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
try {
createNamespace(namespace, jobContext, jobLogger);
jobLogger.log(String.format("Executing job (executor: %s, namespace: %s)...",
jobLogger.log(String.format("Preparing job (executor: %s, namespace: %s)...",
getName(), namespace));
try {
String imagePullSecretName = createImagePullSecret(namespace, jobLogger);
@ -1014,6 +1051,21 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
stepContainerSpec.put("args", Lists.newArrayList("/c", containerCommandHome + "\\" + positionStr + ".bat"));
}
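// step containers deliberately request zero cpu/memory: only the
// sidecar carries the executor-level request, so the scheduler reserves
// capacity once per job rather than once per step, while the optional
// limits still cap every step container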
Map<Object, Object> requestsSpec = CollectionUtils.newLinkedHashMap(
"cpu", "0",
"memory", "0");
Map<Object, Object> limitsSpec = new LinkedHashMap<>();
if (getCpuLimit() != null)
limitsSpec.put("cpu", getCpuLimit());
if (getMemoryLimit() != null)
limitsSpec.put("memory", getMemoryLimit());
if (!limitsSpec.isEmpty()) {
stepContainerSpec.put(
"resources", CollectionUtils.newLinkedHashMap(
"limits", limitsSpec,
"requests", requestsSpec));
}
containerSpecs.add(stepContainerSpec);
return null;
@ -1063,11 +1115,9 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
"env", commonEnvs,
"volumeMounts", commonVolumeMounts);
if (jobContext != null) {
sidecarContainerSpec.put("resources", CollectionUtils.newLinkedHashMap("requests", CollectionUtils.newLinkedHashMap(
"cpu", jobContext.getResourceRequirements().get(ResourceAllocator.CPU) + "m",
"memory", jobContext.getResourceRequirements().get(ResourceAllocator.MEMORY) + "Mi")));
}
sidecarContainerSpec.put("resources", CollectionUtils.newLinkedHashMap("requests", CollectionUtils.newLinkedHashMap(
"cpu", getCpuRequest(),
"memory", getMemoryRequest())));
containerSpecs.add(sidecarContainerSpec);
containerNames.add("sidecar");
@ -1126,7 +1176,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
AtomicReference<String> nodeNameRef = new AtomicReference<>(null);
watchPod(namespace, POD_NAME, new AbortChecker() {
watchPod(namespace, new AbortChecker() {
@Override
public Abort check(String nodeName, Collection<JsonNode> containerStatusNodes) {
@ -1140,6 +1190,9 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
}, jobLogger);
if (jobContext != null)
notifyJobRunning(jobContext.getBuildId(), null);
String nodeName = Preconditions.checkNotNull(nodeNameRef.get());
jobLogger.log("Running job on node " + nodeName + "...");
@ -1151,7 +1204,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
logger.debug("Waiting for start of container (pod: {}, container: {})...",
podFQN, containerName);
watchPod(namespace, POD_NAME, new AbortChecker() {
watchPod(namespace, new AbortChecker() {
@Override
public Abort check(String nodeName, Collection<JsonNode> containerStatusNodes) {
@ -1191,10 +1244,10 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
collectContainerLog(namespace, POD_NAME, containerName, LOG_END_MESSAGE, jobLogger);
logger.debug("Waiting for stop of container (pod: {})...",
logger.debug("Waiting for stop of container (pod: {}, container: {})...",
podFQN, containerName);
watchPod(namespace, POD_NAME, new AbortChecker() {
watchPod(namespace, new AbortChecker() {
@Override
public Abort check(String nodeName, Collection<JsonNode> containerStatusNodes) {
@ -1333,7 +1386,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
}
}
private void watchPod(String namespace, String podName, AbortChecker abortChecker, TaskLogger jobLogger) {
private void watchPod(String namespace, AbortChecker abortChecker, TaskLogger jobLogger) {
Commandline kubectl = newKubeCtl();
ObjectMapper mapper = OneDev.getInstance(ObjectMapper.class);
@ -1341,9 +1394,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
AtomicReference<Abort> abortRef = new AtomicReference<>(null);
StringBuilder json = new StringBuilder();
kubectl.addArgs("get", "pod", podName, "-n", namespace, "--watch", "-o", "json");
kubectl.timeout(POD_WATCH_TIMEOUT);
kubectl.addArgs("get", "pod", POD_NAME, "-n", namespace, "--watch", "-o", "json");
Thread thread = Thread.currentThread();

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -1,16 +1,5 @@
package io.onedev.server.plugin.executor.remotedocker;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeoutException;
import org.eclipse.jetty.websocket.api.Session;
import io.onedev.agent.AgentData;
import io.onedev.agent.Message;
import io.onedev.agent.MessageTypes;
import io.onedev.agent.WebsocketUtils;
@ -21,13 +10,12 @@ import io.onedev.commons.utils.TaskLogger;
import io.onedev.server.OneDev;
import io.onedev.server.buildspec.Service;
import io.onedev.server.cluster.ClusterManager;
import io.onedev.server.job.AgentInfo;
import io.onedev.server.job.JobContext;
import io.onedev.server.job.ResourceAllocator;
import io.onedev.server.job.ResourceRunnable;
import io.onedev.server.entitymanager.AgentManager;
import io.onedev.server.job.*;
import io.onedev.server.job.log.LogManager;
import io.onedev.server.job.log.LogTask;
import io.onedev.server.model.support.RegistryLogin;
import io.onedev.server.persistence.SessionManager;
import io.onedev.server.plugin.executor.serverdocker.ServerDockerExecutor;
import io.onedev.server.search.entity.agent.AgentQuery;
import io.onedev.server.terminal.AgentShell;
@ -35,6 +23,15 @@ import io.onedev.server.terminal.Shell;
import io.onedev.server.terminal.Terminal;
import io.onedev.server.util.CollectionUtils;
import io.onedev.server.web.editable.annotation.Editable;
import io.onedev.server.web.editable.annotation.Numeric;
import org.eclipse.jetty.websocket.api.Session;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeoutException;
@Editable(order=210, description="This executor runs build jobs as docker containers on remote machines via <a href='/administration/agents' target='_blank'>agents</a>")
public class RemoteDockerExecutor extends ServerDockerExecutor {
@ -42,7 +39,7 @@ public class RemoteDockerExecutor extends ServerDockerExecutor {
private static final long serialVersionUID = 1L;
private String agentQuery;
private transient volatile Session agentSession;
@Editable(order=390, name="Agent Selector", placeholder="Any agent",
@ -56,41 +53,104 @@ public class RemoteDockerExecutor extends ServerDockerExecutor {
this.agentQuery = agentQuery;
}
@Editable(order=450, placeholder = "Number of agent cpu", description = "" +
"Specify max number of jobs/services this executor can run concurrently on each matched agent")
@Numeric
@Override
public AgentQuery getAgentRequirement() {
return AgentQuery.parse(agentQuery, true);
public String getConcurrency() {
return super.getConcurrency();
}
@Override
public void setConcurrency(String concurrency) {
super.setConcurrency(concurrency);
}
private AgentManager getAgentManager() {
return OneDev.getInstance(AgentManager.class);
}
private JobManager getJobManager() {
return OneDev.getInstance(JobManager.class);
}
private SessionManager getSessionManager() {
return OneDev.getInstance(SessionManager.class);
}
private int getConcurrencyNumber() {
if (getConcurrency() != null)
return Integer.parseInt(getConcurrency());
else
return 0;
}
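// 0 serves as the "unspecified" sentinel; judging by the placeholder
// text above, the allocator presumably falls back to the number of CPUs
// on the matched agent in that case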
@Override
public void execute(JobContext jobContext, TaskLogger jobLogger, AgentInfo agentInfo) {
jobLogger.log(String.format("Executing job (executor: %s, agent: %s)...",
getName(), agentInfo.getData().getName()));
public void execute(JobContext jobContext) {
AgentRunnable runnable = (agentId) -> {
getJobManager().runJobLocal(jobContext, new JobRunnable() {
@Override
public void run(TaskLogger jobLogger) {
notifyJobRunning(jobContext.getBuildId(), agentId);
var agentData = getSessionManager().call(
() -> getAgentManager().load(agentId).getAgentData());
List<Map<String, String>> registryLogins = new ArrayList<>();
for (RegistryLogin login: getRegistryLogins()) {
registryLogins.add(CollectionUtils.newHashMap(
"url", login.getRegistryUrl(),
"userName", login.getUserName(),
"password", login.getPassword()));
}
List<Map<String, Serializable>> services = new ArrayList<>();
for (Service service: jobContext.getServices())
services.add(service.toMap());
agentSession = getAgentManager().getAgentSession(agentId);
if (agentSession == null)
throw new ExplicitException("Allocated agent not connected to current server, please retry later");
String jobToken = jobContext.getJobToken();
List<String> trustCertContent = getTrustCertContent();
DockerJobData jobData = new DockerJobData(jobToken, getName(), jobContext.getProjectPath(),
jobContext.getProjectId(), jobContext.getRefName(), jobContext.getCommitId().name(),
jobContext.getBuildNumber(), jobContext.getActions(), jobContext.getRetried(),
services, registryLogins, isMountDockerSock(), getDockerSockPath(), trustCertContent, getRunOptions());
jobLogger.log(String.format("Executing job (executor: %s, agent: %s)...",
getName(), agentData.getName()));
List<Map<String, String>> registryLogins = new ArrayList<>();
for (RegistryLogin login : getRegistryLogins()) {
registryLogins.add(CollectionUtils.newHashMap(
"url", login.getRegistryUrl(),
"userName", login.getUserName(),
"password", login.getPassword()));
}
List<Map<String, Serializable>> services = new ArrayList<>();
for (Service service : jobContext.getServices())
services.add(service.toMap());
String jobToken = jobContext.getJobToken();
List<String> trustCertContent = getTrustCertContent();
DockerJobData jobData = new DockerJobData(jobToken, getName(), jobContext.getProjectPath(),
jobContext.getProjectId(), jobContext.getRefName(), jobContext.getCommitId().name(),
jobContext.getBuildNumber(), jobContext.getActions(), jobContext.getRetried(),
services, registryLogins, isMountDockerSock(), getDockerSockPath(), trustCertContent,
getCpuLimit(), getMemoryLimit(), getRunOptions());
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
}
}
@Override
public void resume(JobContext jobContext) {
if (agentSession != null)
new Message(MessageTypes.RESUME_JOB, jobContext.getJobToken()).sendBy(agentSession);
}
@Override
public Shell openShell(JobContext jobContext, Terminal terminal) {
if (agentSession != null)
return new AgentShell(terminal, agentSession, jobContext.getJobToken());
else
throw new ExplicitException("Shell not ready");
}
});
};
agentSession = agentInfo.getSession();
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
}
getResourceAllocator().runAgentJob(
AgentQuery.parse(agentQuery, true), getName(), getConcurrencyNumber(),
jobContext.getServices().size()+1, runnable);
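// resource demand is one slot per service container plus one for the
// job itself, hence services.size() + 1 above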
}
private LogManager getLogManager() {
@ -112,105 +172,68 @@ public class RemoteDockerExecutor extends ServerDockerExecutor {
try {
UUID localServerUUID = getClusterManager().getLocalServerUUID();
jobLogger.log("Waiting for resources...");
getResourceAllocator().run(
new TestRunnable(jobToken, this, testData, localServerUUID),
getAgentRequirement(), new HashMap<>());
AgentRunnable runnable = agentId -> {
TaskLogger currentJobLogger = new TaskLogger() {
@Override
public void log(String message, String sessionId) {
getClusterManager().runOnServer(
localServerUUID,
new LogTask(jobToken, message, sessionId));
}
};
var agentData = getSessionManager().call(
() -> getAgentManager().load(agentId).getAgentData());
Session agentSession = getAgentManager().getAgentSession(agentId);
if (agentSession == null)
throw new ExplicitException("Allocated agent not connected to current server, please retry later");
currentJobLogger.log(String.format("Testing on agent '%s'...", agentData.getName()));
List<Map<String, String>> registryLogins = new ArrayList<>();
for (RegistryLogin login: getRegistryLogins()) {
registryLogins.add(CollectionUtils.newHashMap(
"url", login.getRegistryUrl(),
"userName", login.getUserName(),
"password", login.getPassword()));
}
TestDockerJobData jobData = new TestDockerJobData(getName(), jobToken,
testData.getDockerImage(), registryLogins, getRunOptions());
if (getLogManager().getJobLogger(jobToken) == null) {
getLogManager().addJobLogger(jobToken, currentJobLogger);
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
} finally {
getLogManager().removeJobLogger(jobToken);
}
} else {
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
}
}
};
getResourceAllocator().runAgentJob(
AgentQuery.parse(agentQuery, true), getName(),
getConcurrencyNumber(), 1, runnable);
} finally {
getLogManager().removeJobLogger(jobToken);
}
}
private void testLocal(String jobToken, AgentInfo agentInfo,
TestData testData, UUID dispatcherServerUUID) {
TaskLogger jobLogger = new TaskLogger() {
@Override
public void log(String message, String sessionId) {
getClusterManager().runOnServer(
dispatcherServerUUID,
new LogTask(jobToken, message, sessionId));
}
};
AgentData agentData = agentInfo.getData();
Session agentSession = agentInfo.getSession();
jobLogger.log(String.format("Testing on agent '%s'...", agentData.getName()));
List<Map<String, String>> registryLogins = new ArrayList<>();
for (RegistryLogin login: getRegistryLogins()) {
registryLogins.add(CollectionUtils.newHashMap(
"url", login.getRegistryUrl(),
"userName", login.getUserName(),
"password", login.getPassword()));
}
TestDockerJobData jobData = new TestDockerJobData(getName(), jobToken,
testData.getDockerImage(), registryLogins, getRunOptions());
if (getLogManager().getJobLogger(jobToken) == null) {
getLogManager().addJobLogger(jobToken, jobLogger);
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
} finally {
getLogManager().removeJobLogger(jobToken);
}
} else {
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
}
}
}
@Override
public void resume(JobContext jobContext) {
if (agentSession != null)
new Message(MessageTypes.RESUME_JOB, jobContext.getJobToken()).sendBy(agentSession);
}
@Override
public Shell openShell(JobContext jobContext, Terminal terminal) {
if (agentSession != null)
return new AgentShell(terminal, agentSession, jobContext.getJobToken());
else
throw new ExplicitException("Shell not ready");
}
@Override
public String getDockerExecutable() {
return super.getDockerExecutable();
}
private static class TestRunnable implements ResourceRunnable {
private static final long serialVersionUID = 1L;
private final String jobToken;
private final RemoteDockerExecutor jobExecutor;
private final TestData testData;
private final UUID dispatcherServerUUID;
public TestRunnable(String jobToken, RemoteDockerExecutor jobExecutor,
TestData testData, UUID dispatcherServerUUID) {
this.jobToken = jobToken;
this.jobExecutor = jobExecutor;
this.testData = testData;
this.dispatcherServerUUID = dispatcherServerUUID;
}
@Override
public void run(AgentInfo agentInfo) {
jobExecutor.testLocal(jobToken, agentInfo, testData, dispatcherServerUUID);
}
}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -1,14 +1,5 @@
package io.onedev.server.plugin.executor.remoteshell;
import java.io.File;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeoutException;
import org.eclipse.jetty.websocket.api.Session;
import io.onedev.agent.AgentData;
import io.onedev.agent.Message;
import io.onedev.agent.MessageTypes;
import io.onedev.agent.WebsocketUtils;
@ -19,12 +10,11 @@ import io.onedev.commons.utils.TaskLogger;
import io.onedev.server.OneDev;
import io.onedev.server.buildspec.job.CacheSpec;
import io.onedev.server.cluster.ClusterManager;
import io.onedev.server.job.AgentInfo;
import io.onedev.server.job.JobContext;
import io.onedev.server.job.ResourceAllocator;
import io.onedev.server.job.ResourceRunnable;
import io.onedev.server.entitymanager.AgentManager;
import io.onedev.server.job.*;
import io.onedev.server.job.log.LogManager;
import io.onedev.server.job.log.LogTask;
import io.onedev.server.persistence.SessionManager;
import io.onedev.server.plugin.executor.servershell.ServerShellExecutor;
import io.onedev.server.search.entity.agent.AgentQuery;
import io.onedev.server.terminal.AgentShell;
@ -32,6 +22,13 @@ import io.onedev.server.terminal.Shell;
import io.onedev.server.terminal.Terminal;
import io.onedev.server.web.editable.annotation.Editable;
import io.onedev.server.web.editable.annotation.Horizontal;
import io.onedev.server.web.editable.annotation.Numeric;
import org.eclipse.jetty.websocket.api.Session;
import java.io.File;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.TimeoutException;
@Editable(order=500, name="Remote Shell Executor", description=""
+ "This executor runs build jobs with remote machines's shell facility via <a href='/administration/agents' target='_blank'>agents</a><br>"
@ -58,40 +55,92 @@ public class RemoteShellExecutor extends ServerShellExecutor {
this.agentQuery = agentQuery;
}
@Editable(order=1000, placeholder = "Number of agent cpu", description = "" +
"Specify max number of jobs this executor can run concurrently on " +
"each matched agent")
@Numeric
@Override
public AgentQuery getAgentRequirement() {
return AgentQuery.parse(agentQuery, true);
public String getConcurrency() {
return super.getConcurrency();
}
@Override
public void setConcurrency(String concurrency) {
super.setConcurrency(concurrency);
}
private int getConcurrencyNumber() {
if (getConcurrency() != null)
return Integer.parseInt(getConcurrency());
else
return 0;
}
@Override
public void execute(JobContext jobContext, TaskLogger jobLogger, AgentInfo agentInfo) {
jobLogger.log(String.format("Executing job (executor: %s, agent: %s)...",
getName(), agentInfo.getData().getName()));
public void execute(JobContext jobContext) {
AgentRunnable runnable = (agentId) -> {
getJobManager().runJobLocal(jobContext, new JobRunnable() {
if (!jobContext.getServices().isEmpty()) {
throw new ExplicitException("This job requires services, which can only be supported "
+ "by docker aware executors");
}
for (CacheSpec cacheSpec: jobContext.getCacheSpecs()) {
if (new File(cacheSpec.getPath()).isAbsolute()) {
throw new ExplicitException("Shell executor does not support "
+ "absolute cache path: " + cacheSpec.getPath());
}
}
String jobToken = jobContext.getJobToken();
List<String> trustCertContent = getTrustCertContent();
ShellJobData jobData = new ShellJobData(jobToken, getName(), jobContext.getProjectPath(),
jobContext.getProjectId(), jobContext.getRefName(), jobContext.getCommitId().name(),
jobContext.getBuildNumber(), jobContext.getActions(), trustCertContent);
private static final long serialVersionUID = 1L;
agentSession = agentInfo.getSession();
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
}
@Override
public void run(TaskLogger jobLogger) {
notifyJobRunning(jobContext.getBuildId(), agentId);
var agentData = getSessionManager().call(
() -> getAgentManager().load(agentId).getAgentData());
agentSession = getAgentManager().getAgentSession(agentId);
if (agentSession == null)
throw new ExplicitException("Allocated agent not connected to current server, please retry later");
jobLogger.log(String.format("Executing job (executor: %s, agent: %s)...",
getName(), agentData.getName()));
if (!jobContext.getServices().isEmpty()) {
throw new ExplicitException("This job requires services, which can only be supported "
+ "by docker aware executors");
}
for (CacheSpec cacheSpec : jobContext.getCacheSpecs()) {
if (new File(cacheSpec.getPath()).isAbsolute()) {
throw new ExplicitException("Shell executor does not support "
+ "absolute cache path: " + cacheSpec.getPath());
}
}
String jobToken = jobContext.getJobToken();
List<String> trustCertContent = getTrustCertContent();
ShellJobData jobData = new ShellJobData(jobToken, getName(), jobContext.getProjectPath(),
jobContext.getProjectId(), jobContext.getRefName(), jobContext.getCommitId().name(),
jobContext.getBuildNumber(), jobContext.getActions(), trustCertContent);
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
}
}
@Override
public void resume(JobContext jobContext) {
if (agentSession != null)
new Message(MessageTypes.RESUME_JOB, jobContext.getJobToken()).sendBy(agentSession);
}
@Override
public Shell openShell(JobContext jobContext, Terminal terminal) {
if (agentSession != null)
return new AgentShell(terminal, agentSession, jobContext.getJobToken());
else
throw new ExplicitException("Shell not ready");
}
});
};
getResourceAllocator().runAgentJob(AgentQuery.parse(agentQuery, true), getName(),
getConcurrencyNumber(), 1, runnable);
}
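runAgentJob above receives the agent query, the executor name, the concurrency cap and the number of units this job occupies (1 for a shell job). A sketch of the contract as used here, assuming semaphore-style accounting per agent; the actual ResourceAllocator implementation is not shown in this diff:
// Illustrative contract of runAgentJob, assuming semaphore-style
// accounting: block until a matching agent has 'units' free slots,
// run the job there, then release the slots.
void runAgentJobSketch(java.util.concurrent.Semaphore agentSlots, long agentId,
        int units, AgentRunnable runnable) throws InterruptedException {
    agentSlots.acquire(units);     // wait for capacity on the chosen agent
    try {
        runnable.run(agentId);     // executes the JobRunnable defined above
    } finally {
        agentSlots.release(units); // free capacity for queued jobs
    }
}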
private LogManager getLogManager() {
@ -102,101 +151,75 @@ public class RemoteShellExecutor extends ServerShellExecutor {
return OneDev.getInstance(ClusterManager.class);
}
public JobManager getJobManager() {
return OneDev.getInstance(JobManager.class);
}
private ResourceAllocator getResourceAllocator() {
return OneDev.getInstance(ResourceAllocator.class);
}
private AgentManager getAgentManager() {
return OneDev.getInstance(AgentManager.class);
}
private SessionManager getSessionManager() {
return OneDev.getInstance(SessionManager.class);
}
@Override
public void test(TestData testData, TaskLogger jobLogger) {
String jobToken = UUID.randomUUID().toString();
UUID localServerUUID = getClusterManager().getLocalServerUUID();
getLogManager().addJobLogger(jobToken, jobLogger);
try {
UUID localServerUUID = getClusterManager().getLocalServerUUID();
jobLogger.log("Waiting for resources...");
getResourceAllocator().run(
new TestRunnable(jobToken, this, testData, localServerUUID),
getAgentRequirement(), new HashMap<>());
AgentRunnable runnable = agentId -> {
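// Note: this runnable may execute on whichever cluster server the
// allocator picks, so log output below is forwarded back to the
// dispatching server (localServerUUID) via LogTask.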
TaskLogger currentJobLogger = new TaskLogger() {
@Override
public void log(String message, String sessionId) {
getClusterManager().runOnServer(
localServerUUID,
new LogTask(jobToken, message, sessionId));
}
};
var agentData = getSessionManager().call(
() -> getAgentManager().load(agentId).getAgentData());
Session agentSession = getAgentManager().getAgentSession(agentId);
if (agentSession == null)
throw new ExplicitException("Allocated agent not connected to current server, please retry later");
currentJobLogger.log(String.format("Testing on agent '%s'...", agentData.getName()));
TestShellJobData jobData = new TestShellJobData(jobToken, testData.getCommands());
if (getLogManager().getJobLogger(jobToken) == null) {
getLogManager().addJobLogger(jobToken, currentJobLogger);
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
} finally {
getLogManager().removeJobLogger(jobToken);
}
} else {
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
}
}
};
getResourceAllocator().runAgentJob(AgentQuery.parse(agentQuery, true), getName(),
getConcurrencyNumber(), 1, runnable);
} finally {
getLogManager().removeJobLogger(jobToken);
}
}
private void testLocal(String jobToken, AgentInfo agentInfo,
TestData testData, UUID dispatcherMemberUUID) {
TaskLogger jobLogger = new TaskLogger() {
@Override
public void log(String message, String sessionId) {
getClusterManager().runOnServer(
dispatcherMemberUUID,
new LogTask(jobToken, message, sessionId));
}
};
AgentData agentData = agentInfo.getData();
Session agentSession = agentInfo.getSession();
jobLogger.log(String.format("Testing on agent '%s'...", agentData.getName()));
TestShellJobData jobData = new TestShellJobData(jobToken, testData.getCommands());
if (getLogManager().getJobLogger(jobToken) == null) {
getLogManager().addJobLogger(jobToken, jobLogger);
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
} finally {
getLogManager().removeJobLogger(jobToken);
}
} else {
try {
WebsocketUtils.call(agentSession, jobData, 0);
} catch (InterruptedException | TimeoutException e) {
new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
}
}
}
@Override
public void resume(JobContext jobContext) {
if (agentSession != null)
new Message(MessageTypes.RESUME_JOB, jobContext.getJobToken()).sendBy(agentSession);
}
@Override
public Shell openShell(JobContext jobContext, Terminal terminal) {
if (agentSession != null)
return new AgentShell(terminal, agentSession, jobContext.getJobToken());
else
throw new ExplicitException("Shell not ready");
}
private static class TestRunnable implements ResourceRunnable {
private static final long serialVersionUID = 1L;
private final String jobToken;
private final RemoteShellExecutor jobExecutor;
private final TestData testData;
private final UUID dispatcherServerUUID;
public TestRunnable(String jobToken, RemoteShellExecutor jobExecutor,
TestData testData, UUID dispatcherServerUUID) {
this.jobToken = jobToken;
this.jobExecutor = jobExecutor;
this.testData = testData;
this.dispatcherServerUUID = dispatcherServerUUID;
}
@Override
public void run(AgentInfo agentInfo) {
jobExecutor.testLocal(jobToken, agentInfo, testData, dispatcherServerUUID);
}
}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.executor.serverdocker.ServerDockerModule</moduleClass>

View File

@ -1,88 +1,50 @@
package io.onedev.server.plugin.executor.serverdocker;
import static io.onedev.agent.DockerExecutorUtils.createNetwork;
import static io.onedev.agent.DockerExecutorUtils.deleteDir;
import static io.onedev.agent.DockerExecutorUtils.deleteNetwork;
import static io.onedev.agent.DockerExecutorUtils.isUseProcessIsolation;
import static io.onedev.agent.DockerExecutorUtils.newDockerKiller;
import static io.onedev.agent.DockerExecutorUtils.startService;
import static io.onedev.k8shelper.KubernetesHelper.cloneRepository;
import static io.onedev.k8shelper.KubernetesHelper.installGitCert;
import static io.onedev.k8shelper.KubernetesHelper.stringifyStepPosition;
import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
import javax.validation.ConstraintValidatorContext;
import javax.validation.constraints.NotEmpty;
import org.apache.commons.lang3.SystemUtils;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.hazelcast.cluster.Member;
import io.onedev.agent.DockerExecutorUtils;
import io.onedev.agent.ExecutorUtils;
import io.onedev.agent.job.FailedException;
import io.onedev.commons.bootstrap.Bootstrap;
import io.onedev.commons.loader.AppLoader;
import io.onedev.commons.utils.ExplicitException;
import io.onedev.commons.utils.FileUtils;
import io.onedev.commons.utils.PathUtils;
import io.onedev.commons.utils.StringUtils;
import io.onedev.commons.utils.TaskLogger;
import io.onedev.commons.utils.*;
import io.onedev.commons.utils.command.Commandline;
import io.onedev.commons.utils.command.ExecutionResult;
import io.onedev.commons.utils.command.LineConsumer;
import io.onedev.k8shelper.BuildImageFacade;
import io.onedev.k8shelper.CacheAllocationRequest;
import io.onedev.k8shelper.CacheInstance;
import io.onedev.k8shelper.CheckoutFacade;
import io.onedev.k8shelper.CloneInfo;
import io.onedev.k8shelper.CommandFacade;
import io.onedev.k8shelper.CompositeFacade;
import io.onedev.k8shelper.JobCache;
import io.onedev.k8shelper.KubernetesHelper;
import io.onedev.k8shelper.LeafFacade;
import io.onedev.k8shelper.LeafHandler;
import io.onedev.k8shelper.OsContainer;
import io.onedev.k8shelper.OsExecution;
import io.onedev.k8shelper.OsInfo;
import io.onedev.k8shelper.RunContainerFacade;
import io.onedev.k8shelper.ServerSideFacade;
import io.onedev.k8shelper.*;
import io.onedev.server.OneDev;
import io.onedev.server.buildspec.Service;
import io.onedev.server.cluster.ClusterManager;
import io.onedev.server.cluster.ClusterRunnable;
import io.onedev.server.git.location.GitLocation;
import io.onedev.server.job.AgentInfo;
import io.onedev.server.job.JobContext;
import io.onedev.server.job.JobManager;
import io.onedev.server.job.JobRunnable;
import io.onedev.server.job.ResourceAllocator;
import io.onedev.server.model.support.RegistryLogin;
import io.onedev.server.model.support.administration.jobexecutor.JobExecutor;
import io.onedev.server.plugin.executor.serverdocker.ServerDockerExecutor.TestData;
import io.onedev.server.search.entity.agent.AgentQuery;
import io.onedev.server.terminal.CommandlineShell;
import io.onedev.server.terminal.Shell;
import io.onedev.server.terminal.Terminal;
import io.onedev.server.util.EditContext;
import io.onedev.server.util.validation.Validatable;
import io.onedev.server.util.validation.annotation.ClassValidating;
import io.onedev.server.web.editable.annotation.Editable;
import io.onedev.server.web.editable.annotation.Horizontal;
import io.onedev.server.web.editable.annotation.OmitName;
import io.onedev.server.web.editable.annotation.ShowCondition;
import io.onedev.server.web.editable.annotation.*;
import io.onedev.server.web.util.Testable;
import org.apache.commons.lang3.SystemUtils;
import javax.annotation.Nullable;
import javax.validation.ConstraintValidatorContext;
import javax.validation.constraints.NotEmpty;
import java.io.File;
import java.io.Serializable;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import static io.onedev.agent.DockerExecutorUtils.*;
import static io.onedev.k8shelper.KubernetesHelper.*;
@Editable(order=ServerDockerExecutor.ORDER, name="Server Docker Executor",
description="This executor runs build jobs as docker containers on OneDev server")
@ -106,6 +68,12 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
private String dockerSockPath;
private String cpuLimit;
private String memoryLimit;
private String concurrency;
private transient volatile File hostBuildHome;
private transient volatile LeafFacade runningStep;
@ -123,6 +91,17 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
this.registryLogins = registryLogins;
}
@Editable(order=450, placeholder = "Number of server cpu", description = "" +
"Specify max number of jobs/services this executor can run concurrently")
@Numeric
public String getConcurrency() {
return concurrency;
}
public void setConcurrency(String concurrency) {
this.concurrency = concurrency;
}
@Editable(order=500, group="More Settings", description="Whether or not to mount docker sock into job container to "
+ "support docker operations in job commands, for instance to build docker image.<br>"
+ "<b class='text-danger'>WARNING</b>: Malicious jobs can take control of whole OneDev "
@ -152,8 +131,31 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
this.dockerSockPath = dockerSockPath;
}
@Editable(order=50050, group="More Settings", description="Optionally specify options to run container. For instance, you may use <tt>-m 2g</tt> "
+ "to limit memory of created container to be 2 giga bytes")
@Editable(order=50010, group="More Settings", placeholder = "No limit", description = "" +
"Optionally specify cpu limit of jobs/services using this executor. This will be " +
"used as option <a href='https://docs.docker.com/config/containers/resource_constraints/#cpu' target='_blank'>--cpus</a> " +
"of relevant containers")
public String getCpuLimit() {
return cpuLimit;
}
public void setCpuLimit(String cpuLimit) {
this.cpuLimit = cpuLimit;
}
@Editable(order=50020, group="More Settings", placeholder = "No limit", description = "" +
"Optionally specify memory limit of jobs/services using this executor. This will be " +
"used as option <a href='https://docs.docker.com/config/containers/resource_constraints/#memory' target='_blank'>--memory</a> " +
"of relevant containers")
public String getMemoryLimit() {
return memoryLimit;
}
public void setMemoryLimit(String memoryLimit) {
this.memoryLimit = memoryLimit;
}
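Both limits are passed straight through to docker when containers are launched (see the run invocations later in this file): with cpuLimit "1.5" and memoryLimit "2g", containers are effectively started as docker run --cpus 1.5 --memory 2g ..., so every job and service container of this executor gets the same per-container cap. A sketch mirroring that launch code, with illustrative values:
// How the limits surface in the docker command line (mirrors the
// container launch code later in this hunk); values shown are examples.
Commandline docker = newDocker();
if (getCpuLimit() != null)
    docker.addArgs("--cpus", getCpuLimit());      // e.g. "1.5"
if (getMemoryLimit() != null)
    docker.addArgs("--memory", getMemoryLimit()); // e.g. "2g"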
@Editable(order=50050, group="More Settings", description="Optionally specify options to run container")
public String getRunOptions() {
return runOptions;
}
@ -189,11 +191,6 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
}
return file;
}
@Override
public AgentQuery getAgentRequirement() {
return null;
}
private ClusterManager getClusterManager() {
return OneDev.getInstance(ClusterManager.class);
@ -203,300 +200,358 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
return OneDev.getInstance(JobManager.class);
}
@Override
public void execute(JobContext jobContext, TaskLogger jobLogger, AgentInfo agentInfo) {
if (OneDev.getK8sService() != null) {
throw new ExplicitException(""
+ "OneDev running inside kubernetes cluster does not support server docker executor. "
+ "Please use kubernetes executor instead");
}
hostBuildHome = FileUtils.createTempDir("onedev-build");
try {
String network = getName() + "-" + jobContext.getProjectId() + "-"
+ jobContext.getBuildNumber() + "-" + jobContext.getRetried();
Member member = getClusterManager().getHazelcastInstance().getCluster().getLocalMember();
jobLogger.log(String.format("Executing job (executor: %s, server: %s, network: %s)...", getName(),
member.getAddress().getHost() + ":" + member.getAddress().getPort(), network));
File hostCacheHome = getCacheHome(jobContext.getJobExecutor());
jobLogger.log("Setting up job cache...") ;
JobCache cache = new JobCache(hostCacheHome) {
@Override
protected Map<CacheInstance, String> allocate(CacheAllocationRequest request) {
return getJobManager().allocateCaches(jobContext, request);
}
@Override
protected void delete(File cacheDir) {
deleteDir(cacheDir, newDocker(), Bootstrap.isInDocker());
}
};
cache.init(false);
login(jobLogger);
createNetwork(newDocker(), network, jobLogger);
try {
OsInfo osInfo = OneDev.getInstance(OsInfo.class);
for (Service jobService: jobContext.getServices()) {
jobLogger.log("Starting service (name: " + jobService.getName() + ", image: " + jobService.getImage() + ")...");
startService(newDocker(), network, jobService.toMap(), osInfo, jobLogger);
}
File hostWorkspace = new File(hostBuildHome, "workspace");
FileUtils.createDir(hostWorkspace);
AtomicReference<File> hostAuthInfoHome = new AtomicReference<>(null);
try {
cache.installSymbolinks(hostWorkspace);
jobLogger.log("Copying job dependencies...");
getJobManager().copyDependencies(jobContext, hostWorkspace);
String containerBuildHome;
String containerWorkspace;
if (SystemUtils.IS_OS_WINDOWS) {
containerBuildHome = "C:\\onedev-build";
containerWorkspace = "C:\\onedev-build\\workspace";
} else {
containerBuildHome = "/onedev-build";
containerWorkspace = "/onedev-build/workspace";
}
getJobManager().reportJobWorkspace(jobContext, containerWorkspace);
CompositeFacade entryFacade = new CompositeFacade(jobContext.getActions());
boolean successful = entryFacade.execute(new LeafHandler() {
private int runStepContainer(String image, @Nullable String entrypoint,
List<String> arguments, Map<String, String> environments,
@Nullable String workingDir, Map<String, String> volumeMounts,
List<Integer> position, boolean useTTY) {
// Uninstall symlinks as docker cannot process them well
cache.uninstallSymbolinks(hostWorkspace);
containerName = network + "-step-" + stringifyStepPosition(position);
try {
Commandline docker = newDocker();
docker.addArgs("run", "--name=" + containerName, "--network=" + network);
if (getRunOptions() != null)
docker.addArgs(StringUtils.parseQuoteTokens(getRunOptions()));
docker.addArgs("-v", getHostPath(hostBuildHome.getAbsolutePath()) + ":" + containerBuildHome);
for (Map.Entry<String, String> entry: volumeMounts.entrySet()) {
if (entry.getKey().contains(".."))
throw new ExplicitException("Volume mount source path should not contain '..'");
String hostPath = getHostPath(new File(hostWorkspace, entry.getKey()).getAbsolutePath());
docker.addArgs("-v", hostPath + ":" + entry.getValue());
}
if (entrypoint != null) {
docker.addArgs("-w", containerWorkspace);
} else if (workingDir != null) {
if (workingDir.contains(".."))
throw new ExplicitException("Container working dir should not contain '..'");
docker.addArgs("-w", workingDir);
}
for (Map.Entry<CacheInstance, String> entry: cache.getAllocations().entrySet()) {
String hostCachePath = entry.getKey().getDirectory(hostCacheHome).getAbsolutePath();
String containerCachePath = PathUtils.resolve(containerWorkspace, entry.getValue());
docker.addArgs("-v", getHostPath(hostCachePath) + ":" + containerCachePath);
}
if (isMountDockerSock()) {
if (getDockerSockPath() != null) {
if (SystemUtils.IS_OS_WINDOWS)
docker.addArgs("-v", getDockerSockPath() + "://./pipe/docker_engine");
else
docker.addArgs("-v",getDockerSockPath() + ":/var/run/docker.sock");
} else {
if (SystemUtils.IS_OS_WINDOWS)
docker.addArgs("-v", "//./pipe/docker_engine://./pipe/docker_engine");
else
docker.addArgs("-v", "/var/run/docker.sock:/var/run/docker.sock");
}
}
if (hostAuthInfoHome.get() != null) {
String hostPath = getHostPath(hostAuthInfoHome.get().getAbsolutePath());
if (SystemUtils.IS_OS_WINDOWS) {
docker.addArgs("-v", hostPath + ":C:\\Users\\ContainerAdministrator\\auth-info");
docker.addArgs("-v", hostPath + ":C:\\Users\\ContainerUser\\auth-info");
} else {
docker.addArgs("-v", hostPath + ":/root/auth-info");
}
}
for (Map.Entry<String, String> entry: environments.entrySet())
docker.addArgs("-e", entry.getKey() + "=" + entry.getValue());
docker.addArgs("-e", "ONEDEV_WORKSPACE=" + containerWorkspace);
if (useTTY)
docker.addArgs("-t");
if (entrypoint != null)
docker.addArgs("--entrypoint=" + entrypoint);
if (isUseProcessIsolation(newDocker(), image, osInfo, jobLogger))
docker.addArgs("--isolation=process");
docker.addArgs(image);
docker.addArgs(arguments.toArray(new String[arguments.size()]));
ExecutionResult result = docker.execute(ExecutorUtils.newInfoLogger(jobLogger),
ExecutorUtils.newWarningLogger(jobLogger), null, newDockerKiller(newDocker(),
containerName, jobLogger));
return result.getReturnCode();
} finally {
containerName = null;
cache.installSymbolinks(hostWorkspace);
}
}
@Override
public boolean execute(LeafFacade facade, List<Integer> position) {
runningStep = facade;
try {
String stepNames = entryFacade.getNamesAsString(position);
jobLogger.notice("Running step \"" + stepNames + "\"...");
if (facade instanceof CommandFacade) {
CommandFacade commandFacade = (CommandFacade) facade;
OsExecution execution = commandFacade.getExecution(osInfo);
if (execution.getImage() == null) {
throw new ExplicitException("This step can only be executed by server shell "
+ "executor or remote shell executor");
}
Commandline entrypoint = DockerExecutorUtils.getEntrypoint(
hostBuildHome, commandFacade, osInfo, hostAuthInfoHome.get() != null);
int exitCode = runStepContainer(execution.getImage(), entrypoint.executable(),
entrypoint.arguments(), new HashMap<>(), null, new HashMap<>(),
position, commandFacade.isUseTTY());
if (exitCode != 0) {
jobLogger.error("Step \"" + stepNames + "\" is failed: Command exited with code " + exitCode);
return false;
}
} else if (facade instanceof BuildImageFacade) {
DockerExecutorUtils.buildImage(newDocker(), (BuildImageFacade) facade,
hostBuildHome, jobLogger);
} else if (facade instanceof RunContainerFacade) {
RunContainerFacade runContainerFacade = (RunContainerFacade) facade;
OsContainer container = runContainerFacade.getContainer(osInfo);
List<String> arguments = new ArrayList<>();
if (container.getArgs() != null)
arguments.addAll(Arrays.asList(StringUtils.parseQuoteTokens(container.getArgs())));
int exitCode = runStepContainer(container.getImage(), null, arguments, container.getEnvMap(),
container.getWorkingDir(), container.getVolumeMounts(), position, runContainerFacade.isUseTTY());
if (exitCode != 0) {
jobLogger.error("Step \"" + stepNames + "\" is failed: Container exited with code " + exitCode);
return false;
}
} else if (facade instanceof CheckoutFacade) {
try {
CheckoutFacade checkoutFacade = (CheckoutFacade) facade;
jobLogger.log("Checking out code...");
if (hostAuthInfoHome.get() == null)
hostAuthInfoHome.set(FileUtils.createTempDir());
Commandline git = new Commandline(AppLoader.getInstance(GitLocation.class).getExecutable());
checkoutFacade.setupWorkingDir(git, hostWorkspace);
git.environments().put("HOME", hostAuthInfoHome.get().getAbsolutePath());
CloneInfo cloneInfo = checkoutFacade.getCloneInfo();
cloneInfo.writeAuthData(hostAuthInfoHome.get(), git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
try {
List<String> trustCertContent = getTrustCertContent();
if (!trustCertContent.isEmpty()) {
installGitCert(new File(hostAuthInfoHome.get(), "trust-cert.pem"), trustCertContent,
git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
}
int cloneDepth = checkoutFacade.getCloneDepth();
cloneRepository(git, jobContext.getProjectGitDir(), cloneInfo.getCloneUrl(),
jobContext.getRefName(), jobContext.getCommitId().name(),
checkoutFacade.isWithLfs(), checkoutFacade.isWithSubmodules(),
cloneDepth, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
} finally {
git.clearArgs();
git.addArgs("config", "--global", "--unset", "core.sshCommand");
ExecutionResult result = git.execute(ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
if (result.getReturnCode() != 5 && result.getReturnCode() != 0)
result.checkReturnCode();
}
} catch (Exception e) {
jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e));
return false;
}
} else {
ServerSideFacade serverSideFacade = (ServerSideFacade) facade;
try {
serverSideFacade.execute(hostBuildHome, new ServerSideFacade.Runner() {
@Override
public Map<String, byte[]> run(File inputDir, Map<String, String> placeholderValues) {
return getJobManager().runServerStep(jobContext, position, inputDir,
placeholderValues, jobLogger);
}
});
} catch (Exception e) {
jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e));
return false;
}
}
jobLogger.success("Step \"" + stepNames + "\" is successful");
return true;
} finally {
runningStep = null;
}
}
@Override
public void skip(LeafFacade facade, List<Integer> position) {
jobLogger.notice("Step \"" + entryFacade.getNamesAsString(position) + "\" is skipped");
}
}, new ArrayList<>());
if (!successful)
throw new FailedException();
} finally {
cache.uninstallSymbolinks(hostWorkspace);
// Fix https://code.onedev.io/onedev/server/~issues/597
if (SystemUtils.IS_OS_WINDOWS)
FileUtils.deleteDir(hostWorkspace);
if (hostAuthInfoHome.get() != null)
FileUtils.deleteDir(hostAuthInfoHome.get());
}
} finally {
deleteNetwork(newDocker(), network, jobLogger);
}
} finally {
synchronized (hostBuildHome) {
deleteDir(hostBuildHome, newDocker(), Bootstrap.isInDocker());
}
}
private ResourceAllocator getResourceAllocator() {
return OneDev.getInstance(ResourceAllocator.class);
}
private int getConcurrencyNumber() {
if (getConcurrency() != null)
return Integer.parseInt(getConcurrency());
else
return 0;
}
@Override
public void resume(JobContext jobContext) {
if (hostBuildHome != null) synchronized (hostBuildHome) {
if (hostBuildHome.exists())
FileUtils.touchFile(new File(hostBuildHome, "continue"));
}
public void execute(JobContext jobContext) {
ClusterRunnable runnable = () -> {
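// Assumed flow: the resource allocator (runServerJob below) waits for a
// free concurrency slot on some cluster server, then runs this
// ClusterRunnable there; the job body executes via runJobLocal.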
getJobManager().runJobLocal(jobContext, new JobRunnable() {
private static final long serialVersionUID = 1L;
@Override
public void run(TaskLogger jobLogger) {
notifyJobRunning(jobContext.getBuildId(), null);
if (OneDev.getK8sService() != null) {
throw new ExplicitException(""
+ "OneDev running inside kubernetes cluster does not support server docker executor. "
+ "Please use kubernetes executor instead");
}
hostBuildHome = FileUtils.createTempDir("onedev-build");
try {
String network = getName() + "-" + jobContext.getProjectId() + "-"
+ jobContext.getBuildNumber() + "-" + jobContext.getRetried();
Member member = getClusterManager().getHazelcastInstance().getCluster().getLocalMember();
jobLogger.log(String.format("Executing job (executor: %s, server: %s, network: %s)...", getName(),
member.getAddress().getHost() + ":" + member.getAddress().getPort(), network));
File hostCacheHome = getCacheHome(jobContext.getJobExecutor());
jobLogger.log("Setting up job cache...");
JobCache cache = new JobCache(hostCacheHome) {
@Override
protected Map<CacheInstance, String> allocate(CacheAllocationRequest request) {
return getJobManager().allocateCaches(jobContext, request);
}
@Override
protected void delete(File cacheDir) {
deleteDir(cacheDir, newDocker(), Bootstrap.isInDocker());
}
};
cache.init(false);
login(jobLogger);
createNetwork(newDocker(), network, jobLogger);
try {
OsInfo osInfo = OneDev.getInstance(OsInfo.class);
for (Service jobService : jobContext.getServices()) {
jobLogger.log("Starting service (name: " + jobService.getName() + ", image: " + jobService.getImage() + ")...");
startService(newDocker(), network, jobService.toMap(), osInfo, getCpuLimit(), getMemoryLimit(), jobLogger);
}
File hostWorkspace = new File(hostBuildHome, "workspace");
FileUtils.createDir(hostWorkspace);
AtomicReference<File> hostAuthInfoHome = new AtomicReference<>(null);
try {
cache.installSymbolinks(hostWorkspace);
jobLogger.log("Copying job dependencies...");
getJobManager().copyDependencies(jobContext, hostWorkspace);
String containerBuildHome;
String containerWorkspace;
if (SystemUtils.IS_OS_WINDOWS) {
containerBuildHome = "C:\\onedev-build";
containerWorkspace = "C:\\onedev-build\\workspace";
} else {
containerBuildHome = "/onedev-build";
containerWorkspace = "/onedev-build/workspace";
}
getJobManager().reportJobWorkspace(jobContext, containerWorkspace);
CompositeFacade entryFacade = new CompositeFacade(jobContext.getActions());
boolean successful = entryFacade.execute(new LeafHandler() {
private int runStepContainer(String image, @Nullable String entrypoint,
List<String> arguments, Map<String, String> environments,
@Nullable String workingDir, Map<String, String> volumeMounts,
List<Integer> position, boolean useTTY) {
// Uninstall symlinks as docker cannot process them well
cache.uninstallSymbolinks(hostWorkspace);
containerName = network + "-step-" + stringifyStepPosition(position);
try {
Commandline docker = newDocker();
docker.addArgs("run", "--name=" + containerName, "--network=" + network);
if (getCpuLimit() != null)
docker.addArgs("--cpus", getCpuLimit());
if (getMemoryLimit() != null)
docker.addArgs("--memory", getMemoryLimit());
if (getRunOptions() != null)
docker.addArgs(StringUtils.parseQuoteTokens(getRunOptions()));
docker.addArgs("-v", getHostPath(hostBuildHome.getAbsolutePath()) + ":" + containerBuildHome);
for (Map.Entry<String, String> entry : volumeMounts.entrySet()) {
if (entry.getKey().contains(".."))
throw new ExplicitException("Volume mount source path should not contain '..'");
String hostPath = getHostPath(new File(hostWorkspace, entry.getKey()).getAbsolutePath());
docker.addArgs("-v", hostPath + ":" + entry.getValue());
}
if (entrypoint != null) {
docker.addArgs("-w", containerWorkspace);
} else if (workingDir != null) {
if (workingDir.contains(".."))
throw new ExplicitException("Container working dir should not contain '..'");
docker.addArgs("-w", workingDir);
}
for (Map.Entry<CacheInstance, String> entry : cache.getAllocations().entrySet()) {
String hostCachePath = entry.getKey().getDirectory(hostCacheHome).getAbsolutePath();
String containerCachePath = PathUtils.resolve(containerWorkspace, entry.getValue());
docker.addArgs("-v", getHostPath(hostCachePath) + ":" + containerCachePath);
}
if (isMountDockerSock()) {
if (getDockerSockPath() != null) {
if (SystemUtils.IS_OS_WINDOWS)
docker.addArgs("-v", getDockerSockPath() + "://./pipe/docker_engine");
else
docker.addArgs("-v", getDockerSockPath() + ":/var/run/docker.sock");
} else {
if (SystemUtils.IS_OS_WINDOWS)
docker.addArgs("-v", "//./pipe/docker_engine://./pipe/docker_engine");
else
docker.addArgs("-v", "/var/run/docker.sock:/var/run/docker.sock");
}
}
if (hostAuthInfoHome.get() != null) {
String hostPath = getHostPath(hostAuthInfoHome.get().getAbsolutePath());
if (SystemUtils.IS_OS_WINDOWS) {
docker.addArgs("-v", hostPath + ":C:\\Users\\ContainerAdministrator\\auth-info");
docker.addArgs("-v", hostPath + ":C:\\Users\\ContainerUser\\auth-info");
} else {
docker.addArgs("-v", hostPath + ":/root/auth-info");
}
}
for (Map.Entry<String, String> entry : environments.entrySet())
docker.addArgs("-e", entry.getKey() + "=" + entry.getValue());
docker.addArgs("-e", "ONEDEV_WORKSPACE=" + containerWorkspace);
if (useTTY)
docker.addArgs("-t");
if (entrypoint != null)
docker.addArgs("--entrypoint=" + entrypoint);
if (isUseProcessIsolation(newDocker(), image, osInfo, jobLogger))
docker.addArgs("--isolation=process");
docker.addArgs(image);
docker.addArgs(arguments.toArray(new String[arguments.size()]));
ExecutionResult result = docker.execute(ExecutorUtils.newInfoLogger(jobLogger),
ExecutorUtils.newWarningLogger(jobLogger), null, newDockerKiller(newDocker(),
containerName, jobLogger));
return result.getReturnCode();
} finally {
containerName = null;
cache.installSymbolinks(hostWorkspace);
}
}
@Override
public boolean execute(LeafFacade facade, List<Integer> position) {
runningStep = facade;
try {
String stepNames = entryFacade.getNamesAsString(position);
jobLogger.notice("Running step \"" + stepNames + "\"...");
if (facade instanceof CommandFacade) {
CommandFacade commandFacade = (CommandFacade) facade;
OsExecution execution = commandFacade.getExecution(osInfo);
if (execution.getImage() == null) {
throw new ExplicitException("This step can only be executed by server shell "
+ "executor or remote shell executor");
}
Commandline entrypoint = DockerExecutorUtils.getEntrypoint(
hostBuildHome, commandFacade, osInfo, hostAuthInfoHome.get() != null);
int exitCode = runStepContainer(execution.getImage(), entrypoint.executable(),
entrypoint.arguments(), new HashMap<>(), null, new HashMap<>(),
position, commandFacade.isUseTTY());
if (exitCode != 0) {
jobLogger.error("Step \"" + stepNames + "\" is failed: Command exited with code " + exitCode);
return false;
}
} else if (facade instanceof BuildImageFacade) {
DockerExecutorUtils.buildImage(newDocker(), (BuildImageFacade) facade,
hostBuildHome, jobLogger);
} else if (facade instanceof RunContainerFacade) {
RunContainerFacade runContainerFacade = (RunContainerFacade) facade;
OsContainer container = runContainerFacade.getContainer(osInfo);
List<String> arguments = new ArrayList<>();
if (container.getArgs() != null)
arguments.addAll(Arrays.asList(StringUtils.parseQuoteTokens(container.getArgs())));
int exitCode = runStepContainer(container.getImage(), null, arguments, container.getEnvMap(),
container.getWorkingDir(), container.getVolumeMounts(), position, runContainerFacade.isUseTTY());
if (exitCode != 0) {
jobLogger.error("Step \"" + stepNames + "\" is failed: Container exited with code " + exitCode);
return false;
}
} else if (facade instanceof CheckoutFacade) {
try {
CheckoutFacade checkoutFacade = (CheckoutFacade) facade;
jobLogger.log("Checking out code...");
if (hostAuthInfoHome.get() == null)
hostAuthInfoHome.set(FileUtils.createTempDir());
Commandline git = new Commandline(AppLoader.getInstance(GitLocation.class).getExecutable());
checkoutFacade.setupWorkingDir(git, hostWorkspace);
git.environments().put("HOME", hostAuthInfoHome.get().getAbsolutePath());
CloneInfo cloneInfo = checkoutFacade.getCloneInfo();
cloneInfo.writeAuthData(hostAuthInfoHome.get(), git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
try {
List<String> trustCertContent = getTrustCertContent();
if (!trustCertContent.isEmpty()) {
installGitCert(new File(hostAuthInfoHome.get(), "trust-cert.pem"), trustCertContent,
git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
}
int cloneDepth = checkoutFacade.getCloneDepth();
cloneRepository(git, jobContext.getProjectGitDir(), cloneInfo.getCloneUrl(),
jobContext.getRefName(), jobContext.getCommitId().name(),
checkoutFacade.isWithLfs(), checkoutFacade.isWithSubmodules(),
cloneDepth, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
} finally {
git.clearArgs();
git.addArgs("config", "--global", "--unset", "core.sshCommand");
ExecutionResult result = git.execute(ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
if (result.getReturnCode() != 5 && result.getReturnCode() != 0)
result.checkReturnCode();
}
} catch (Exception e) {
jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e));
return false;
}
} else {
ServerSideFacade serverSideFacade = (ServerSideFacade) facade;
try {
serverSideFacade.execute(hostBuildHome, new ServerSideFacade.Runner() {
@Override
public Map<String, byte[]> run(File inputDir, Map<String, String> placeholderValues) {
return getJobManager().runServerStep(jobContext, position, inputDir,
placeholderValues, jobLogger);
}
});
} catch (Exception e) {
jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e));
return false;
}
}
jobLogger.success("Step \"" + stepNames + "\" is successful");
return true;
} finally {
runningStep = null;
}
}
@Override
public void skip(LeafFacade facade, List<Integer> position) {
jobLogger.notice("Step \"" + entryFacade.getNamesAsString(position) + "\" is skipped");
}
}, new ArrayList<>());
if (!successful)
throw new FailedException();
} finally {
cache.uninstallSymbolinks(hostWorkspace);
// Fix https://code.onedev.io/onedev/server/~issues/597
if (SystemUtils.IS_OS_WINDOWS)
FileUtils.deleteDir(hostWorkspace);
if (hostAuthInfoHome.get() != null)
FileUtils.deleteDir(hostAuthInfoHome.get());
}
} finally {
deleteNetwork(newDocker(), network, jobLogger);
}
} finally {
synchronized (hostBuildHome) {
deleteDir(hostBuildHome, newDocker(), Bootstrap.isInDocker());
}
}
}
@Override
public void resume(JobContext jobContext) {
if (hostBuildHome != null) synchronized (hostBuildHome) {
if (hostBuildHome.exists())
FileUtils.touchFile(new File(hostBuildHome, "continue"));
}
}
@Override
public Shell openShell(JobContext jobContext, Terminal terminal) {
String containerNameCopy = containerName;
if (containerNameCopy != null) {
Commandline docker = newDocker();
docker.addArgs("exec", "-it", containerNameCopy);
if (runningStep instanceof CommandFacade) {
CommandFacade commandStep = (CommandFacade) runningStep;
docker.addArgs(commandStep.getShell(SystemUtils.IS_OS_WINDOWS, null));
} else if (SystemUtils.IS_OS_WINDOWS) {
docker.addArgs("cmd");
} else {
docker.addArgs("sh");
}
return new CommandlineShell(terminal, docker);
} else if (hostBuildHome != null) {
Commandline shell;
if (SystemUtils.IS_OS_WINDOWS)
shell = new Commandline("cmd");
else
shell = new Commandline("sh");
shell.workingDir(new File(hostBuildHome, "workspace"));
return new CommandlineShell(terminal, shell);
} else {
throw new ExplicitException("Shell not ready");
}
}
});
};
getResourceAllocator().runServerJob(getName(), getConcurrencyNumber(),
jobContext.getServices().size() + 1, runnable);
}
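Note the unit count passed to runServerJob above: one unit per declared service plus one for the job itself, charged against the executor's concurrency cap. A worked example under that assumed accounting:
// Assumed accounting for the call above: the job container and each
// service container take one concurrency unit apiece.
static int concurrencyUnits(int serviceCount) {
    // e.g. a job with 1 service takes 2 units; with concurrency = 4,
    // at most two such jobs run on this executor at once
    return serviceCount + 1;
}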
private void login(TaskLogger jobLogger) {
@ -584,6 +639,10 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
jobLogger.log("Testing specified docker image...");
docker.clearArgs();
docker.addArgs("run", "--rm");
if (getCpuLimit() != null)
docker.addArgs("--cpus", getCpuLimit());
if (getMemoryLimit() != null)
docker.addArgs("--memory", getMemoryLimit());
if (getRunOptions() != null)
docker.addArgs(StringUtils.parseQuoteTokens(getRunOptions()));
String containerWorkspacePath;
@ -673,32 +732,4 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
}
@Override
public Shell openShell(JobContext jobContext, Terminal terminal) {
String containerNameCopy = containerName;
if (containerNameCopy != null) {
Commandline docker = newDocker();
docker.addArgs("exec", "-it", containerNameCopy);
if (runningStep instanceof CommandFacade) {
CommandFacade commandStep = (CommandFacade) runningStep;
docker.addArgs(commandStep.getShell(SystemUtils.IS_OS_WINDOWS, null));
} else if (SystemUtils.IS_OS_WINDOWS) {
docker.addArgs("cmd");
} else {
docker.addArgs("sh");
}
return new CommandlineShell(terminal, docker);
} else if (hostBuildHome != null) {
Commandline shell;
if (SystemUtils.IS_OS_WINDOWS)
shell = new Commandline("cmd");
else
shell = new Commandline("sh");
shell.workingDir(new File(hostBuildHome, "workspace"));
return new CommandlineShell(terminal, shell);
} else {
throw new ExplicitException("Shell not ready");
}
}
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.executor.servershell.ServerShellModule</moduleClass>

View File

@ -1,24 +1,6 @@
package io.onedev.server.plugin.executor.servershell;
import static io.onedev.agent.ShellExecutorUtils.testCommands;
import static io.onedev.k8shelper.KubernetesHelper.cloneRepository;
import static io.onedev.k8shelper.KubernetesHelper.installGitCert;
import static io.onedev.k8shelper.KubernetesHelper.replacePlaceholders;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.validation.constraints.Size;
import org.apache.commons.lang.SystemUtils;
import com.hazelcast.cluster.Member;
import io.onedev.agent.ExecutorUtils;
import io.onedev.agent.job.FailedException;
import io.onedev.commons.bootstrap.Bootstrap;
@ -28,37 +10,39 @@ import io.onedev.commons.utils.FileUtils;
import io.onedev.commons.utils.TaskLogger;
import io.onedev.commons.utils.command.Commandline;
import io.onedev.commons.utils.command.ExecutionResult;
import io.onedev.k8shelper.BuildImageFacade;
import io.onedev.k8shelper.CacheAllocationRequest;
import io.onedev.k8shelper.CacheInstance;
import io.onedev.k8shelper.CheckoutFacade;
import io.onedev.k8shelper.CloneInfo;
import io.onedev.k8shelper.CommandFacade;
import io.onedev.k8shelper.CompositeFacade;
import io.onedev.k8shelper.JobCache;
import io.onedev.k8shelper.LeafFacade;
import io.onedev.k8shelper.LeafHandler;
import io.onedev.k8shelper.OsExecution;
import io.onedev.k8shelper.OsInfo;
import io.onedev.k8shelper.RunContainerFacade;
import io.onedev.k8shelper.ServerSideFacade;
import io.onedev.k8shelper.*;
import io.onedev.server.OneDev;
import io.onedev.server.cluster.ClusterManager;
import io.onedev.server.cluster.ClusterRunnable;
import io.onedev.server.git.location.GitLocation;
import io.onedev.server.job.AgentInfo;
import io.onedev.server.job.JobContext;
import io.onedev.server.job.JobManager;
import io.onedev.server.job.JobRunnable;
import io.onedev.server.job.ResourceAllocator;
import io.onedev.server.model.support.administration.jobexecutor.JobExecutor;
import io.onedev.server.plugin.executor.servershell.ServerShellExecutor.TestData;
import io.onedev.server.search.entity.agent.AgentQuery;
import io.onedev.server.terminal.CommandlineShell;
import io.onedev.server.terminal.Shell;
import io.onedev.server.terminal.Terminal;
import io.onedev.server.util.validation.annotation.Code;
import io.onedev.server.web.editable.annotation.Editable;
import io.onedev.server.web.editable.annotation.Horizontal;
import io.onedev.server.web.editable.annotation.Numeric;
import io.onedev.server.web.editable.annotation.OmitName;
import io.onedev.server.web.util.Testable;
import org.apache.commons.lang.SystemUtils;
import javax.validation.constraints.Size;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static io.onedev.agent.ShellExecutorUtils.testCommands;
import static io.onedev.k8shelper.KubernetesHelper.*;
@Editable(order=ServerShellExecutor.ORDER, name="Server Shell Executor", description=""
+ "This executor runs build jobs with OneDev server's shell facility.<br>"
@ -72,11 +56,24 @@ public class ServerShellExecutor extends JobExecutor implements Testable<TestDat
static final int ORDER = 400;
private String concurrency;
private static final Object cacheHomeCreationLock = new Object();
private transient volatile LeafFacade runningStep;
private transient volatile File buildDir;
@Editable(order=1000, placeholder = "Number of server cpu",
description = "Specify max number of jobs this executor can run concurrently")
@Numeric
public String getConcurrency() {
return concurrency;
}
public void setConcurrency(String concurrency) {
this.concurrency = concurrency;
}
private File getCacheHome(JobExecutor jobExecutor) {
File file = new File(Bootstrap.getSiteDir(), "cache/" + jobExecutor.getName());
@ -85,11 +82,6 @@ public class ServerShellExecutor extends JobExecutor implements Testable<TestDat
}
return file;
}
@Override
public AgentQuery getAgentRequirement() {
return null;
}
private ClusterManager getClusterManager() {
return OneDev.getInstance(ClusterManager.class);
@ -99,187 +91,231 @@ public class ServerShellExecutor extends JobExecutor implements Testable<TestDat
return OneDev.getInstance(JobManager.class);
}
private ResourceAllocator getResourceAllocator() {
return OneDev.getInstance(ResourceAllocator.class);
}
private int getConcurrencyNumber() {
if (getConcurrency() != null)
return Integer.parseInt(getConcurrency());
else
return 0;
}
@Override
public void execute(JobContext jobContext, TaskLogger jobLogger, AgentInfo agentInfo) {
if (OneDev.getK8sService() != null) {
throw new ExplicitException(""
+ "OneDev running inside kubernetes cluster does not support server shell executor. "
+ "Please use kubernetes executor instead");
} else if (Bootstrap.isInDocker()) {
throw new ExplicitException("Server shell executor is only supported when OneDev is installed "
+ "directly on bare metal/virtual machine");
}
buildDir = FileUtils.createTempDir("onedev-build");
File workspaceDir = new File(buildDir, "workspace");
try {
Member server = getClusterManager().getHazelcastInstance().getCluster().getLocalMember();
jobLogger.log(String.format("Executing job (executor: %s, server: %s)...", getName(),
server.getAddress().getHost() + ":" + server.getAddress().getPort()));
jobLogger.log(String.format("Executing job with executor '%s'...", getName()));
if (!jobContext.getServices().isEmpty()) {
throw new ExplicitException("This job requires services, which can only be supported "
+ "by docker aware executors");
}
File cacheHomeDir = getCacheHome(jobContext.getJobExecutor());
jobLogger.log("Setting up job cache...") ;
JobCache cache = new JobCache(cacheHomeDir) {
public void execute(JobContext jobContext) {
ClusterRunnable runnable = () -> {
getJobManager().runJobLocal(jobContext, new JobRunnable() {
private static final long serialVersionUID = 1L;
@Override
protected Map<CacheInstance, String> allocate(CacheAllocationRequest request) {
return getJobManager().allocateCaches(jobContext, request);
}
public void run(TaskLogger jobLogger) {
notifyJobRunning(jobContext.getBuildId(), null);
if (OneDev.getK8sService() != null) {
throw new ExplicitException(""
+ "OneDev running inside kubernetes cluster does not support server shell executor. "
+ "Please use kubernetes executor instead");
} else if (Bootstrap.isInDocker()) {
throw new ExplicitException("Server shell executor is only supported when OneDev is installed "
+ "directly on bare metal/virtual machine");
}
@Override
protected void delete(File cacheDir) {
FileUtils.cleanDir(cacheDir);
}
};
cache.init(true);
FileUtils.createDir(workspaceDir);
cache.installSymbolinks(workspaceDir);
jobLogger.log("Copying job dependencies...");
getJobManager().copyDependencies(jobContext, workspaceDir);
File userDir = new File(buildDir, "user");
FileUtils.createDir(userDir);
getJobManager().reportJobWorkspace(jobContext, workspaceDir.getAbsolutePath());
CompositeFacade entryFacade = new CompositeFacade(jobContext.getActions());
OsInfo osInfo = OneDev.getInstance(OsInfo.class);
boolean successful = entryFacade.execute(new LeafHandler() {
@Override
public boolean execute(LeafFacade facade, List<Integer> position) {
runningStep = facade;
buildDir = FileUtils.createTempDir("onedev-build");
File workspaceDir = new File(buildDir, "workspace");
try {
String stepNames = entryFacade.getNamesAsString(position);
jobLogger.notice("Running step \"" + stepNames + "\"...");
if (facade instanceof CommandFacade) {
CommandFacade commandFacade = (CommandFacade) facade;
OsExecution execution = commandFacade.getExecution(osInfo);
if (execution.getImage() != null) {
throw new ExplicitException("This step can only be executed by server docker executor, "
+ "remote docker executor, or kubernetes executor");
}
commandFacade.generatePauseCommand(buildDir);
File jobScriptFile = new File(buildDir, "job-commands" + commandFacade.getScriptExtension());
try {
FileUtils.writeLines(
jobScriptFile,
new ArrayList<>(replacePlaceholders(execution.getCommands(), buildDir)),
commandFacade.getEndOfLine());
} catch (IOException e) {
throw new RuntimeException(e);
}
Commandline interpreter = commandFacade.getScriptInterpreter();
Map<String, String> environments = new HashMap<>();
environments.put("GIT_HOME", userDir.getAbsolutePath());
environments.put("ONEDEV_WORKSPACE", workspaceDir.getAbsolutePath());
interpreter.workingDir(workspaceDir).environments(environments);
interpreter.addArgs(jobScriptFile.getAbsolutePath());
ExecutionResult result = interpreter.execute(ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
if (result.getReturnCode() != 0) {
jobLogger.error("Step \"" + stepNames + "\" is failed: Command exited with code " + result.getReturnCode());
return false;
}
} else if (facade instanceof RunContainerFacade || facade instanceof BuildImageFacade) {
throw new ExplicitException("This step can only be executed by server docker executor, "
+ "remote docker executor, or kubernetes executor");
} else if (facade instanceof CheckoutFacade) {
try {
CheckoutFacade checkoutFacade = (CheckoutFacade) facade;
jobLogger.log("Checking out code...");
Commandline git = new Commandline(AppLoader.getInstance(GitLocation.class).getExecutable());
checkoutFacade.setupWorkingDir(git, workspaceDir);
Map<String, String> environments = new HashMap<>();
environments.put("HOME", userDir.getAbsolutePath());
git.environments(environments);
Member server = getClusterManager().getHazelcastInstance().getCluster().getLocalMember();
jobLogger.log(String.format("Executing job (executor: %s, server: %s)...", getName(),
server.getAddress().getHost() + ":" + server.getAddress().getPort()));
CloneInfo cloneInfo = checkoutFacade.getCloneInfo();
cloneInfo.writeAuthData(userDir, git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
List<String> trustCertContent = getTrustCertContent();
if (!trustCertContent.isEmpty()) {
installGitCert(new File(userDir, "trust-cert.pem"), trustCertContent,
git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
}
jobLogger.log(String.format("Executing job with executor '%s'...", getName()));
int cloneDepth = checkoutFacade.getCloneDepth();
cloneRepository(git, jobContext.getProjectGitDir(), cloneInfo.getCloneUrl(), jobContext.getRefName(),
jobContext.getCommitId().name(), checkoutFacade.isWithLfs(), checkoutFacade.isWithSubmodules(),
cloneDepth, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
} catch (Exception e) {
jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e));
return false;
}
} else {
ServerSideFacade serverSideFacade = (ServerSideFacade) facade;
try {
serverSideFacade.execute(buildDir, new ServerSideFacade.Runner() {
@Override
public Map<String, byte[]> run(File inputDir, Map<String, String> placeholderValues) {
return getJobManager().runServerStep(jobContext, position, inputDir,
placeholderValues, jobLogger);
}
});
} catch (Exception e) {
jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e));
return false;
}
if (!jobContext.getServices().isEmpty()) {
throw new ExplicitException("This job requires services, which can only be supported "
+ "by docker aware executors");
}
jobLogger.success("Step \"" + stepNames + "\" is successful");
return true;
File cacheHomeDir = getCacheHome(jobContext.getJobExecutor());
jobLogger.log("Setting up job cache...") ;
JobCache cache = new JobCache(cacheHomeDir) {
@Override
protected Map<CacheInstance, String> allocate(CacheAllocationRequest request) {
return getJobManager().allocateCaches(jobContext, request);
}
@Override
protected void delete(File cacheDir) {
FileUtils.cleanDir(cacheDir);
}
};
cache.init(true);
FileUtils.createDir(workspaceDir);
cache.installSymbolinks(workspaceDir);
jobLogger.log("Copying job dependencies...");
getJobManager().copyDependencies(jobContext, workspaceDir);
File userDir = new File(buildDir, "user");
FileUtils.createDir(userDir);
getJobManager().reportJobWorkspace(jobContext, workspaceDir.getAbsolutePath());
CompositeFacade entryFacade = new CompositeFacade(jobContext.getActions());
OsInfo osInfo = OneDev.getInstance(OsInfo.class);
boolean successful = entryFacade.execute(new LeafHandler() {
@Override
public boolean execute(LeafFacade facade, List<Integer> position) {
runningStep = facade;
try {
String stepNames = entryFacade.getNamesAsString(position);
jobLogger.notice("Running step \"" + stepNames + "\"...");
if (facade instanceof CommandFacade) {
CommandFacade commandFacade = (CommandFacade) facade;
OsExecution execution = commandFacade.getExecution(osInfo);
if (execution.getImage() != null) {
throw new ExplicitException("This step can only be executed by server docker executor, "
+ "remote docker executor, or kubernetes executor");
}
commandFacade.generatePauseCommand(buildDir);
File jobScriptFile = new File(buildDir, "job-commands" + commandFacade.getScriptExtension());
try {
FileUtils.writeLines(
jobScriptFile,
new ArrayList<>(replacePlaceholders(execution.getCommands(), buildDir)),
commandFacade.getEndOfLine());
} catch (IOException e) {
throw new RuntimeException(e);
}
Commandline interpreter = commandFacade.getScriptInterpreter();
Map<String, String> environments = new HashMap<>();
environments.put("GIT_HOME", userDir.getAbsolutePath());
environments.put("ONEDEV_WORKSPACE", workspaceDir.getAbsolutePath());
interpreter.workingDir(workspaceDir).environments(environments);
interpreter.addArgs(jobScriptFile.getAbsolutePath());
ExecutionResult result = interpreter.execute(ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
if (result.getReturnCode() != 0) {
jobLogger.error("Step \"" + stepNames + "\" is failed: Command exited with code " + result.getReturnCode());
return false;
}
} else if (facade instanceof RunContainerFacade || facade instanceof BuildImageFacade) {
throw new ExplicitException("This step can only be executed by server docker executor, "
+ "remote docker executor, or kubernetes executor");
} else if (facade instanceof CheckoutFacade) {
try {
CheckoutFacade checkoutFacade = (CheckoutFacade) facade;
jobLogger.log("Checking out code...");
Commandline git = new Commandline(AppLoader.getInstance(GitLocation.class).getExecutable());
checkoutFacade.setupWorkingDir(git, workspaceDir);
Map<String, String> environments = new HashMap<>();
environments.put("HOME", userDir.getAbsolutePath());
git.environments(environments);
CloneInfo cloneInfo = checkoutFacade.getCloneInfo();
cloneInfo.writeAuthData(userDir, git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
List<String> trustCertContent = getTrustCertContent();
if (!trustCertContent.isEmpty()) {
installGitCert(new File(userDir, "trust-cert.pem"), trustCertContent,
git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
}
int cloneDepth = checkoutFacade.getCloneDepth();
cloneRepository(git, jobContext.getProjectGitDir(), cloneInfo.getCloneUrl(), jobContext.getRefName(),
jobContext.getCommitId().name(), checkoutFacade.isWithLfs(), checkoutFacade.isWithSubmodules(),
cloneDepth, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger));
} catch (Exception e) {
jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e));
return false;
}
} else {
ServerSideFacade serverSideFacade = (ServerSideFacade) facade;
try {
serverSideFacade.execute(buildDir, new ServerSideFacade.Runner() {
@Override
public Map<String, byte[]> run(File inputDir, Map<String, String> placeholderValues) {
return getJobManager().runServerStep(jobContext, position, inputDir,
placeholderValues, jobLogger);
}
});
} catch (Exception e) {
jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e));
return false;
}
}
jobLogger.success("Step \"" + stepNames + "\" is successful");
return true;
} finally {
runningStep = null;
}
}
@Override
public void skip(LeafFacade facade, List<Integer> position) {
jobLogger.notice("Step \"" + entryFacade.getNamesAsString(position) + "\" is skipped");
}
}, new ArrayList<>());
if (!successful)
throw new FailedException();
} finally {
runningStep = null;
// Fix https://code.onedev.io/onedev/server/~issues/597
if (SystemUtils.IS_OS_WINDOWS && workspaceDir.exists())
FileUtils.deleteDir(workspaceDir);
synchronized (buildDir) {
FileUtils.deleteDir(buildDir);
}
}
}
@Override
public void skip(LeafFacade facade, List<Integer> position) {
jobLogger.notice("Step \"" + entryFacade.getNamesAsString(position) + "\" is skipped");
public void resume(JobContext jobContext) {
if (buildDir != null) synchronized (buildDir) {
if (buildDir.exists())
FileUtils.touchFile(new File(buildDir, "continue"));
}
}
@Override
public Shell openShell(JobContext jobContext, Terminal terminal) {
if (buildDir != null) {
Commandline shell;
if (runningStep instanceof CommandFacade) {
CommandFacade commandStep = (CommandFacade) runningStep;
shell = new Commandline(commandStep.getShell(SystemUtils.IS_OS_WINDOWS, null)[0]);
} else if (SystemUtils.IS_OS_WINDOWS) {
shell = new Commandline("cmd");
} else {
shell = new Commandline("sh");
}
shell.workingDir(new File(buildDir, "workspace"));
return new CommandlineShell(terminal, shell);
} else {
throw new ExplicitException("Shell not ready");
}
}
}, new ArrayList<>());
if (!successful)
throw new FailedException();
} finally {
// Fix https://code.onedev.io/onedev/server/~issues/597
if (SystemUtils.IS_OS_WINDOWS && workspaceDir.exists())
FileUtils.deleteDir(workspaceDir);
synchronized (buildDir) {
FileUtils.deleteDir(buildDir);
}
}
}
@Override
public void resume(JobContext jobContext) {
if (buildDir != null) synchronized (buildDir) {
if (buildDir.exists())
FileUtils.touchFile(new File(buildDir, "continue"));
}
});
};
getResourceAllocator().runServerJob(getName(), getConcurrencyNumber(), 1, runnable);
}
@Override
@ -309,23 +345,4 @@ public class ServerShellExecutor extends JobExecutor implements Testable<TestDat
}
@Override
public Shell openShell(JobContext jobContext, Terminal terminal) {
if (buildDir != null) {
Commandline shell;
if (runningStep instanceof CommandFacade) {
CommandFacade commandStep = (CommandFacade) runningStep;
shell = new Commandline(commandStep.getShell(SystemUtils.IS_OS_WINDOWS, null)[0]);
} else if (SystemUtils.IS_OS_WINDOWS) {
shell = new Commandline("cmd");
} else {
shell = new Commandline("sh");
}
shell.workingDir(new File(buildDir, "workspace"));
return new CommandlineShell(terminal, shell);
} else {
throw new ExplicitException("Shell not ready");
}
}
}
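The execute method above hands the prepared runnable to the resource allocator together with the executor name and its concurrency setting. The allocator's implementation is not part of this hunk; the following is a minimal sketch of the kind of gating that call shape implies, using one semaphore per executor. The class and field names are hypothetical; only the runServerJob signature mirrors the call above.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Semaphore;

// Hypothetical sketch: gate server-side jobs per executor so that at most
// "concurrency" permits are in use at any time; each job holds "used" permits.
public class SimpleResourceAllocator {

    private final Map<String, Semaphore> semaphores = new ConcurrentHashMap<>();

    public void runServerJob(String executorName, int concurrency, int used, Runnable runnable) {
        Semaphore semaphore = semaphores.computeIfAbsent(
                executorName, name -> new Semaphore(concurrency));
        try {
            // Block until the executor has capacity, run the job, then free the permits.
            semaphore.acquire(used);
            try {
                runnable.run();
            } finally {
                semaphore.release(used);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }
}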

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.imports.bitbucketcloud.BitbucketModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.imports.gitea.GiteaModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.imports.github.GitHubModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.imports.gitlab.GitLabModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.imports.jiracloud.JiraModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.imports.url.UrlModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.imports.youtrack.YouTrackModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.notification.discord.DiscordModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.notification.slack.SlackModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.report.coverage.CoverageModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.report.markdown.MarkdownModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.report.problem.ProblemModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.report.unittest.UnitTestModule</moduleClass>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.sso.discord.DiscordModule</moduleClass>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server-plugin</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<properties>
<moduleClass>io.onedev.server.plugin.sso.openid.OpenIdModule</moduleClass>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>io.onedev</groupId>
<artifactId>server</artifactId>
<version>7.8.17</version>
<version>7.9.0</version>
</parent>
<dependencies>
<dependency>

View File

@ -1,27 +1,21 @@
package io.onedev.server.product;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.commons.lang3.StringUtils;
import org.glassfish.jersey.internal.guava.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Splitter;
import io.onedev.commons.bootstrap.Bootstrap;
import io.onedev.commons.utils.ExplicitException;
import io.onedev.server.ServerConfig;
import io.onedev.server.persistence.HibernateConfig;
import oshi.SystemInfo;
import oshi.hardware.HardwareAbstractionLayer;
@Singleton
public class DefaultServerConfig implements ServerConfig {
@ -38,10 +32,6 @@ public class DefaultServerConfig implements ServerConfig {
private static final String PROP_CLUSTER_PORT = "cluster_port";
private static final String PROP_SERVER_CPU = "server_cpu";
private static final String PROP_SERVER_MEMORY = "server_memory";
private int httpPort;
private int sshPort;
@ -138,54 +128,6 @@ public class DefaultServerConfig implements ServerConfig {
clusterPort = 5701;
else
clusterPort = Integer.parseInt(clusterPortStr.trim());
HardwareAbstractionLayer hardware = null;
try {
hardware = new SystemInfo().getHardware();
} catch (Exception e) {
logger.debug("Error calling oshi", e);
}
String cpuString = System.getenv(PROP_SERVER_CPU);
if (StringUtils.isBlank(cpuString))
cpuString = props.getProperty(PROP_SERVER_CPU);
if (StringUtils.isBlank(cpuString)) {
if (hardware != null) {
serverCpu = hardware.getProcessor().getLogicalProcessorCount()*1000;
} else {
serverCpu = 4000;
logger.warn("Unable to call oshi to get default cpu quota (cpu cores x 1000). Assuming as 4000. "
+ "Configure it manually via environment variable or server property '" + PROP_SERVER_CPU
+ "' if you do not want to use this value");
}
} else {
try {
serverCpu = Integer.parseInt(cpuString);
} catch (NumberFormatException e) {
throw new ExplicitException("Property '" + PROP_SERVER_CPU + "' should be a number");
}
}
String memoryString = System.getenv(PROP_SERVER_MEMORY);
if (StringUtils.isBlank(memoryString))
memoryString = props.getProperty(PROP_SERVER_MEMORY);
if (StringUtils.isBlank(memoryString)) {
if (hardware != null) {
serverMemory = (int) (hardware.getMemory().getTotal()/1024/1024);
} else {
serverMemory = 8000;
logger.warn("Unable to call oshi to get default memory quota (mega bytes of physical memory). "
+ "Assuming as 8000. Configure it manually via environment variable or server property "
+ "'" + PROP_SERVER_MEMORY + "' if you do not want to use this value");
}
} else {
try {
serverMemory = Integer.parseInt(memoryString);
} catch (NumberFormatException e) {
throw new ExplicitException("Property '" + PROP_SERVER_MEMORY + "' should be a number");
}
}
}
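Taken together, the removed detection logic above produced defaults like the following on, for example, a host with 8 logical processors and 16 GiB of physical memory (the hardware figures are illustrative):

serverCpu    = 8 * 1000                  = 8000  (millicores)
serverMemory = 17179869184 / 1024 / 1024 = 16384 (megabytes)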
@ -223,14 +165,4 @@ public class DefaultServerConfig implements ServerConfig {
@Override
public int getClusterPort() {
return clusterPort;
}
@Override
public int getServerCpu() {
return serverCpu;
}
@Override
public int getServerMemory() {
return serverMemory;
}
}
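Both removed getters followed the same lookup order used elsewhere in this class: an environment variable takes precedence over the matching server.properties entry, with a hard-coded default when neither is set. Below is a condensed sketch of that pattern, assuming java.util.Properties and the StringUtils and ExplicitException imports shown above; the helper name readIntProperty is hypothetical.

// Hypothetical helper condensing the env-var-over-properties lookup seen above.
private static int readIntProperty(Properties props, String key, int defaultValue) {
    // An environment variable wins over the server.properties entry.
    String value = System.getenv(key);
    if (StringUtils.isBlank(value))
        value = props.getProperty(key);
    if (StringUtils.isBlank(value))
        return defaultValue;
    try {
        return Integer.parseInt(value.trim());
    } catch (NumberFormatException e) {
        throw new ExplicitException("Property '" + key + "' should be a number");
    }
}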