From d694a5e036ceea2abdd243f44876ba9bfbf45251 Mon Sep 17 00:00:00 2001
From: Robin Shen
Date: Thu, 29 Dec 2022 20:09:54 +0800
Subject: [PATCH] Fix issue #1056 - Limiting overall memory usage and/or
 number of concurrent jobs

---
 pom.xml | 4 +-
 server-core/pom.xml | 2 +-
 .../java/io/onedev/server/ServerConfig.java | 4 -
 .../java/io/onedev/server/ServerSocket.java | 6 +-
 .../io/onedev/server/buildspec/BuildSpec.java | 2866 +++++++++--------
 .../io/onedev/server/buildspec/Service.java | 271 +-
 .../io/onedev/server/buildspec/job/Job.java | 969 +++---
 .../server/entitymanager/AgentManager.java | 4 +-
 .../impl/DefaultAgentManager.java | 69 +-
 .../java/io/onedev/server/job/AgentInfo.java | 33 -
 .../io/onedev/server/job/AgentRunnable.java | 9 +
 ...eption.java => CancellationException.java} | 4 +-
 .../onedev/server/job/DefaultJobManager.java | 501 ++-
 .../server/job/DefaultResourceAllocator.java | 687 ++--
 .../java/io/onedev/server/job/JobContext.java | 21 +-
 .../io/onedev/server/job/JobExecution.java | 2 +-
 .../java/io/onedev/server/job/JobManager.java | 7 +
 .../io/onedev/server/job/JobRunnable.java | 17 +
 .../onedev/server/job/ResourceAllocator.java | 35 +-
 .../onedev/server/job/ResourceRunnable.java | 11 -
 .../onedev/server/migration/DataMigrator.java | 23 +
 .../java/io/onedev/server/model/Agent.java | 88 +-
 .../administration/PerformanceSetting.java | 29 +
 .../jobexecutor/JobExecutor.java | 420 +--
 .../search/entity/agent/AgentQuery.java | 62 +-
 .../search/entity/agent/CpuCriteria.java | 54 -
 .../search/entity/agent/MemoryCriteria.java | 54 -
 .../util/concurrent/DefaultWorkExecutor.java | 17 +-
 .../web/behavior/AgentQueryBehavior.java | 29 +-
 .../buildsetting/agent/AgentListPanel.java | 40 -
 .../buildsetting/agent/AgentOverviewPage.html | 8 -
 .../buildsetting/agent/AgentOverviewPage.java | 2 -
 server-plugin/pom.xml | 2 +-
 server-plugin/server-plugin-archetype/pom.xml | 2 +-
 .../server-plugin-authenticator-ldap/pom.xml | 2 +-
 .../server-plugin-buildspec-gradle/pom.xml | 2 +-
 .../server-plugin-buildspec-maven/pom.xml | 2 +-
 .../server-plugin-buildspec-node/pom.xml | 2 +-
 .../server-plugin-executor-kubernetes/pom.xml | 2 +-
 .../kubernetes/KubernetesExecutor.java | 393 ++-
 .../pom.xml | 2 +-
 .../remotedocker/RemoteDockerExecutor.java | 293 +-
 .../pom.xml | 2 +-
 .../remoteshell/RemoteShellExecutor.java | 269 +-
 .../pom.xml | 2 +-
 .../serverdocker/ServerDockerExecutor.java | 795 ++---
 .../pom.xml | 2 +-
 .../servershell/ServerShellExecutor.java | 465 +--
 .../pom.xml | 2 +-
 .../server-plugin-import-gitea/pom.xml | 2 +-
 .../server-plugin-import-github/pom.xml | 2 +-
 .../server-plugin-import-gitlab/pom.xml | 2 +-
 .../server-plugin-import-jiracloud/pom.xml | 2 +-
 .../server-plugin-import-url/pom.xml | 2 +-
 .../server-plugin-import-youtrack/pom.xml | 2 +-
 .../pom.xml | 2 +-
 .../server-plugin-notification-slack/pom.xml | 2 +-
 .../server-plugin-report-checkstyle/pom.xml | 2 +-
 .../server-plugin-report-clover/pom.xml | 2 +-
 .../server-plugin-report-coverage/pom.xml | 2 +-
 .../server-plugin-report-cpd/pom.xml | 2 +-
 .../server-plugin-report-jacoco/pom.xml | 2 +-
 .../server-plugin-report-jest/pom.xml | 2 +-
 .../server-plugin-report-junit/pom.xml | 2 +-
 .../server-plugin-report-markdown/pom.xml | 2 +-
 .../server-plugin-report-pmd/pom.xml | 2 +-
 .../server-plugin-report-problem/pom.xml | 2 +-
 .../server-plugin-report-spotbugs/pom.xml | 2 +-
 .../server-plugin-report-unittest/pom.xml | 2 +-
 .../server-plugin-sso-discord/pom.xml | 2 +-
 .../server-plugin-sso-openid/pom.xml | 2 +-
 server-product/pom.xml | 2 +-
 .../server/product/DefaultServerConfig.java | 90 +-
 73 files changed, 4138 insertions(+), 4585 deletions(-)
 delete mode 100644 server-core/src/main/java/io/onedev/server/job/AgentInfo.java
 create mode 100644 server-core/src/main/java/io/onedev/server/job/AgentRunnable.java
 rename server-core/src/main/java/io/onedev/server/job/{CancellerAwareCancellationException.java => CancellationException.java} (60%)
 create mode 100644 server-core/src/main/java/io/onedev/server/job/JobRunnable.java
 delete mode 100644 server-core/src/main/java/io/onedev/server/job/ResourceRunnable.java
 delete mode 100644 server-core/src/main/java/io/onedev/server/search/entity/agent/CpuCriteria.java
 delete mode 100644 server-core/src/main/java/io/onedev/server/search/entity/agent/MemoryCriteria.java

diff --git a/pom.xml b/pom.xml
index 02c779011d..0848b82acd 100644
--- a/pom.xml
+++ b/pom.xml
@@ -9,7 +9,7 @@
 	1.2.0
 	server
-	7.8.17
+	7.9.0
 	pom
 	${project.groupId}.${project.artifactId}-${project.version}
@@ -620,7 +620,7 @@
 	2.7.3
-	1.7.12
+	1.7.14
 	1.7.36
 	1.2.11
 	4.7.2
diff --git a/server-core/pom.xml b/server-core/pom.xml
index fb6c32804c..14daf4a0d5 100644
--- a/server-core/pom.xml
+++ b/server-core/pom.xml
@@ -7,7 +7,7 @@
 	io.onedev
 	server
-	7.8.17
+	7.9.0

diff --git a/server-core/src/main/java/io/onedev/server/ServerConfig.java b/server-core/src/main/java/io/onedev/server/ServerConfig.java
index 3942b80033..b6552724e8 100644
--- a/server-core/src/main/java/io/onedev/server/ServerConfig.java
+++ b/server-core/src/main/java/io/onedev/server/ServerConfig.java
@@ -34,8 +34,4 @@ public interface ServerConfig {
 
 	int getClusterPort();
 
-	int getServerCpu();
-
-	int getServerMemory();
-
 }
diff --git a/server-core/src/main/java/io/onedev/server/ServerSocket.java b/server-core/src/main/java/io/onedev/server/ServerSocket.java
index 0a437e4449..2841fa5f4d 100644
--- a/server-core/src/main/java/io/onedev/server/ServerSocket.java
+++ b/server-core/src/main/java/io/onedev/server/ServerSocket.java
@@ -19,7 +19,7 @@ import io.onedev.agent.AgentData;
 import io.onedev.agent.CallData;
 import io.onedev.agent.Message;
 import io.onedev.agent.MessageTypes;
-import io.onedev.agent.WaitingForAgentResourceToBeReleased;
+import io.onedev.agent.WantToDisconnectAgent;
 import io.onedev.agent.WebsocketUtils;
 import io.onedev.commons.utils.ExplicitException;
 import io.onedev.commons.utils.StringUtils;
@@ -188,9 +188,9 @@ public class ServerSocket {
 	private Serializable service(Serializable request) {
 		try {
-			if (request instanceof WaitingForAgentResourceToBeReleased) {
+			if (request instanceof WantToDisconnectAgent) {
 				if (agentId != null)
-					OneDev.getInstance(ResourceAllocator.class).waitingForAgentResourceToBeReleased(agentId);
+					OneDev.getInstance(ResourceAllocator.class).wantToDisconnectAgent(agentId);
 				return null;
 			} else {
 				throw new ExplicitException("Unknown request: " + request.getClass());
diff --git a/server-core/src/main/java/io/onedev/server/buildspec/BuildSpec.java b/server-core/src/main/java/io/onedev/server/buildspec/BuildSpec.java
index aff3d3a31b..9f35e2b665 100644
--- a/server-core/src/main/java/io/onedev/server/buildspec/BuildSpec.java
+++ b/server-core/src/main/java/io/onedev/server/buildspec/BuildSpec.java
@@ -1,1430 +1,1436 @@
-package io.onedev.server.buildspec;
-
-import java.io.Serializable;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import 
java.util.Set; -import java.util.Stack; -import java.util.function.Function; - -import javax.annotation.Nullable; -import javax.validation.ConstraintValidatorContext; -import javax.validation.ConstraintViolation; -import javax.validation.Valid; -import javax.validation.ValidationException; -import javax.validation.Validator; - -import org.apache.commons.lang3.SerializationUtils; -import org.apache.wicket.Component; -import org.eclipse.jgit.revwalk.RevCommit; -import org.yaml.snakeyaml.DumperOptions.FlowStyle; -import org.yaml.snakeyaml.nodes.MappingNode; -import org.yaml.snakeyaml.nodes.Node; -import org.yaml.snakeyaml.nodes.NodeTuple; -import org.yaml.snakeyaml.nodes.ScalarNode; -import org.yaml.snakeyaml.nodes.SequenceNode; -import org.yaml.snakeyaml.nodes.Tag; - -import com.google.common.base.Preconditions; -import com.google.common.cache.CacheBuilder; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.LoadingCache; -import com.google.common.collect.Lists; - -import io.onedev.commons.codeassist.InputCompletion; -import io.onedev.commons.codeassist.InputStatus; -import io.onedev.commons.codeassist.InputSuggestion; -import io.onedev.commons.utils.ExceptionUtils; -import io.onedev.commons.utils.LinearRange; -import io.onedev.commons.utils.StringUtils; -import io.onedev.commons.utils.WordUtils; -import io.onedev.server.OneDev; -import io.onedev.server.buildspec.job.Job; -import io.onedev.server.buildspec.job.JobDependency; -import io.onedev.server.buildspec.param.ParamUtils; -import io.onedev.server.buildspec.param.spec.ParamSpec; -import io.onedev.server.buildspec.step.Step; -import io.onedev.server.buildspec.step.StepTemplate; -import io.onedev.server.buildspec.step.UseTemplateStep; -import io.onedev.server.migration.VersionedYamlDoc; -import io.onedev.server.migration.XmlBuildSpecMigrator; -import io.onedev.server.util.ComponentContext; -import io.onedev.server.util.JobSecretAuthorizationContext; -import io.onedev.server.util.validation.Validatable; -import io.onedev.server.util.validation.annotation.ClassValidating; -import io.onedev.server.web.editable.annotation.Editable; -import io.onedev.server.web.page.project.blob.ProjectBlobPage; -import io.onedev.server.web.util.SuggestionUtils; -import io.onedev.server.web.util.WicketUtils; - -@Editable -@ClassValidating -public class BuildSpec implements Serializable, Validatable { - - private static final long serialVersionUID = 1L; - - private static final LoadingCache parseCache = - CacheBuilder.newBuilder().softValues().build(new CacheLoader() { - - @Override - public byte[] load(String key) { - String buildSpecString = key; - if (buildSpecString.trim().startsWith(" jobs = new ArrayList<>(); - - private List stepTemplates = new ArrayList<>(); - - private List services = new ArrayList<>(); - - private List properties = new ArrayList<>(); - - private List imports = new ArrayList<>(); - - private transient List importedBuildSpecs; - - private transient Map jobMap; - - private transient Map stepTemplateMap; - - private transient Map serviceMap; - - private transient Map propertyMap; - - @Editable - @Valid - public List getJobs() { - return jobs; - } - - public void setJobs(List jobs) { - this.jobs = jobs; - jobMap = null; - } - - @Editable - @Valid - public List getStepTemplates() { - return stepTemplates; - } - - public void setStepTemplates(List stepTemplates) { - this.stepTemplates = stepTemplates; - stepTemplateMap = null; - } - - @Editable - @Valid - public List getServices() { - return services; - } - - public 
void setServices(List services) { - this.services = services; - serviceMap = null; - } - - @Editable - public List getProperties() { - return properties; - } - - public void setProperties(List properties) { - this.properties = properties; - propertyMap = null; - } - - @Editable - @Valid - public List getImports() { - return imports; - } - - public void setImports(List imports) { - this.imports = imports; - importedBuildSpecs = null; - } - - private List getImportedBuildSpecs(Collection projectChain) { - if (importedBuildSpecs == null) { - importedBuildSpecs = new ArrayList<>(); - for (Import aImport: getImports()) { - if (!projectChain.contains(aImport.getProjectPath())) { - Collection newProjectChain = new HashSet<>(projectChain); - newProjectChain.add(aImport.getProjectPath()); - try { - BuildSpec importedBuildSpec = aImport.getBuildSpec(); - RevCommit commit = aImport.getProject().getRevCommit(aImport.getTag(), true); - JobSecretAuthorizationContext.push(new JobSecretAuthorizationContext(aImport.getProject(), commit, null)); - try { - importedBuildSpecs.addAll(importedBuildSpec.getImportedBuildSpecs(newProjectChain)); - } finally { - JobSecretAuthorizationContext.pop(); - } - importedBuildSpecs.add(importedBuildSpec); - } catch (Exception e) { - // Ignore here as we rely on this method to show viewer/editor - // Errors relating to this will be shown when validated - } - } - } - } - return importedBuildSpecs; - } - - public Map getJobMap() { - if (jobMap == null) { - jobMap = new LinkedHashMap<>(); - for (BuildSpec buildSpec: getImportedBuildSpecs(new HashSet<>())) { - for (Job job: buildSpec.getJobs()) - jobMap.put(job.getName(), job); - } - for (Job job: getJobs()) - jobMap.put(job.getName(), job); - } - return jobMap; - } - - public Map getPropertyMap() { - if (propertyMap == null) { - propertyMap = new LinkedHashMap<>(); - for (BuildSpec buildSpec: getImportedBuildSpecs(new HashSet<>())) { - for (Property property: buildSpec.getProperties()) - propertyMap.put(property.getName(), property); - } - for (Property property: getProperties()) - propertyMap.put(property.getName(), property); - } - return propertyMap; - } - - public Map getStepTemplateMap() { - if (stepTemplateMap == null) { - stepTemplateMap = new LinkedHashMap<>(); - for (BuildSpec buildSpec: getImportedBuildSpecs(new HashSet<>())) { - for (StepTemplate template: buildSpec.getStepTemplates()) - stepTemplateMap.put(template.getName(), template); - } - for (StepTemplate template: getStepTemplates()) - stepTemplateMap.put(template.getName(), template); - } - return stepTemplateMap; - } - - public Map getServiceMap() { - if (serviceMap == null) { - serviceMap = new LinkedHashMap<>(); - for (BuildSpec buildSpec: getImportedBuildSpecs(new HashSet<>())) { - for (Service service: buildSpec.getServices()) - serviceMap.put(service.getName(), service); - } - for (Service service: services) - serviceMap.put(service.getName(), service); - } - return serviceMap; - } - - private int getImportIndex(String namedElementName, Function> namedElementMapProvider) { - for (int i=imports.size()-1; i>=0; i--) { - if (namedElementMapProvider.apply(imports.get(i).getBuildSpec()).containsKey(namedElementName)) - return i; - } - return -1; - } - - private boolean validateImportedElements(ConstraintValidatorContext context, - List namedElements, Function> namedElementMapProvider, String elementTypeName) { - boolean isValid = true; - Validator validator = OneDev.getInstance(Validator.class); - for (T element: namedElementMapProvider.apply(this).values()) 
{ - int elementIndex = namedElements.indexOf(element); - if (elementIndex == -1) { - int importIndex = getImportIndex(element.getName(), namedElementMapProvider); - for (ConstraintViolation violation: validator.validate(element)) { - String location; - if (violation.getPropertyPath().toString().length() != 0) - location = "location: " + violation.getPropertyPath() + ", "; - else - location = ""; - - String errorMessage = String.format("Error validating imported %s (%s: %s, %serror message: %s)", - elementTypeName, elementTypeName, element.getName(), location, violation.getMessage()); - context.buildConstraintViolationWithTemplate(errorMessage) - .addPropertyNode(PROP_IMPORTS) - .addBeanNode() - .inIterable().atIndex(importIndex) - .addConstraintViolation(); - isValid = false; - } - } - } - return isValid; - } - - @Override - public boolean isValid(ConstraintValidatorContext context) { - boolean isValid = true; - - if (!validateImportedElements(context, jobs, it->it.getJobMap(), "job")) - isValid = false; - if (!validateImportedElements(context, services, it->it.getServiceMap(), "service")) - isValid = false; - if (!validateImportedElements(context, stepTemplates, it->it.getStepTemplateMap(), "step template")) - isValid = false; - if (!validateImportedElements(context, properties, it->it.getPropertyMap(), "property")) - isValid = false; - - Set jobNames = new HashSet<>(); - for (Job job: jobs) { - if (!jobNames.add(job.getName())) { - context.buildConstraintViolationWithTemplate("Duplicate job name (" + job.getName() + ")") - .addPropertyNode(PROP_JOBS).addConstraintViolation(); - isValid = false; - } - } - Set serviceNames = new HashSet<>(); - for (Service service: services) { - if (!serviceNames.add(service.getName())) { - context.buildConstraintViolationWithTemplate("Duplicate service name (" + service.getName() + ")") - .addPropertyNode(PROP_SERVICES).addConstraintViolation(); - isValid = false; - } - } - Set stepTemplateNames = new HashSet<>(); - for (StepTemplate template: stepTemplates) { - if (!stepTemplateNames.add(template.getName())) { - context.buildConstraintViolationWithTemplate("Duplicate template name (" + template.getName() + ")") - .addPropertyNode(PROP_STEP_TEMPLATES).addConstraintViolation(); - isValid = false; - } - } - Set propertyNames = new HashSet<>(); - for (Property property: properties) { - if (!propertyNames.add(property.getName())) { - context.buildConstraintViolationWithTemplate("Duplicate property name (" + property.getName() + ")") - .addPropertyNode(PROP_PROPERTIES).addConstraintViolation(); - isValid = false; - } - } - Set importProjectNames = new HashSet<>(); - for (Import aImport: imports) { - if (!importProjectNames.add(aImport.getProjectPath())) { - context.buildConstraintViolationWithTemplate("Duplicate import (" + aImport.getProjectPath() + ")") - .addPropertyNode(PROP_IMPORTS).addConstraintViolation(); - isValid = false; - } - } - - if (isValid) { - for (StepTemplate template: getStepTemplateMap().values()) { - int templateIndex = stepTemplates.indexOf(template); - for (int stepIndex=0; stepIndex()); - } catch (Exception e) { - if (templateIndex != -1) { - context.buildConstraintViolationWithTemplate(e.getMessage()) - .addPropertyNode(PROP_STEP_TEMPLATES) - .addPropertyNode(StepTemplate.PROP_STEPS) - .inIterable().atIndex(templateIndex) - .addPropertyNode(UseTemplateStep.PROP_TEMPLATE_NAME) - .inIterable().atIndex(stepIndex) - .addConstraintViolation(); - } else { - int importIndex = getImportIndex(template.getName(), it->it.getStepTemplateMap()); - 
String errorMessage = String.format("Error validating imported step template (step template: %s, error message: %s)", - template.getName(), e.getMessage()); - context.buildConstraintViolationWithTemplate(errorMessage) - .addPropertyNode(PROP_IMPORTS) - .addBeanNode() - .inIterable().atIndex(importIndex) - .addConstraintViolation(); - } - isValid = false; - } - } - } - } - - for (Job job: getJobMap().values()) { - int jobIndex = jobs.indexOf(job); - try { - checkDependencies(job, new ArrayList<>()); - } catch (Exception e) { - if (jobIndex != -1) { - context.buildConstraintViolationWithTemplate(e.getMessage()) - .addPropertyNode(PROP_JOBS) - .addPropertyNode(Job.PROP_JOB_DEPENDENCIES) - .inIterable().atIndex(jobIndex) - .addConstraintViolation(); - } else { - int importIndex = getImportIndex(job.getName(), it->it.getJobMap()); - String errorMessage = String.format("Error validating imported job (job: %s, error message: %s)", - job.getName(), e.getMessage()); - context.buildConstraintViolationWithTemplate(errorMessage) - .addPropertyNode(PROP_IMPORTS) - .addBeanNode() - .inIterable().atIndex(importIndex) - .addConstraintViolation(); - } - isValid = false; - } - - for (String serviceName: job.getRequiredServices()) { - if (!getServiceMap().containsKey(serviceName)) { - context.buildConstraintViolationWithTemplate("Undefined service (" + serviceName + ")") - .addPropertyNode(PROP_JOBS) - .addPropertyNode(Job.PROP_REQUIRED_SERVICES) - .inIterable().atIndex(jobIndex) - .addConstraintViolation(); - isValid = false; - } - } - - for (int stepIndex=0; stepIndex()); - } catch (Exception e) { - if (jobIndex != -1) { - context.buildConstraintViolationWithTemplate(e.getMessage()) - .addPropertyNode(PROP_JOBS) - .addPropertyNode(Job.PROP_STEPS) - .inIterable().atIndex(jobIndex) - .addPropertyNode(UseTemplateStep.PROP_TEMPLATE_NAME) - .inIterable().atIndex(stepIndex) - .addConstraintViolation(); - } else { - int importIndex = getImportIndex(job.getName(), it->it.getJobMap()); - String errorMessage = String.format("Error validating imported job (job: %s, location: steps[%d].templateName, error message: %s)", - job.getName(), stepIndex, e.getMessage()); - context.buildConstraintViolationWithTemplate(errorMessage) - .addPropertyNode(PROP_IMPORTS) - .addBeanNode() - .inIterable().atIndex(importIndex) - .addConstraintViolation(); - } - isValid = false; - } - } - } - } - } - - if (isValid) { - // Build spec and jobs are valid so far, we can do more validations with them safely - for (Job job: getJobMap().values()) { - int jobIndex = jobs.indexOf(job); - for (int actionIndex=0; actionIndexit.getJobMap()); - String errorMessage = String.format("Error validating imported job (job: %s, error message: %s)", - job.getName(), e.getMessage()); - context.buildConstraintViolationWithTemplate(errorMessage) - .addPropertyNode(PROP_IMPORTS) - .addBeanNode() - .inIterable().atIndex(importIndex) - .addConstraintViolation(); - } - isValid = false; - } - } - } - } - - if (!isValid) - context.disableDefaultConstraintViolation(); - return isValid; - } - - private void checkTemplateUsages(UseTemplateStep step, List templateChain) { - if(templateChain.contains(step.getTemplateName())) { - templateChain.add(step.getTemplateName()); - throw new ValidationException("Circular template usages (" + templateChain + ")"); - } else { - StepTemplate template = getStepTemplateMap().get(step.getTemplateName()); - if (template != null) { - if (templateChain.isEmpty()) { - try { - ParamUtils.validateParams(template.getParamSpecs(), 
step.getParams()); - } catch (Exception e) { - throw new ValidationException(String.format("Error validating step template parameters (%s)", e.getMessage())); - } - } - templateChain.add(step.getTemplateName()); - for (Step templateStep: template.getSteps()) { - if (templateStep instanceof UseTemplateStep) - checkTemplateUsages((UseTemplateStep) templateStep, new ArrayList<>(templateChain)); - } - } else if (templateChain.isEmpty()) { - throw new ValidationException("Step template not found (" + step.getTemplateName() + ")"); - } - } - } - - private void checkDependencies(Job job, List dependencyChain) { - for (JobDependency dependency: job.getJobDependencies()) { - if (dependencyChain.contains(dependency.getJobName())) { - dependencyChain.add(dependency.getJobName()); - throw new ValidationException("Circular dependencies (" + dependencyChain + ")"); - } else { - Job dependencyJob = getJobMap().get(dependency.getJobName()); - if (dependencyJob != null) { - if (dependencyChain.isEmpty()) { - try { - ParamUtils.validateParams(dependencyJob.getParamSpecs(), dependency.getJobParams()); - } catch (ValidationException e) { - String message = String.format("Error validating dependency job parameters (dependency job: %s, error message: %s)", - dependencyJob.getName(), e.getMessage()); - throw new ValidationException(message); - } - } - List newDependencyChain = new ArrayList<>(dependencyChain); - newDependencyChain.add(dependency.getJobName()); - checkDependencies(dependencyJob, newDependencyChain); - } else if (dependencyChain.isEmpty()) { - throw new ValidationException("Dependency job not found (" + dependency.getJobName() + ")"); - } - } - } - } - - public static List suggestOverrides(List imported, InputStatus status) { - List completions = new ArrayList<>(); - String matchWith = status.getContentBeforeCaret().toLowerCase(); - for (String each: imported) { - LinearRange match = LinearRange.match(each, matchWith); - if (match != null) { - completions.add(new InputCompletion(each, each + status.getContentAfterCaret(), - each.length(), "override imported", match)); - } - } - - return completions; - } - - @Nullable - public static BuildSpec get() { - Component component = ComponentContext.get().getComponent(); - BuildSpecAware buildSpecAware = WicketUtils.findInnermost(component, BuildSpecAware.class); - if (buildSpecAware != null) - return buildSpecAware.getBuildSpec(); - else - return null; - } - - public static List suggestVariables(String matchWith, - boolean withBuildVersion, boolean withDynamicVariables, boolean withPauseCommand) { - List suggestions = new ArrayList<>(); - BuildSpec buildSpec = get(); - if (buildSpec != null) { - ProjectBlobPage page = (ProjectBlobPage) WicketUtils.getPage(); - suggestions.addAll(SuggestionUtils.suggestVariables( - page.getProject(), buildSpec, ParamSpec.list(), - matchWith, withBuildVersion, withDynamicVariables, withPauseCommand)); - } - return suggestions; - } - - @Nullable - public static BuildSpec parse(byte[] bytes) { - String buildSpecString = new String(bytes, StandardCharsets.UTF_8); - if (StringUtils.isNotBlank(buildSpecString)) { - try { - return SerializationUtils.deserialize(parseCache.getUnchecked(buildSpecString)); - } catch (Exception e) { - BuildSpecParseException parseException = ExceptionUtils.find(e, BuildSpecParseException.class); - if (parseException != null) - throw parseException; - else - throw e; - } - } else { - return null; - } - } - - @SuppressWarnings("unused") - private void migrate1(VersionedYamlDoc doc, Stack versions) { - 
for (NodeTuple specTuple: doc.getValue()) { - if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { - NodeTuple jobTuple = itJobTuple.next(); - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("submoduleCredentials")) { - itJobTuple.remove(); - } else if (jobTupleKey.equals("projectDependencies")) { - SequenceNode projectDependenciesNode = (SequenceNode) jobTuple.getValueNode(); - for (Node projectDependenciesItem: projectDependenciesNode.getValue()) { - MappingNode projectDependencyNode = (MappingNode) projectDependenciesItem; - for (Iterator itProjectDependencyTuple = projectDependencyNode.getValue().iterator(); - itProjectDependencyTuple.hasNext();) { - NodeTuple projectDependencyTuple = itProjectDependencyTuple.next(); - if (((ScalarNode)projectDependencyTuple.getKeyNode()).getValue().equals("authentication")) - itProjectDependencyTuple.remove(); - } - } - } - } - NodeTuple cloneCredentialTuple = new NodeTuple( - new ScalarNode(Tag.STR, "cloneCredential"), - new MappingNode(new Tag("!DefaultCredential"), Lists.newArrayList(), FlowStyle.BLOCK)); - jobNode.getValue().add(cloneCredentialTuple); - } - } - } - } - - @SuppressWarnings("unused") - private void migrate2(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { - NodeTuple jobTuple = itJobTuple.next(); - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("defaultFixedIssuesFilter")) { - itJobTuple.remove(); - } else if (jobTupleKey.equals("reports")) { - SequenceNode reportsNode = (SequenceNode) jobTuple.getValueNode(); - for (Iterator itReportsItem = reportsNode.getValue().iterator(); itReportsItem.hasNext();) { - MappingNode reportNode = (MappingNode) itReportsItem.next(); - if (reportNode.getTag().getValue().equals("!JobHtmlReport")) - itReportsItem.remove(); - } - } - } - } - } - } - } - - @SuppressWarnings("unused") - private void migrate3(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("reports")) { - SequenceNode reportsNode = (SequenceNode) jobTuple.getValueNode(); - for (Node reportNode: reportsNode.getValue()) { - if (reportNode.getTag().getValue().equals("!JobJestReport")) - reportNode.setTag(new Tag("!JobJestTestReport")); - } - } - } - } - } - } - } - - @SuppressWarnings("unused") - private void migrate4(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) 
specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("triggers")) { - SequenceNode triggersNode = (SequenceNode) jobTuple.getValueNode(); - for (Node triggerNode: triggersNode.getValue()) { - if (triggerNode.getTag().getValue().equals("!PullRequestTrigger")) - triggerNode.setTag(new Tag("!PullRequestUpdateTrigger")); - } - } - } - } - } - } - } - - @SuppressWarnings("unused") - private void migrate5(VersionedYamlDoc doc, Stack versions) { - List newServiceNodes = new ArrayList<>(); - for (NodeTuple specTuple: doc.getValue()) { - if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - Node imageNode = null; - Node commandsNode = null; - Node servicesNode = null; - String jobName = null; - for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { - NodeTuple jobTuple = itJobTuple.next(); - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("name")) { - jobName = ((ScalarNode)jobTuple.getValueNode()).getValue(); - } else if (jobTupleKey.equals("image")) { - imageNode = jobTuple.getValueNode(); - itJobTuple.remove(); - } else if (jobTupleKey.equals("commands")) { - commandsNode = jobTuple.getValueNode(); - itJobTuple.remove(); - } else if (jobTupleKey.equals("services")) { - servicesNode = jobTuple.getValueNode(); - itJobTuple.remove(); - } - } - - Preconditions.checkState(jobName != null && imageNode != null && commandsNode != null); - - List stepTuples = new ArrayList<>(); - stepTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "image"), imageNode)); - stepTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "commands"), commandsNode)); - stepTuples.add(new NodeTuple( - new ScalarNode(Tag.STR, "condition"), - new ScalarNode(Tag.STR, "ALL_PREVIOUS_STEPS_WERE_SUCCESSFUL"))); - Node stepNode = new MappingNode(new Tag("!CommandStep"), stepTuples, FlowStyle.BLOCK); - Node stepsNode = new SequenceNode(Tag.SEQ, Lists.newArrayList(stepNode), FlowStyle.BLOCK); - NodeTuple stepsTuple = new NodeTuple(new ScalarNode(Tag.STR, "steps"), stepsNode); - jobNode.getValue().add(stepsTuple); - - if (servicesNode != null) { - List serviceNameNodes = new ArrayList<>(); - for (Node serviceNodeItem: ((SequenceNode) servicesNode).getValue()) { - MappingNode serviceNode = (MappingNode) serviceNodeItem; - List newServiceTuples = new ArrayList<>(); - for (NodeTuple serviceTuple: serviceNode.getValue()) { - if (((ScalarNode)serviceTuple.getKeyNode()).getValue().equals("name")) { - String newServiceName = jobName + "-" - + ((ScalarNode)serviceTuple.getValueNode()).getValue(); - serviceNameNodes.add(new ScalarNode(Tag.STR, newServiceName)); - newServiceTuples.add(new NodeTuple( - new ScalarNode(Tag.STR, "name"), - new ScalarNode(Tag.STR, newServiceName))); - } else { - newServiceTuples.add(serviceTuple); - } - } - newServiceNodes.add(new MappingNode(Tag.MAP, newServiceTuples, FlowStyle.BLOCK)); - } - jobNode.getValue().add(new NodeTuple( - new ScalarNode(Tag.STR, "requiredServices"), - new SequenceNode(Tag.SEQ, serviceNameNodes, FlowStyle.BLOCK))); - } - } - } - } - - if (!newServiceNodes.isEmpty()) { - doc.getValue().add(new NodeTuple( - new ScalarNode(Tag.STR, 
"services"), - new SequenceNode(Tag.SEQ, newServiceNodes, FlowStyle.BLOCK))); - } - } - - @SuppressWarnings("unused") - private void migrate6(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - boolean retrieveSource = false; - Node cloneCredentialNode = null; - Node cloneDepthNode = null; - Node artifactsNode = null; - SequenceNode reportsNode = null; - SequenceNode stepsNode = null; - List actionNodes = new ArrayList<>(); - for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { - NodeTuple jobTuple = itJobTuple.next(); - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("retrieveSource")) { - retrieveSource = ((ScalarNode)jobTuple.getValueNode()).getValue().equals("true"); - itJobTuple.remove(); - } else if (jobTupleKey.equals("cloneCredential")) { - cloneCredentialNode = jobTuple.getValueNode(); - itJobTuple.remove(); - } else if (jobTupleKey.equals("cloneDepth")) { - cloneDepthNode = jobTuple.getValueNode(); - itJobTuple.remove(); - } else if (jobTupleKey.equals("artifacts")) { - artifactsNode = jobTuple.getValueNode(); - itJobTuple.remove(); - } else if (jobTupleKey.equals("reports")) { - reportsNode = (SequenceNode) jobTuple.getValueNode(); - itJobTuple.remove(); - } else if (jobTupleKey.equals("steps")) { - stepsNode = (SequenceNode) jobTuple.getValueNode(); - } else if (jobTupleKey.equals("postBuildActions")) { - SequenceNode actionsNode = (SequenceNode) jobTuple.getValueNode(); - for (Iterator itActionNode = actionsNode.getValue().iterator(); itActionNode.hasNext();) { - MappingNode actionNode = (MappingNode) itActionNode.next(); - String tagName = actionNode.getTag().getValue(); - if (tagName.equals("!CreateTagAction") || tagName.equals("!CloseMilestoneAction")) { - actionNodes.add(actionNode); - itActionNode.remove(); - } - } - if (actionsNode.getValue().isEmpty()) - itJobTuple.remove(); - } - } - Preconditions.checkState(cloneCredentialNode != null && stepsNode != null); - if (retrieveSource) { - List stepTuples = new ArrayList<>(); - stepTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "cloneCredential"), cloneCredentialNode)); - if (cloneDepthNode != null) - stepTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "cloneDepth"), cloneDepthNode)); - stepTuples.add(new NodeTuple( - new ScalarNode(Tag.STR, "condition"), - new ScalarNode(Tag.STR, "ALL_PREVIOUS_STEPS_WERE_SUCCESSFUL"))); - Node stepNode = new MappingNode(new Tag("!CheckoutStep"), stepTuples, FlowStyle.BLOCK); - stepsNode.getValue().add(0, stepNode); - } - if (artifactsNode != null) { - List stepTuples = new ArrayList<>(); - stepTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "artifacts"), artifactsNode)); - stepTuples.add(new NodeTuple( - new ScalarNode(Tag.STR, "condition"), - new ScalarNode(Tag.STR, "ALL_PREVIOUS_STEPS_WERE_SUCCESSFUL"))); - Node stepNode = new MappingNode(new Tag("!PublishArtifactStep"), stepTuples, FlowStyle.BLOCK); - stepsNode.getValue().add(stepNode); - } - if (reportsNode != null) { - for (Node reportsNodeItem: reportsNode.getValue()) { - MappingNode reportNode = (MappingNode) reportsNodeItem; - List stepTuples = new ArrayList<>(); - stepTuples.addAll(reportNode.getValue()); - stepTuples.add(new NodeTuple( - new ScalarNode(Tag.STR, "condition"), - new 
ScalarNode(Tag.STR, "ALWAYS"))); - String tagName = reportNode.getTag().getValue(); - tagName = tagName.replaceFirst("Job", "Publish") + "Step"; - Node stepNode = new MappingNode(new Tag(tagName), stepTuples, FlowStyle.BLOCK); - stepsNode.getValue().add(stepNode); - } - } - for (MappingNode actionNode: actionNodes) { - String tagName = actionNode.getTag().getValue().replace("Action", "Step"); - List stepTuples = new ArrayList<>(); - for (NodeTuple tuple: actionNode.getValue()) { - String key = ((ScalarNode)tuple.getKeyNode()).getValue(); - if (!key.equals("condition")) - stepTuples.add(tuple); - } - stepTuples.add(new NodeTuple( - new ScalarNode(Tag.STR, "condition"), - new ScalarNode(Tag.STR, "ALL_PREVIOUS_STEPS_WERE_SUCCESSFUL"))); - Node stepNode = new MappingNode(new Tag(tagName), stepTuples, FlowStyle.BLOCK); - stepsNode.getValue().add(stepNode); - } - } - } - } - - for (NodeTuple specTuple: doc.getValue()) { - String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if (specObjectKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("steps")) { - SequenceNode stepsNode = (SequenceNode) jobTuple.getValueNode(); - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - String tagName = stepNode.getTag().getValue(); - String stepName = WordUtils.uncamel(tagName.substring(1).replace("Step", "")).toLowerCase(); - stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "name"), - new ScalarNode(Tag.STR, stepName))); - } - } - } - } - } else if (specObjectKey.equals("stepTemplates")) { - SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); - for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { - MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; - for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { - String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); - if (stepTemplateTupleKey.equals("steps")) { - SequenceNode stepsNode = (SequenceNode) stepTemplateTuple.getValueNode(); - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - String tagName = stepNode.getTag().getValue(); - String stepName = WordUtils.uncamel(tagName.substring(1).replace("Step", "")).toLowerCase(); - stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "name"), - new ScalarNode(Tag.STR, stepName))); - } - } - } - } - } - } - } - - @SuppressWarnings("unused") - private void migrate7(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - String specKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if (specKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("paramSpecs")) { - SequenceNode paramsNode = (SequenceNode) jobTuple.getValueNode(); - for (Node paramsNodeItem: paramsNode.getValue()) { - MappingNode paramNode = (MappingNode) paramsNodeItem; - String paramType = paramNode.getTag().getValue(); - if 
(paramType.equals("!NumberParam")) { - paramNode.setTag(new Tag("!IntegerParam")); - } else if (paramType.equals("!TextParam")) { - NodeTuple multilineTuple = new NodeTuple( - new ScalarNode(Tag.STR, "multiline"), - new ScalarNode(Tag.STR, "false")); - paramNode.getValue().add(multilineTuple); - } - } - } - } - } - } else if (specKey.equals("stepTemplates")) { - SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); - for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { - MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; - for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { - String stemTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); - if (stemTemplateTupleKey.equals("paramSpecs")) { - SequenceNode paramsNode = (SequenceNode) stepTemplateTuple.getValueNode(); - for (Node paramsNodeItem: paramsNode.getValue()) { - MappingNode paramNode = (MappingNode) paramsNodeItem; - String paramType = paramNode.getTag().getValue(); - if (paramType.equals("!NumberParam")) { - paramNode.setTag(new Tag("!IntegerParam")); - } else if (paramType.equals("!TextParam")) { - NodeTuple multilineTuple = new NodeTuple( - new ScalarNode(Tag.STR, "multiline"), - new ScalarNode(Tag.STR, "false")); - paramNode.getValue().add(multilineTuple); - } - } - } - } - } - } - } - } - - @SuppressWarnings("unused") - private void migrate8(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - String specKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if (specKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("projectDependencies")) { - SequenceNode projectDependenciesNode = (SequenceNode) jobTuple.getValueNode(); - for (Node projectDependenciesNodeItem: projectDependenciesNode.getValue()) { - MappingNode projectDependencyNode = (MappingNode) projectDependenciesNodeItem; - String buildNumber = null; - for (Iterator itProjectDependencyTuple = projectDependencyNode.getValue().iterator(); itProjectDependencyTuple.hasNext();) { - NodeTuple projectDependencyTuple = itProjectDependencyTuple.next(); - String projectDependencyTupleKey = ((ScalarNode)projectDependencyTuple.getKeyNode()).getValue(); - if (projectDependencyTupleKey.equals("buildNumber")) { - buildNumber = ((ScalarNode)projectDependencyTuple.getValueNode()).getValue(); - itProjectDependencyTuple.remove(); - break; - } - } - Preconditions.checkNotNull(buildNumber); - - List buildProviderTuples = new ArrayList<>(); - buildProviderTuples.add(new NodeTuple( - new ScalarNode(Tag.STR, "buildNumber"), - new ScalarNode(Tag.STR, buildNumber))); - Node buildProviderNode = new MappingNode(new Tag("!SpecifiedBuild"), buildProviderTuples, FlowStyle.BLOCK); - projectDependencyNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "buildProvider"), buildProviderNode)); - } - } - } - } - } - } - } - - @SuppressWarnings("unused") - private void migrate9(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if (specObjectKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode 
= (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("steps")) { - SequenceNode stepsNode = (SequenceNode) jobTuple.getValueNode(); - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - if (stepNode.getTag().getValue().equals("!CommandStep")) { - stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "useTTY"), - new ScalarNode(Tag.BOOL, "false"))); - } - } - } - } - } - } else if (specObjectKey.equals("stepTemplates")) { - SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); - for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { - MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; - for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { - String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); - if (stepTemplateTupleKey.equals("steps")) { - SequenceNode stepsNode = (SequenceNode) stepTemplateTuple.getValueNode(); - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - if (stepNode.getTag().getValue().equals("!CommandStep")) { - stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "useTTY"), - new ScalarNode(Tag.BOOL, "false"))); - } - } - } - } - } - } - } - } - - @SuppressWarnings("unused") - private void migrate10(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if (specObjectKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("cpuRequirement")) { - ScalarNode cpuRequirementNode = (ScalarNode) jobTuple.getValueNode(); - String cpuRequirement = cpuRequirementNode.getValue(); - cpuRequirementNode.setValue(cpuRequirement.substring(0, cpuRequirement.length()-1)); - } else if (jobTupleKey.equals("memoryRequirement")) { - ScalarNode memoryRequirementNode = (ScalarNode) jobTuple.getValueNode(); - String memoryRequirement = memoryRequirementNode.getValue(); - memoryRequirementNode.setValue(memoryRequirement.substring(0, memoryRequirement.length()-1)); - } - } - } - } else if (specObjectKey.equals("services")) { - SequenceNode servicesNode = (SequenceNode) specTuple.getValueNode(); - for (Node servicesNodeItem: servicesNode.getValue()) { - MappingNode serviceNode = (MappingNode) servicesNodeItem; - for (NodeTuple serviceTuple: serviceNode.getValue()) { - String serviceTupleKey = ((ScalarNode)serviceTuple.getKeyNode()).getValue(); - if (serviceTupleKey.equals("cpuRequirement")) { - ScalarNode cpuRequirementNode = (ScalarNode) serviceTuple.getValueNode(); - String cpuRequirement = cpuRequirementNode.getValue(); - cpuRequirementNode.setValue(cpuRequirement.substring(0, cpuRequirement.length()-1)); - } else if (serviceTupleKey.equals("memoryRequirement")) { - ScalarNode memoryRequirementNode = (ScalarNode) serviceTuple.getValueNode(); - String memoryRequirement = memoryRequirementNode.getValue(); - memoryRequirementNode.setValue(memoryRequirement.substring(0, memoryRequirement.length()-1)); - } - } - } - } - } - } - - @SuppressWarnings("unused") - private void 
migrate11(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if (specObjectKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("steps")) { - SequenceNode stepsNode = (SequenceNode) jobTuple.getValueNode(); - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - if (stepNode.getTag().getValue().equals("!CheckoutStep")) { - stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "withLfs"), - new ScalarNode(Tag.BOOL, "false"))); - stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "withSubmodules"), - new ScalarNode(Tag.BOOL, "true"))); - } - } - } - } - } - } else if (specObjectKey.equals("stepTemplates")) { - SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); - for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { - MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; - for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { - String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); - if (stepTemplateTupleKey.equals("steps")) { - SequenceNode stepsNode = (SequenceNode) stepTemplateTuple.getValueNode(); - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - if (stepNode.getTag().getValue().equals("!CheckoutStep")) { - stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "withLfs"), - new ScalarNode(Tag.BOOL, "false"))); - stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "withSubmodules"), - new ScalarNode(Tag.BOOL, "true"))); - } - } - } - } - } - } - } - } - - @SuppressWarnings("unused") - private void migrate12(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if (specObjectKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("steps")) { - SequenceNode stepsNode = (SequenceNode) jobTuple.getValueNode(); - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - if (stepNode.getTag().getValue().equals("!PublishJestTestReportStep")) - stepNode.setTag(new Tag("!PublishJestReportStep")); - } - } - } - } - } else if (specObjectKey.equals("stepTemplates")) { - SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); - for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { - MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; - for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { - String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); - if (stepTemplateTupleKey.equals("steps")) { - SequenceNode stepsNode = (SequenceNode) stepTemplateTuple.getValueNode(); - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - if 
(stepNode.getTag().getValue().equals("!PublishJestTestReportStep")) - stepNode.setTag(new Tag("!PublishJestReportStep")); - } - } - } - } - } - } - } - - @SuppressWarnings("unused") - private void migrate13(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - String specTupleKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if (specTupleKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { - NodeTuple jobTuple = itJobTuple.next(); - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("projectDependencies")) { - SequenceNode projectDependenciesNode = (SequenceNode) jobTuple.getValueNode(); - for (Node projectDependenciesItem: projectDependenciesNode.getValue()) { - MappingNode projectDependencyNode = (MappingNode) projectDependenciesItem; - for (Iterator itProjectDependencyTuple = projectDependencyNode.getValue().iterator(); - itProjectDependencyTuple.hasNext();) { - NodeTuple projectDependencyTuple = itProjectDependencyTuple.next(); - ScalarNode projectDependencyTupleKeyNode = ((ScalarNode)projectDependencyTuple.getKeyNode()); - if (projectDependencyTupleKeyNode.getValue().equals("projectName")) - projectDependencyTupleKeyNode.setValue("projectPath"); - } - } - } - } - } - } else if (specTupleKey.equals("imports")) { - SequenceNode importsNode = (SequenceNode) specTuple.getValueNode(); - for (Node importsNodeItem: importsNode.getValue()) { - MappingNode importNode = (MappingNode) importsNodeItem; - for (Iterator itImportTuple = importNode.getValue().iterator(); itImportTuple.hasNext();) { - NodeTuple importTuple = itImportTuple.next(); - ScalarNode importTupleKeyNode = (ScalarNode)importTuple.getKeyNode(); - if (importTupleKeyNode.getValue().equals("projectName")) - importTupleKeyNode.setValue("projectPath"); - } - } - } - } - } - - private void migrate14_steps(SequenceNode stepsNode) { - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - if (stepNode.getTag().getValue().equals("!CommandStep")) { - Node commandsNode = null; - for (Iterator itStepNodeTuple = stepNode.getValue().iterator(); itStepNodeTuple.hasNext();) { - NodeTuple stepNodeTuple = itStepNodeTuple.next(); - if (((ScalarNode)stepNodeTuple.getKeyNode()).getValue().equals("commands")) { - commandsNode = stepNodeTuple.getValueNode(); - itStepNodeTuple.remove(); - break; - } - } - if (commandsNode != null) { - List interpreterTuples = new ArrayList<>(); - interpreterTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "commands"), commandsNode)); - stepNode.getValue().add(new NodeTuple( - new ScalarNode(Tag.STR, "interpreter"), - new MappingNode(new Tag("!DefaultInterpreter"), interpreterTuples, FlowStyle.BLOCK))); - } - stepNode.getValue().add(new NodeTuple( - new ScalarNode(Tag.STR, "runInContainer"), - new ScalarNode(Tag.BOOL, "true"))); - } - } - } - - @SuppressWarnings("unused") - private void migrate14(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if (specObjectKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for 
(NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("steps")) - migrate14_steps((SequenceNode) jobTuple.getValueNode()); - } - } - } else if (specObjectKey.equals("stepTemplates")) { - SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); - for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { - MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; - for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { - String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); - if (stepTemplateTupleKey.equals("steps")) - migrate14_steps((SequenceNode) stepTemplateTuple.getValueNode()); - } - } - } - } - } - - private void migrate15_steps(SequenceNode stepsNode) { - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - if (stepNode.getTag().getValue().equals("!BuildImageStep")) { - for (Iterator itStepNodeTuple = stepNode.getValue().iterator(); itStepNodeTuple.hasNext();) { - NodeTuple stepNodeTuple = itStepNodeTuple.next(); - String key = ((ScalarNode)stepNodeTuple.getKeyNode()).getValue(); - if (key.equals("useTTY") || key.equals("login")) - itStepNodeTuple.remove(); - } - } - } - } - - @SuppressWarnings("unused") - private void migrate15(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if (specObjectKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("steps")) - migrate15_steps((SequenceNode) jobTuple.getValueNode()); - } - } - } else if (specObjectKey.equals("stepTemplates")) { - SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); - for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { - MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; - for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { - String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); - if (stepTemplateTupleKey.equals("steps")) - migrate15_steps((SequenceNode) stepTemplateTuple.getValueNode()); - } - } - } - } - } - - private void migrate16_steps(SequenceNode stepsNode) { - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - if (stepNode.getTag().getValue().equals("!CommandStep")) { - for (Iterator itStepNodeTuple = stepNode.getValue().iterator(); itStepNodeTuple.hasNext();) { - NodeTuple stepNodeTuple = itStepNodeTuple.next(); - String key = ((ScalarNode)stepNodeTuple.getKeyNode()).getValue(); - if (key.equals("interpreter")) { - MappingNode interpreterNode = (MappingNode) stepNodeTuple.getValueNode(); - if (interpreterNode.getTag().getValue().equals("!BashInterpreter")) { - interpreterNode.setTag(new Tag("!ShellInterpreter")); - interpreterNode.getValue().add(new NodeTuple( - new ScalarNode(Tag.STR, "shell"), new ScalarNode(Tag.STR, "bash"))); - } - } - } - } - } - } - - @SuppressWarnings("unused") - private void migrate16(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if 
(specObjectKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("steps")) - migrate16_steps((SequenceNode) jobTuple.getValueNode()); - } - } - } else if (specObjectKey.equals("stepTemplates")) { - SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); - for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { - MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; - for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { - String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); - if (stepTemplateTupleKey.equals("steps")) - migrate16_steps((SequenceNode) stepTemplateTuple.getValueNode()); - } - } - } - } - } - - @SuppressWarnings("unused") - private void migrate17(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - List actionNodes = new ArrayList<>(); - for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { - NodeTuple jobTuple = itJobTuple.next(); - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("postBuildActions")) { - SequenceNode actionsNode = (SequenceNode) jobTuple.getValueNode(); - for (Node actionNodeItem: actionsNode.getValue()) { - MappingNode actionNode = (MappingNode) actionNodeItem; - if (actionNode.getTag().getValue().equals("!CreateIssueAction")) { - actionNode.getValue().add(new NodeTuple( - new ScalarNode(Tag.STR, "issueConfidential"), new ScalarNode(Tag.STR, "false"))); - } - } - } - } - } - } - } - } - - private void migrate18_steps(SequenceNode stepsNode) { - for (Node stepsNodeItem: stepsNode.getValue()) { - MappingNode stepNode = (MappingNode) stepsNodeItem; - if (stepNode.getTag().getValue().equals("!PullRepository")) { - stepNode.getValue().add(new NodeTuple( - new ScalarNode(Tag.STR, "syncToChildProject"), new ScalarNode(Tag.STR, "false"))); - } - } - } - - @SuppressWarnings("unused") - private void migrate18(VersionedYamlDoc doc, Stack versions) { - for (NodeTuple specTuple: doc.getValue()) { - String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); - if (specObjectKey.equals("jobs")) { - SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); - for (Node jobsNodeItem: jobsNode.getValue()) { - MappingNode jobNode = (MappingNode) jobsNodeItem; - for (NodeTuple jobTuple: jobNode.getValue()) { - String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); - if (jobTupleKey.equals("steps")) - migrate18_steps((SequenceNode) jobTuple.getValueNode()); - } - } - } else if (specObjectKey.equals("stepTemplates")) { - SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); - for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { - MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; - for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { - String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); - if (stepTemplateTupleKey.equals("steps")) - 
migrate18_steps((SequenceNode) stepTemplateTuple.getValueNode()); - } - } - } - } - } - -} +package io.onedev.server.buildspec; + +import com.google.common.base.Preconditions; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import com.google.common.collect.Lists; +import io.onedev.commons.codeassist.InputCompletion; +import io.onedev.commons.codeassist.InputStatus; +import io.onedev.commons.codeassist.InputSuggestion; +import io.onedev.commons.utils.ExceptionUtils; +import io.onedev.commons.utils.LinearRange; +import io.onedev.commons.utils.StringUtils; +import io.onedev.commons.utils.WordUtils; +import io.onedev.server.OneDev; +import io.onedev.server.buildspec.job.Job; +import io.onedev.server.buildspec.job.JobDependency; +import io.onedev.server.buildspec.param.ParamUtils; +import io.onedev.server.buildspec.param.spec.ParamSpec; +import io.onedev.server.buildspec.step.Step; +import io.onedev.server.buildspec.step.StepTemplate; +import io.onedev.server.buildspec.step.UseTemplateStep; +import io.onedev.server.migration.VersionedYamlDoc; +import io.onedev.server.migration.XmlBuildSpecMigrator; +import io.onedev.server.util.ComponentContext; +import io.onedev.server.util.JobSecretAuthorizationContext; +import io.onedev.server.util.validation.Validatable; +import io.onedev.server.util.validation.annotation.ClassValidating; +import io.onedev.server.web.editable.annotation.Editable; +import io.onedev.server.web.page.project.blob.ProjectBlobPage; +import io.onedev.server.web.util.SuggestionUtils; +import io.onedev.server.web.util.WicketUtils; +import org.apache.commons.lang3.SerializationUtils; +import org.apache.wicket.Component; +import org.eclipse.jgit.revwalk.RevCommit; +import org.yaml.snakeyaml.DumperOptions.FlowStyle; +import org.yaml.snakeyaml.nodes.*; + +import javax.annotation.Nullable; +import javax.validation.*; +import java.io.Serializable; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.function.Function; + +@Editable +@ClassValidating +public class BuildSpec implements Serializable, Validatable { + + private static final long serialVersionUID = 1L; + + private static final LoadingCache parseCache = + CacheBuilder.newBuilder().softValues().build(new CacheLoader() { + + @Override + public byte[] load(String key) { + String buildSpecString = key; + if (buildSpecString.trim().startsWith(" jobs = new ArrayList<>(); + + private List stepTemplates = new ArrayList<>(); + + private List services = new ArrayList<>(); + + private List properties = new ArrayList<>(); + + private List imports = new ArrayList<>(); + + private transient List importedBuildSpecs; + + private transient Map jobMap; + + private transient Map stepTemplateMap; + + private transient Map serviceMap; + + private transient Map propertyMap; + + @Editable + @Valid + public List getJobs() { + return jobs; + } + + public void setJobs(List jobs) { + this.jobs = jobs; + jobMap = null; + } + + @Editable + @Valid + public List getStepTemplates() { + return stepTemplates; + } + + public void setStepTemplates(List stepTemplates) { + this.stepTemplates = stepTemplates; + stepTemplateMap = null; + } + + @Editable + @Valid + public List getServices() { + return services; + } + + public void setServices(List services) { + this.services = services; + serviceMap = null; + } + + @Editable + public List getProperties() { + return properties; + } + + public void setProperties(List properties) { + this.properties = 
properties; + propertyMap = null; + } + + @Editable + @Valid + public List getImports() { + return imports; + } + + public void setImports(List imports) { + this.imports = imports; + importedBuildSpecs = null; + } + + private List getImportedBuildSpecs(Collection projectChain) { + if (importedBuildSpecs == null) { + importedBuildSpecs = new ArrayList<>(); + for (Import aImport: getImports()) { + if (!projectChain.contains(aImport.getProjectPath())) { + Collection newProjectChain = new HashSet<>(projectChain); + newProjectChain.add(aImport.getProjectPath()); + try { + BuildSpec importedBuildSpec = aImport.getBuildSpec(); + RevCommit commit = aImport.getProject().getRevCommit(aImport.getTag(), true); + JobSecretAuthorizationContext.push(new JobSecretAuthorizationContext(aImport.getProject(), commit, null)); + try { + importedBuildSpecs.addAll(importedBuildSpec.getImportedBuildSpecs(newProjectChain)); + } finally { + JobSecretAuthorizationContext.pop(); + } + importedBuildSpecs.add(importedBuildSpec); + } catch (Exception e) { + // Ignore here as we rely on this method to show viewer/editor + // Errors relating to this will be shown when validated + } + } + } + } + return importedBuildSpecs; + } + + public Map getJobMap() { + if (jobMap == null) { + jobMap = new LinkedHashMap<>(); + for (BuildSpec buildSpec: getImportedBuildSpecs(new HashSet<>())) { + for (Job job: buildSpec.getJobs()) + jobMap.put(job.getName(), job); + } + for (Job job: getJobs()) + jobMap.put(job.getName(), job); + } + return jobMap; + } + + public Map getPropertyMap() { + if (propertyMap == null) { + propertyMap = new LinkedHashMap<>(); + for (BuildSpec buildSpec: getImportedBuildSpecs(new HashSet<>())) { + for (Property property: buildSpec.getProperties()) + propertyMap.put(property.getName(), property); + } + for (Property property: getProperties()) + propertyMap.put(property.getName(), property); + } + return propertyMap; + } + + public Map getStepTemplateMap() { + if (stepTemplateMap == null) { + stepTemplateMap = new LinkedHashMap<>(); + for (BuildSpec buildSpec: getImportedBuildSpecs(new HashSet<>())) { + for (StepTemplate template: buildSpec.getStepTemplates()) + stepTemplateMap.put(template.getName(), template); + } + for (StepTemplate template: getStepTemplates()) + stepTemplateMap.put(template.getName(), template); + } + return stepTemplateMap; + } + + public Map getServiceMap() { + if (serviceMap == null) { + serviceMap = new LinkedHashMap<>(); + for (BuildSpec buildSpec: getImportedBuildSpecs(new HashSet<>())) { + for (Service service: buildSpec.getServices()) + serviceMap.put(service.getName(), service); + } + for (Service service: services) + serviceMap.put(service.getName(), service); + } + return serviceMap; + } + + private int getImportIndex(String namedElementName, Function> namedElementMapProvider) { + for (int i=imports.size()-1; i>=0; i--) { + if (namedElementMapProvider.apply(imports.get(i).getBuildSpec()).containsKey(namedElementName)) + return i; + } + return -1; + } + + private boolean validateImportedElements(ConstraintValidatorContext context, + List namedElements, Function> namedElementMapProvider, String elementTypeName) { + boolean isValid = true; + Validator validator = OneDev.getInstance(Validator.class); + for (T element: namedElementMapProvider.apply(this).values()) { + int elementIndex = namedElements.indexOf(element); + if (elementIndex == -1) { + int importIndex = getImportIndex(element.getName(), namedElementMapProvider); + for (ConstraintViolation violation: 
validator.validate(element)) { + String location; + if (violation.getPropertyPath().toString().length() != 0) + location = "location: " + violation.getPropertyPath() + ", "; + else + location = ""; + + String errorMessage = String.format("Error validating imported %s (%s: %s, %serror message: %s)", + elementTypeName, elementTypeName, element.getName(), location, violation.getMessage()); + context.buildConstraintViolationWithTemplate(errorMessage) + .addPropertyNode(PROP_IMPORTS) + .addBeanNode() + .inIterable().atIndex(importIndex) + .addConstraintViolation(); + isValid = false; + } + } + } + return isValid; + } + + @Override + public boolean isValid(ConstraintValidatorContext context) { + boolean isValid = true; + + if (!validateImportedElements(context, jobs, it->it.getJobMap(), "job")) + isValid = false; + if (!validateImportedElements(context, services, it->it.getServiceMap(), "service")) + isValid = false; + if (!validateImportedElements(context, stepTemplates, it->it.getStepTemplateMap(), "step template")) + isValid = false; + if (!validateImportedElements(context, properties, it->it.getPropertyMap(), "property")) + isValid = false; + + Set jobNames = new HashSet<>(); + for (Job job: jobs) { + if (!jobNames.add(job.getName())) { + context.buildConstraintViolationWithTemplate("Duplicate job name (" + job.getName() + ")") + .addPropertyNode(PROP_JOBS).addConstraintViolation(); + isValid = false; + } + } + Set serviceNames = new HashSet<>(); + for (Service service: services) { + if (!serviceNames.add(service.getName())) { + context.buildConstraintViolationWithTemplate("Duplicate service name (" + service.getName() + ")") + .addPropertyNode(PROP_SERVICES).addConstraintViolation(); + isValid = false; + } + } + Set stepTemplateNames = new HashSet<>(); + for (StepTemplate template: stepTemplates) { + if (!stepTemplateNames.add(template.getName())) { + context.buildConstraintViolationWithTemplate("Duplicate template name (" + template.getName() + ")") + .addPropertyNode(PROP_STEP_TEMPLATES).addConstraintViolation(); + isValid = false; + } + } + Set propertyNames = new HashSet<>(); + for (Property property: properties) { + if (!propertyNames.add(property.getName())) { + context.buildConstraintViolationWithTemplate("Duplicate property name (" + property.getName() + ")") + .addPropertyNode(PROP_PROPERTIES).addConstraintViolation(); + isValid = false; + } + } + Set importProjectNames = new HashSet<>(); + for (Import aImport: imports) { + if (!importProjectNames.add(aImport.getProjectPath())) { + context.buildConstraintViolationWithTemplate("Duplicate import (" + aImport.getProjectPath() + ")") + .addPropertyNode(PROP_IMPORTS).addConstraintViolation(); + isValid = false; + } + } + + if (isValid) { + for (StepTemplate template: getStepTemplateMap().values()) { + int templateIndex = stepTemplates.indexOf(template); + for (int stepIndex=0; stepIndex()); + } catch (Exception e) { + if (templateIndex != -1) { + context.buildConstraintViolationWithTemplate(e.getMessage()) + .addPropertyNode(PROP_STEP_TEMPLATES) + .addPropertyNode(StepTemplate.PROP_STEPS) + .inIterable().atIndex(templateIndex) + .addPropertyNode(UseTemplateStep.PROP_TEMPLATE_NAME) + .inIterable().atIndex(stepIndex) + .addConstraintViolation(); + } else { + int importIndex = getImportIndex(template.getName(), it->it.getStepTemplateMap()); + String errorMessage = String.format("Error validating imported step template (step template: %s, error message: %s)", + template.getName(), e.getMessage()); + 
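// A violation found in an imported element cannot be attached to the element itself + // (it lives in another project's build spec), so it is reported against the entry of + // this spec's imports list that pulled it in, located via getImportIndex above. +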
context.buildConstraintViolationWithTemplate(errorMessage) + .addPropertyNode(PROP_IMPORTS) + .addBeanNode() + .inIterable().atIndex(importIndex) + .addConstraintViolation(); + } + isValid = false; + } + } + } + } + + for (Job job: getJobMap().values()) { + int jobIndex = jobs.indexOf(job); + try { + checkDependencies(job, new ArrayList<>()); + } catch (Exception e) { + if (jobIndex != -1) { + context.buildConstraintViolationWithTemplate(e.getMessage()) + .addPropertyNode(PROP_JOBS) + .addPropertyNode(Job.PROP_JOB_DEPENDENCIES) + .inIterable().atIndex(jobIndex) + .addConstraintViolation(); + } else { + int importIndex = getImportIndex(job.getName(), it->it.getJobMap()); + String errorMessage = String.format("Error validating imported job (job: %s, error message: %s)", + job.getName(), e.getMessage()); + context.buildConstraintViolationWithTemplate(errorMessage) + .addPropertyNode(PROP_IMPORTS) + .addBeanNode() + .inIterable().atIndex(importIndex) + .addConstraintViolation(); + } + isValid = false; + } + + for (String serviceName: job.getRequiredServices()) { + if (!getServiceMap().containsKey(serviceName)) { + context.buildConstraintViolationWithTemplate("Undefined service (" + serviceName + ")") + .addPropertyNode(PROP_JOBS) + .addPropertyNode(Job.PROP_REQUIRED_SERVICES) + .inIterable().atIndex(jobIndex) + .addConstraintViolation(); + isValid = false; + } + } + + for (int stepIndex=0; stepIndex()); + } catch (Exception e) { + if (jobIndex != -1) { + context.buildConstraintViolationWithTemplate(e.getMessage()) + .addPropertyNode(PROP_JOBS) + .addPropertyNode(Job.PROP_STEPS) + .inIterable().atIndex(jobIndex) + .addPropertyNode(UseTemplateStep.PROP_TEMPLATE_NAME) + .inIterable().atIndex(stepIndex) + .addConstraintViolation(); + } else { + int importIndex = getImportIndex(job.getName(), it->it.getJobMap()); + String errorMessage = String.format("Error validating imported job (job: %s, location: steps[%d].templateName, error message: %s)", + job.getName(), stepIndex, e.getMessage()); + context.buildConstraintViolationWithTemplate(errorMessage) + .addPropertyNode(PROP_IMPORTS) + .addBeanNode() + .inIterable().atIndex(importIndex) + .addConstraintViolation(); + } + isValid = false; + } + } + } + } + } + + if (isValid) { + // Build spec and jobs are valid so far, we can do more validations with them safely + for (Job job: getJobMap().values()) { + int jobIndex = jobs.indexOf(job); + for (int actionIndex=0; actionIndexit.getJobMap()); + String errorMessage = String.format("Error validating imported job (job: %s, error message: %s)", + job.getName(), e.getMessage()); + context.buildConstraintViolationWithTemplate(errorMessage) + .addPropertyNode(PROP_IMPORTS) + .addBeanNode() + .inIterable().atIndex(importIndex) + .addConstraintViolation(); + } + isValid = false; + } + } + } + } + + if (!isValid) + context.disableDefaultConstraintViolation(); + return isValid; + } + + private void checkTemplateUsages(UseTemplateStep step, List templateChain) { + if(templateChain.contains(step.getTemplateName())) { + templateChain.add(step.getTemplateName()); + throw new ValidationException("Circular template usages (" + templateChain + ")"); + } else { + StepTemplate template = getStepTemplateMap().get(step.getTemplateName()); + if (template != null) { + if (templateChain.isEmpty()) { + try { + ParamUtils.validateParams(template.getParamSpecs(), step.getParams()); + } catch (Exception e) { + throw new ValidationException(String.format("Error validating step template parameters (%s)", e.getMessage())); + } + } + 
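// Record this template on the chain before recursing; each nested usage receives its + // own copy of the chain, so a cycle is reported exactly on the path that forms it. +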
templateChain.add(step.getTemplateName()); + for (Step templateStep: template.getSteps()) { + if (templateStep instanceof UseTemplateStep) + checkTemplateUsages((UseTemplateStep) templateStep, new ArrayList<>(templateChain)); + } + } else if (templateChain.isEmpty()) { + throw new ValidationException("Step template not found (" + step.getTemplateName() + ")"); + } + } + } + + private void checkDependencies(Job job, List dependencyChain) { + for (JobDependency dependency: job.getJobDependencies()) { + if (dependencyChain.contains(dependency.getJobName())) { + dependencyChain.add(dependency.getJobName()); + throw new ValidationException("Circular dependencies (" + dependencyChain + ")"); + } else { + Job dependencyJob = getJobMap().get(dependency.getJobName()); + if (dependencyJob != null) { + if (dependencyChain.isEmpty()) { + try { + ParamUtils.validateParams(dependencyJob.getParamSpecs(), dependency.getJobParams()); + } catch (ValidationException e) { + String message = String.format("Error validating dependency job parameters (dependency job: %s, error message: %s)", + dependencyJob.getName(), e.getMessage()); + throw new ValidationException(message); + } + } + List newDependencyChain = new ArrayList<>(dependencyChain); + newDependencyChain.add(dependency.getJobName()); + checkDependencies(dependencyJob, newDependencyChain); + } else if (dependencyChain.isEmpty()) { + throw new ValidationException("Dependency job not found (" + dependency.getJobName() + ")"); + } + } + } + } + + public static List suggestOverrides(List imported, InputStatus status) { + List completions = new ArrayList<>(); + String matchWith = status.getContentBeforeCaret().toLowerCase(); + for (String each: imported) { + LinearRange match = LinearRange.match(each, matchWith); + if (match != null) { + completions.add(new InputCompletion(each, each + status.getContentAfterCaret(), + each.length(), "override imported", match)); + } + } + + return completions; + } + + @Nullable + public static BuildSpec get() { + Component component = ComponentContext.get().getComponent(); + BuildSpecAware buildSpecAware = WicketUtils.findInnermost(component, BuildSpecAware.class); + if (buildSpecAware != null) + return buildSpecAware.getBuildSpec(); + else + return null; + } + + public static List suggestVariables(String matchWith, + boolean withBuildVersion, boolean withDynamicVariables, boolean withPauseCommand) { + List suggestions = new ArrayList<>(); + BuildSpec buildSpec = get(); + if (buildSpec != null) { + ProjectBlobPage page = (ProjectBlobPage) WicketUtils.getPage(); + suggestions.addAll(SuggestionUtils.suggestVariables( + page.getProject(), buildSpec, ParamSpec.list(), + matchWith, withBuildVersion, withDynamicVariables, withPauseCommand)); + } + return suggestions; + } + + @Nullable + public static BuildSpec parse(byte[] bytes) { + String buildSpecString = new String(bytes, StandardCharsets.UTF_8); + if (StringUtils.isNotBlank(buildSpecString)) { + try { + return SerializationUtils.deserialize(parseCache.getUnchecked(buildSpecString)); + } catch (Exception e) { + BuildSpecParseException parseException = ExceptionUtils.find(e, BuildSpecParseException.class); + if (parseException != null) + throw parseException; + else + throw e; + } + } else { + return null; + } + } + + @SuppressWarnings("unused") + private void migrate1(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) 
specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { + NodeTuple jobTuple = itJobTuple.next(); + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("submoduleCredentials")) { + itJobTuple.remove(); + } else if (jobTupleKey.equals("projectDependencies")) { + SequenceNode projectDependenciesNode = (SequenceNode) jobTuple.getValueNode(); + for (Node projectDependenciesItem: projectDependenciesNode.getValue()) { + MappingNode projectDependencyNode = (MappingNode) projectDependenciesItem; + for (Iterator itProjectDependencyTuple = projectDependencyNode.getValue().iterator(); + itProjectDependencyTuple.hasNext();) { + NodeTuple projectDependencyTuple = itProjectDependencyTuple.next(); + if (((ScalarNode)projectDependencyTuple.getKeyNode()).getValue().equals("authentication")) + itProjectDependencyTuple.remove(); + } + } + } + } + NodeTuple cloneCredentialTuple = new NodeTuple( + new ScalarNode(Tag.STR, "cloneCredential"), + new MappingNode(new Tag("!DefaultCredential"), Lists.newArrayList(), FlowStyle.BLOCK)); + jobNode.getValue().add(cloneCredentialTuple); + } + } + } + } + + @SuppressWarnings("unused") + private void migrate2(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { + NodeTuple jobTuple = itJobTuple.next(); + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("defaultFixedIssuesFilter")) { + itJobTuple.remove(); + } else if (jobTupleKey.equals("reports")) { + SequenceNode reportsNode = (SequenceNode) jobTuple.getValueNode(); + for (Iterator itReportsItem = reportsNode.getValue().iterator(); itReportsItem.hasNext();) { + MappingNode reportNode = (MappingNode) itReportsItem.next(); + if (reportNode.getTag().getValue().equals("!JobHtmlReport")) + itReportsItem.remove(); + } + } + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate3(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("reports")) { + SequenceNode reportsNode = (SequenceNode) jobTuple.getValueNode(); + for (Node reportNode: reportsNode.getValue()) { + if (reportNode.getTag().getValue().equals("!JobJestReport")) + reportNode.setTag(new Tag("!JobJestTestReport")); + } + } + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate4(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: 
jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("triggers")) { + SequenceNode triggersNode = (SequenceNode) jobTuple.getValueNode(); + for (Node triggerNode: triggersNode.getValue()) { + if (triggerNode.getTag().getValue().equals("!PullRequestTrigger")) + triggerNode.setTag(new Tag("!PullRequestUpdateTrigger")); + } + } + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate5(VersionedYamlDoc doc, Stack versions) { + List newServiceNodes = new ArrayList<>(); + for (NodeTuple specTuple: doc.getValue()) { + if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + Node imageNode = null; + Node commandsNode = null; + Node servicesNode = null; + String jobName = null; + for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { + NodeTuple jobTuple = itJobTuple.next(); + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("name")) { + jobName = ((ScalarNode)jobTuple.getValueNode()).getValue(); + } else if (jobTupleKey.equals("image")) { + imageNode = jobTuple.getValueNode(); + itJobTuple.remove(); + } else if (jobTupleKey.equals("commands")) { + commandsNode = jobTuple.getValueNode(); + itJobTuple.remove(); + } else if (jobTupleKey.equals("services")) { + servicesNode = jobTuple.getValueNode(); + itJobTuple.remove(); + } + } + + Preconditions.checkState(jobName != null && imageNode != null && commandsNode != null); + + List stepTuples = new ArrayList<>(); + stepTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "image"), imageNode)); + stepTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "commands"), commandsNode)); + stepTuples.add(new NodeTuple( + new ScalarNode(Tag.STR, "condition"), + new ScalarNode(Tag.STR, "ALL_PREVIOUS_STEPS_WERE_SUCCESSFUL"))); + Node stepNode = new MappingNode(new Tag("!CommandStep"), stepTuples, FlowStyle.BLOCK); + Node stepsNode = new SequenceNode(Tag.SEQ, Lists.newArrayList(stepNode), FlowStyle.BLOCK); + NodeTuple stepsTuple = new NodeTuple(new ScalarNode(Tag.STR, "steps"), stepsNode); + jobNode.getValue().add(stepsTuple); + + if (servicesNode != null) { + List serviceNameNodes = new ArrayList<>(); + for (Node serviceNodeItem: ((SequenceNode) servicesNode).getValue()) { + MappingNode serviceNode = (MappingNode) serviceNodeItem; + List newServiceTuples = new ArrayList<>(); + for (NodeTuple serviceTuple: serviceNode.getValue()) { + if (((ScalarNode)serviceTuple.getKeyNode()).getValue().equals("name")) { + String newServiceName = jobName + "-" + + ((ScalarNode)serviceTuple.getValueNode()).getValue(); + serviceNameNodes.add(new ScalarNode(Tag.STR, newServiceName)); + newServiceTuples.add(new NodeTuple( + new ScalarNode(Tag.STR, "name"), + new ScalarNode(Tag.STR, newServiceName))); + } else { + newServiceTuples.add(serviceTuple); + } + } + newServiceNodes.add(new MappingNode(Tag.MAP, newServiceTuples, FlowStyle.BLOCK)); + } + jobNode.getValue().add(new NodeTuple( + new ScalarNode(Tag.STR, "requiredServices"), + new SequenceNode(Tag.SEQ, serviceNameNodes, FlowStyle.BLOCK))); + } + } + } + } + + if (!newServiceNodes.isEmpty()) { + doc.getValue().add(new NodeTuple( + new ScalarNode(Tag.STR, "services"), + new SequenceNode(Tag.SEQ, newServiceNodes, FlowStyle.BLOCK))); + } + } + + @SuppressWarnings("unused") + private void 
migrate6(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + boolean retrieveSource = false; + Node cloneCredentialNode = null; + Node cloneDepthNode = null; + Node artifactsNode = null; + SequenceNode reportsNode = null; + SequenceNode stepsNode = null; + List actionNodes = new ArrayList<>(); + for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { + NodeTuple jobTuple = itJobTuple.next(); + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("retrieveSource")) { + retrieveSource = ((ScalarNode)jobTuple.getValueNode()).getValue().equals("true"); + itJobTuple.remove(); + } else if (jobTupleKey.equals("cloneCredential")) { + cloneCredentialNode = jobTuple.getValueNode(); + itJobTuple.remove(); + } else if (jobTupleKey.equals("cloneDepth")) { + cloneDepthNode = jobTuple.getValueNode(); + itJobTuple.remove(); + } else if (jobTupleKey.equals("artifacts")) { + artifactsNode = jobTuple.getValueNode(); + itJobTuple.remove(); + } else if (jobTupleKey.equals("reports")) { + reportsNode = (SequenceNode) jobTuple.getValueNode(); + itJobTuple.remove(); + } else if (jobTupleKey.equals("steps")) { + stepsNode = (SequenceNode) jobTuple.getValueNode(); + } else if (jobTupleKey.equals("postBuildActions")) { + SequenceNode actionsNode = (SequenceNode) jobTuple.getValueNode(); + for (Iterator itActionNode = actionsNode.getValue().iterator(); itActionNode.hasNext();) { + MappingNode actionNode = (MappingNode) itActionNode.next(); + String tagName = actionNode.getTag().getValue(); + if (tagName.equals("!CreateTagAction") || tagName.equals("!CloseMilestoneAction")) { + actionNodes.add(actionNode); + itActionNode.remove(); + } + } + if (actionsNode.getValue().isEmpty()) + itJobTuple.remove(); + } + } + Preconditions.checkState(cloneCredentialNode != null && stepsNode != null); + if (retrieveSource) { + List stepTuples = new ArrayList<>(); + stepTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "cloneCredential"), cloneCredentialNode)); + if (cloneDepthNode != null) + stepTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "cloneDepth"), cloneDepthNode)); + stepTuples.add(new NodeTuple( + new ScalarNode(Tag.STR, "condition"), + new ScalarNode(Tag.STR, "ALL_PREVIOUS_STEPS_WERE_SUCCESSFUL"))); + Node stepNode = new MappingNode(new Tag("!CheckoutStep"), stepTuples, FlowStyle.BLOCK); + stepsNode.getValue().add(0, stepNode); + } + if (artifactsNode != null) { + List stepTuples = new ArrayList<>(); + stepTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "artifacts"), artifactsNode)); + stepTuples.add(new NodeTuple( + new ScalarNode(Tag.STR, "condition"), + new ScalarNode(Tag.STR, "ALL_PREVIOUS_STEPS_WERE_SUCCESSFUL"))); + Node stepNode = new MappingNode(new Tag("!PublishArtifactStep"), stepTuples, FlowStyle.BLOCK); + stepsNode.getValue().add(stepNode); + } + if (reportsNode != null) { + for (Node reportsNodeItem: reportsNode.getValue()) { + MappingNode reportNode = (MappingNode) reportsNodeItem; + List stepTuples = new ArrayList<>(); + stepTuples.addAll(reportNode.getValue()); + stepTuples.add(new NodeTuple( + new ScalarNode(Tag.STR, "condition"), + new ScalarNode(Tag.STR, "ALWAYS"))); + String tagName = reportNode.getTag().getValue(); + tagName = tagName.replaceFirst("Job", 
"Publish") + "Step"; + Node stepNode = new MappingNode(new Tag(tagName), stepTuples, FlowStyle.BLOCK); + stepsNode.getValue().add(stepNode); + } + } + for (MappingNode actionNode: actionNodes) { + String tagName = actionNode.getTag().getValue().replace("Action", "Step"); + List stepTuples = new ArrayList<>(); + for (NodeTuple tuple: actionNode.getValue()) { + String key = ((ScalarNode)tuple.getKeyNode()).getValue(); + if (!key.equals("condition")) + stepTuples.add(tuple); + } + stepTuples.add(new NodeTuple( + new ScalarNode(Tag.STR, "condition"), + new ScalarNode(Tag.STR, "ALL_PREVIOUS_STEPS_WERE_SUCCESSFUL"))); + Node stepNode = new MappingNode(new Tag(tagName), stepTuples, FlowStyle.BLOCK); + stepsNode.getValue().add(stepNode); + } + } + } + } + + for (NodeTuple specTuple: doc.getValue()) { + String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specObjectKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("steps")) { + SequenceNode stepsNode = (SequenceNode) jobTuple.getValueNode(); + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + String tagName = stepNode.getTag().getValue(); + String stepName = WordUtils.uncamel(tagName.substring(1).replace("Step", "")).toLowerCase(); + stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "name"), + new ScalarNode(Tag.STR, stepName))); + } + } + } + } + } else if (specObjectKey.equals("stepTemplates")) { + SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); + for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { + MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; + for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { + String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); + if (stepTemplateTupleKey.equals("steps")) { + SequenceNode stepsNode = (SequenceNode) stepTemplateTuple.getValueNode(); + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + String tagName = stepNode.getTag().getValue(); + String stepName = WordUtils.uncamel(tagName.substring(1).replace("Step", "")).toLowerCase(); + stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "name"), + new ScalarNode(Tag.STR, stepName))); + } + } + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate7(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("paramSpecs")) { + SequenceNode paramsNode = (SequenceNode) jobTuple.getValueNode(); + for (Node paramsNodeItem: paramsNode.getValue()) { + MappingNode paramNode = (MappingNode) paramsNodeItem; + String paramType = paramNode.getTag().getValue(); + if (paramType.equals("!NumberParam")) { + paramNode.setTag(new Tag("!IntegerParam")); + } else if (paramType.equals("!TextParam")) { 
+ NodeTuple multilineTuple = new NodeTuple( + new ScalarNode(Tag.STR, "multiline"), + new ScalarNode(Tag.STR, "false")); + paramNode.getValue().add(multilineTuple); + } + } + } + } + } + } else if (specKey.equals("stepTemplates")) { + SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); + for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { + MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; + for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { + String stemTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); + if (stemTemplateTupleKey.equals("paramSpecs")) { + SequenceNode paramsNode = (SequenceNode) stepTemplateTuple.getValueNode(); + for (Node paramsNodeItem: paramsNode.getValue()) { + MappingNode paramNode = (MappingNode) paramsNodeItem; + String paramType = paramNode.getTag().getValue(); + if (paramType.equals("!NumberParam")) { + paramNode.setTag(new Tag("!IntegerParam")); + } else if (paramType.equals("!TextParam")) { + NodeTuple multilineTuple = new NodeTuple( + new ScalarNode(Tag.STR, "multiline"), + new ScalarNode(Tag.STR, "false")); + paramNode.getValue().add(multilineTuple); + } + } + } + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate8(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("projectDependencies")) { + SequenceNode projectDependenciesNode = (SequenceNode) jobTuple.getValueNode(); + for (Node projectDependenciesNodeItem: projectDependenciesNode.getValue()) { + MappingNode projectDependencyNode = (MappingNode) projectDependenciesNodeItem; + String buildNumber = null; + for (Iterator itProjectDependencyTuple = projectDependencyNode.getValue().iterator(); itProjectDependencyTuple.hasNext();) { + NodeTuple projectDependencyTuple = itProjectDependencyTuple.next(); + String projectDependencyTupleKey = ((ScalarNode)projectDependencyTuple.getKeyNode()).getValue(); + if (projectDependencyTupleKey.equals("buildNumber")) { + buildNumber = ((ScalarNode)projectDependencyTuple.getValueNode()).getValue(); + itProjectDependencyTuple.remove(); + break; + } + } + Preconditions.checkNotNull(buildNumber); + + List buildProviderTuples = new ArrayList<>(); + buildProviderTuples.add(new NodeTuple( + new ScalarNode(Tag.STR, "buildNumber"), + new ScalarNode(Tag.STR, buildNumber))); + Node buildProviderNode = new MappingNode(new Tag("!SpecifiedBuild"), buildProviderTuples, FlowStyle.BLOCK); + projectDependencyNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "buildProvider"), buildProviderNode)); + } + } + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate9(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specObjectKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = 
((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("steps")) { + SequenceNode stepsNode = (SequenceNode) jobTuple.getValueNode(); + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + if (stepNode.getTag().getValue().equals("!CommandStep")) { + stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "useTTY"), + new ScalarNode(Tag.BOOL, "false"))); + } + } + } + } + } + } else if (specObjectKey.equals("stepTemplates")) { + SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); + for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { + MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; + for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { + String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); + if (stepTemplateTupleKey.equals("steps")) { + SequenceNode stepsNode = (SequenceNode) stepTemplateTuple.getValueNode(); + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + if (stepNode.getTag().getValue().equals("!CommandStep")) { + stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "useTTY"), + new ScalarNode(Tag.BOOL, "false"))); + } + } + } + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate10(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specObjectKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("cpuRequirement")) { + ScalarNode cpuRequirementNode = (ScalarNode) jobTuple.getValueNode(); + String cpuRequirement = cpuRequirementNode.getValue(); + cpuRequirementNode.setValue(cpuRequirement.substring(0, cpuRequirement.length()-1)); + } else if (jobTupleKey.equals("memoryRequirement")) { + ScalarNode memoryRequirementNode = (ScalarNode) jobTuple.getValueNode(); + String memoryRequirement = memoryRequirementNode.getValue(); + memoryRequirementNode.setValue(memoryRequirement.substring(0, memoryRequirement.length()-1)); + } + } + } + } else if (specObjectKey.equals("services")) { + SequenceNode servicesNode = (SequenceNode) specTuple.getValueNode(); + for (Node servicesNodeItem: servicesNode.getValue()) { + MappingNode serviceNode = (MappingNode) servicesNodeItem; + for (NodeTuple serviceTuple: serviceNode.getValue()) { + String serviceTupleKey = ((ScalarNode)serviceTuple.getKeyNode()).getValue(); + if (serviceTupleKey.equals("cpuRequirement")) { + ScalarNode cpuRequirementNode = (ScalarNode) serviceTuple.getValueNode(); + String cpuRequirement = cpuRequirementNode.getValue(); + cpuRequirementNode.setValue(cpuRequirement.substring(0, cpuRequirement.length()-1)); + } else if (serviceTupleKey.equals("memoryRequirement")) { + ScalarNode memoryRequirementNode = (ScalarNode) serviceTuple.getValueNode(); + String memoryRequirement = memoryRequirementNode.getValue(); + memoryRequirementNode.setValue(memoryRequirement.substring(0, memoryRequirement.length()-1)); + } + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate11(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specObjectKey = 
((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specObjectKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("steps")) { + SequenceNode stepsNode = (SequenceNode) jobTuple.getValueNode(); + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + if (stepNode.getTag().getValue().equals("!CheckoutStep")) { + stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "withLfs"), + new ScalarNode(Tag.BOOL, "false"))); + stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "withSubmodules"), + new ScalarNode(Tag.BOOL, "true"))); + } + } + } + } + } + } else if (specObjectKey.equals("stepTemplates")) { + SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); + for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { + MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; + for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { + String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); + if (stepTemplateTupleKey.equals("steps")) { + SequenceNode stepsNode = (SequenceNode) stepTemplateTuple.getValueNode(); + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + if (stepNode.getTag().getValue().equals("!CheckoutStep")) { + stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "withLfs"), + new ScalarNode(Tag.BOOL, "false"))); + stepNode.getValue().add(new NodeTuple(new ScalarNode(Tag.STR, "withSubmodules"), + new ScalarNode(Tag.BOOL, "true"))); + } + } + } + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate12(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specObjectKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("steps")) { + SequenceNode stepsNode = (SequenceNode) jobTuple.getValueNode(); + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + if (stepNode.getTag().getValue().equals("!PublishJestTestReportStep")) + stepNode.setTag(new Tag("!PublishJestReportStep")); + } + } + } + } + } else if (specObjectKey.equals("stepTemplates")) { + SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); + for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { + MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; + for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { + String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); + if (stepTemplateTupleKey.equals("steps")) { + SequenceNode stepsNode = (SequenceNode) stepTemplateTuple.getValueNode(); + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + if (stepNode.getTag().getValue().equals("!PublishJestTestReportStep")) + stepNode.setTag(new Tag("!PublishJestReportStep")); 
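+ // Step templates receive the same tag rename as the job steps above.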
+ } + } + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate13(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specTupleKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specTupleKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { + NodeTuple jobTuple = itJobTuple.next(); + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("projectDependencies")) { + SequenceNode projectDependenciesNode = (SequenceNode) jobTuple.getValueNode(); + for (Node projectDependenciesItem: projectDependenciesNode.getValue()) { + MappingNode projectDependencyNode = (MappingNode) projectDependenciesItem; + for (Iterator itProjectDependencyTuple = projectDependencyNode.getValue().iterator(); + itProjectDependencyTuple.hasNext();) { + NodeTuple projectDependencyTuple = itProjectDependencyTuple.next(); + ScalarNode projectDependencyTupleKeyNode = ((ScalarNode)projectDependencyTuple.getKeyNode()); + if (projectDependencyTupleKeyNode.getValue().equals("projectName")) + projectDependencyTupleKeyNode.setValue("projectPath"); + } + } + } + } + } + } else if (specTupleKey.equals("imports")) { + SequenceNode importsNode = (SequenceNode) specTuple.getValueNode(); + for (Node importsNodeItem: importsNode.getValue()) { + MappingNode importNode = (MappingNode) importsNodeItem; + for (Iterator itImportTuple = importNode.getValue().iterator(); itImportTuple.hasNext();) { + NodeTuple importTuple = itImportTuple.next(); + ScalarNode importTupleKeyNode = (ScalarNode)importTuple.getKeyNode(); + if (importTupleKeyNode.getValue().equals("projectName")) + importTupleKeyNode.setValue("projectPath"); + } + } + } + } + } + + private void migrate14_steps(SequenceNode stepsNode) { + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + if (stepNode.getTag().getValue().equals("!CommandStep")) { + Node commandsNode = null; + for (Iterator itStepNodeTuple = stepNode.getValue().iterator(); itStepNodeTuple.hasNext();) { + NodeTuple stepNodeTuple = itStepNodeTuple.next(); + if (((ScalarNode)stepNodeTuple.getKeyNode()).getValue().equals("commands")) { + commandsNode = stepNodeTuple.getValueNode(); + itStepNodeTuple.remove(); + break; + } + } + if (commandsNode != null) { + List interpreterTuples = new ArrayList<>(); + interpreterTuples.add(new NodeTuple(new ScalarNode(Tag.STR, "commands"), commandsNode)); + stepNode.getValue().add(new NodeTuple( + new ScalarNode(Tag.STR, "interpreter"), + new MappingNode(new Tag("!DefaultInterpreter"), interpreterTuples, FlowStyle.BLOCK))); + } + stepNode.getValue().add(new NodeTuple( + new ScalarNode(Tag.STR, "runInContainer"), + new ScalarNode(Tag.BOOL, "true"))); + } + } + } + + @SuppressWarnings("unused") + private void migrate14(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specObjectKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if 
(jobTupleKey.equals("steps")) + migrate14_steps((SequenceNode) jobTuple.getValueNode()); + } + } + } else if (specObjectKey.equals("stepTemplates")) { + SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); + for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { + MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; + for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { + String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); + if (stepTemplateTupleKey.equals("steps")) + migrate14_steps((SequenceNode) stepTemplateTuple.getValueNode()); + } + } + } + } + } + + private void migrate15_steps(SequenceNode stepsNode) { + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + if (stepNode.getTag().getValue().equals("!BuildImageStep")) { + for (Iterator itStepNodeTuple = stepNode.getValue().iterator(); itStepNodeTuple.hasNext();) { + NodeTuple stepNodeTuple = itStepNodeTuple.next(); + String key = ((ScalarNode)stepNodeTuple.getKeyNode()).getValue(); + if (key.equals("useTTY") || key.equals("login")) + itStepNodeTuple.remove(); + } + } + } + } + + @SuppressWarnings("unused") + private void migrate15(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specObjectKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("steps")) + migrate15_steps((SequenceNode) jobTuple.getValueNode()); + } + } + } else if (specObjectKey.equals("stepTemplates")) { + SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); + for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { + MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; + for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { + String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); + if (stepTemplateTupleKey.equals("steps")) + migrate15_steps((SequenceNode) stepTemplateTuple.getValueNode()); + } + } + } + } + } + + private void migrate16_steps(SequenceNode stepsNode) { + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + if (stepNode.getTag().getValue().equals("!CommandStep")) { + for (Iterator itStepNodeTuple = stepNode.getValue().iterator(); itStepNodeTuple.hasNext();) { + NodeTuple stepNodeTuple = itStepNodeTuple.next(); + String key = ((ScalarNode)stepNodeTuple.getKeyNode()).getValue(); + if (key.equals("interpreter")) { + MappingNode interpreterNode = (MappingNode) stepNodeTuple.getValueNode(); + if (interpreterNode.getTag().getValue().equals("!BashInterpreter")) { + interpreterNode.setTag(new Tag("!ShellInterpreter")); + interpreterNode.getValue().add(new NodeTuple( + new ScalarNode(Tag.STR, "shell"), new ScalarNode(Tag.STR, "bash"))); + } + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate16(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specObjectKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node 
jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("steps")) + migrate16_steps((SequenceNode) jobTuple.getValueNode()); + } + } + } else if (specObjectKey.equals("stepTemplates")) { + SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); + for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { + MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; + for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { + String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); + if (stepTemplateTupleKey.equals("steps")) + migrate16_steps((SequenceNode) stepTemplateTuple.getValueNode()); + } + } + } + } + } + + @SuppressWarnings("unused") + private void migrate17(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + if (((ScalarNode)specTuple.getKeyNode()).getValue().equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + List actionNodes = new ArrayList<>(); + for (Iterator itJobTuple = jobNode.getValue().iterator(); itJobTuple.hasNext();) { + NodeTuple jobTuple = itJobTuple.next(); + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("postBuildActions")) { + SequenceNode actionsNode = (SequenceNode) jobTuple.getValueNode(); + for (Node actionNodeItem: actionsNode.getValue()) { + MappingNode actionNode = (MappingNode) actionNodeItem; + if (actionNode.getTag().getValue().equals("!CreateIssueAction")) { + actionNode.getValue().add(new NodeTuple( + new ScalarNode(Tag.STR, "issueConfidential"), new ScalarNode(Tag.STR, "false"))); + } + } + } + } + } + } + } + } + + private void migrate18_steps(SequenceNode stepsNode) { + for (Node stepsNodeItem: stepsNode.getValue()) { + MappingNode stepNode = (MappingNode) stepsNodeItem; + if (stepNode.getTag().getValue().equals("!PullRepository")) { + stepNode.getValue().add(new NodeTuple( + new ScalarNode(Tag.STR, "syncToChildProject"), new ScalarNode(Tag.STR, "false"))); + } + } + } + + @SuppressWarnings("unused") + private void migrate18(VersionedYamlDoc doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specObjectKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (NodeTuple jobTuple: jobNode.getValue()) { + String jobTupleKey = ((ScalarNode)jobTuple.getKeyNode()).getValue(); + if (jobTupleKey.equals("steps")) + migrate18_steps((SequenceNode) jobTuple.getValueNode()); + } + } + } else if (specObjectKey.equals("stepTemplates")) { + SequenceNode stepTemplatesNode = (SequenceNode) specTuple.getValueNode(); + for (Node stepTemplatesNodeItem: stepTemplatesNode.getValue()) { + MappingNode stepTemplateNode = (MappingNode) stepTemplatesNodeItem; + for (NodeTuple stepTemplateTuple: stepTemplateNode.getValue()) { + String stepTemplateTupleKey = ((ScalarNode)stepTemplateTuple.getKeyNode()).getValue(); + if (stepTemplateTupleKey.equals("steps")) + migrate18_steps((SequenceNode) stepTemplateTuple.getValueNode()); + } + } + } + } + } + + private void migrate19(VersionedYamlDoc 
doc, Stack versions) { + for (NodeTuple specTuple: doc.getValue()) { + String specObjectKey = ((ScalarNode)specTuple.getKeyNode()).getValue(); + if (specObjectKey.equals("jobs")) { + SequenceNode jobsNode = (SequenceNode) specTuple.getValueNode(); + for (Node jobsNodeItem: jobsNode.getValue()) { + MappingNode jobNode = (MappingNode) jobsNodeItem; + for (var it = jobNode.getValue().iterator(); it.hasNext();) { + String jobTupleKey = ((ScalarNode)it.next().getKeyNode()).getValue(); + if (jobTupleKey.equals("cpuRequirement") || jobTupleKey.equals("memoryRequirement")) + it.remove(); + } + } + } else if (specObjectKey.equals("services")) { + SequenceNode servicesNode = (SequenceNode) specTuple.getValueNode(); + for (Node servicesNodeItem: servicesNode.getValue()) { + MappingNode serviceNode = (MappingNode) servicesNodeItem; + for (var it = serviceNode.getValue().iterator(); it.hasNext();) { + String serviceTupleKey = ((ScalarNode)it.next().getKeyNode()).getValue(); + if (serviceTupleKey.equals("cpuRequirement") || serviceTupleKey.equals("memoryRequirement")) + it.remove(); + } + } + } + } + } +} diff --git a/server-core/src/main/java/io/onedev/server/buildspec/Service.java b/server-core/src/main/java/io/onedev/server/buildspec/Service.java index e1f220674b..42f7219154 100644 --- a/server-core/src/main/java/io/onedev/server/buildspec/Service.java +++ b/server-core/src/main/java/io/onedev/server/buildspec/Service.java @@ -1,148 +1,123 @@ -package io.onedev.server.buildspec; - -import java.io.Serializable; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.validation.constraints.NotEmpty; - -import io.onedev.commons.codeassist.InputCompletion; -import io.onedev.commons.codeassist.InputStatus; -import io.onedev.commons.codeassist.InputSuggestion; -import io.onedev.server.buildspec.job.EnvVar; -import io.onedev.server.util.validation.annotation.DnsName; -import io.onedev.server.web.editable.annotation.Editable; -import io.onedev.server.web.editable.annotation.Interpolative; -import io.onedev.server.web.editable.annotation.SuggestionProvider; - -@Editable -public class Service implements NamedElement, Serializable { - - private static final long serialVersionUID = 1L; - - private String name; - - private String image; - - private String arguments; - - private List envVars = new ArrayList<>(); - - private String readinessCheckCommand; - - private int cpuRequirement = 250; - - private int memoryRequirement = 256; - - @Editable(order=100, description="Specify name of the service, which will be used as host name to access the service") - @SuggestionProvider("getNameSuggestions") - @DnsName - @NotEmpty - @Override - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - @SuppressWarnings("unused") - private static List getNameSuggestions(InputStatus status) { - BuildSpec buildSpec = BuildSpec.get(); - if (buildSpec != null) { - List candidates = new ArrayList<>(buildSpec.getServiceMap().keySet()); - buildSpec.getServices().forEach(it->candidates.remove(it.getName())); - return BuildSpec.suggestOverrides(candidates, status); - } - return new ArrayList<>(); - } - - @Editable(order=200, description="Specify docker image of the service") - @Interpolative(variableSuggester="suggestVariables") - @NotEmpty - public String getImage() { - return image; - } - - public void setImage(String image) { - this.image = image; - } - - @Editable(order=220, description="Optionally specify arguments to 
run above image") - @Interpolative(variableSuggester="suggestVariables") - public String getArguments() { - return arguments; - } - - public void setArguments(String arguments) { - this.arguments = arguments; - } - - @Editable(order=300, name="Environment Variables", description="Optionally specify environment variables of " - + "the service") - public List getEnvVars() { - return envVars; - } - - public void setEnvVars(List envVars) { - this.envVars = envVars; - } - - @Editable(order=400, description="Specify command to check readiness of the service. This command will " - + "be interpretated by cmd.exe on Windows images, and by shell on Linux images. It will be " - + "executed repeatedly until a zero code is returned to indicate service ready") - @Interpolative(variableSuggester="suggestVariables") - @NotEmpty - public String getReadinessCheckCommand() { - return readinessCheckCommand; - } - - public void setReadinessCheckCommand(String readinessCheckCommand) { - this.readinessCheckCommand = readinessCheckCommand; - } - - @Editable(order=10000, name="CPU Requirement", group="More Settings", description="Specify CPU requirement of the service in millis. " - + "1000 millis means a single CPU core") - public int getCpuRequirement() { - return cpuRequirement; - } - - public void setCpuRequirement(int cpuRequirement) { - this.cpuRequirement = cpuRequirement; - } - - @Editable(order=10100, group="More Settings", description="Specify memory requirement of the service in mega bytes") - public int getMemoryRequirement() { - return memoryRequirement; - } - - public void setMemoryRequirement(int memoryRequirement) { - this.memoryRequirement = memoryRequirement; - } - - @SuppressWarnings("unused") - private static List suggestVariables(String matchWith) { - return BuildSpec.suggestVariables(matchWith, false, false, false); - } - - public Map toMap() { - Map serviceMap = new HashMap<>(); - - serviceMap.put("name", getName()); - serviceMap.put("image", getImage()); - serviceMap.put("readinessCheckCommand", getReadinessCheckCommand()); - serviceMap.put("cpuRequirement", getCpuRequirement()); - serviceMap.put("memoryRequirement", getMemoryRequirement()); - serviceMap.put("arguments", getArguments()); - Map envVars = new HashMap<>(); - for (EnvVar var: getEnvVars()) - envVars.put(var.getName(), var.getValue()); - serviceMap.put("envVars", (Serializable) envVars); - - return serviceMap; - } - -} +package io.onedev.server.buildspec; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.validation.constraints.NotEmpty; + +import io.onedev.commons.codeassist.InputCompletion; +import io.onedev.commons.codeassist.InputStatus; +import io.onedev.commons.codeassist.InputSuggestion; +import io.onedev.server.buildspec.job.EnvVar; +import io.onedev.server.util.validation.annotation.DnsName; +import io.onedev.server.web.editable.annotation.Editable; +import io.onedev.server.web.editable.annotation.Interpolative; +import io.onedev.server.web.editable.annotation.SuggestionProvider; + +@Editable +public class Service implements NamedElement, Serializable { + + private static final long serialVersionUID = 1L; + + private String name; + + private String image; + + private String arguments; + + private List envVars = new ArrayList<>(); + + private String readinessCheckCommand; + + @Editable(order=100, description="Specify name of the service, which will be used as host name to access the service") + 
@SuggestionProvider("getNameSuggestions") + @DnsName + @NotEmpty + @Override + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @SuppressWarnings("unused") + private static List getNameSuggestions(InputStatus status) { + BuildSpec buildSpec = BuildSpec.get(); + if (buildSpec != null) { + List candidates = new ArrayList<>(buildSpec.getServiceMap().keySet()); + buildSpec.getServices().forEach(it->candidates.remove(it.getName())); + return BuildSpec.suggestOverrides(candidates, status); + } + return new ArrayList<>(); + } + + @Editable(order=200, description="Specify docker image of the service") + @Interpolative(variableSuggester="suggestVariables") + @NotEmpty + public String getImage() { + return image; + } + + public void setImage(String image) { + this.image = image; + } + + @Editable(order=220, description="Optionally specify arguments to run above image") + @Interpolative(variableSuggester="suggestVariables") + public String getArguments() { + return arguments; + } + + public void setArguments(String arguments) { + this.arguments = arguments; + } + + @Editable(order=300, name="Environment Variables", description="Optionally specify environment variables of " + + "the service") + public List getEnvVars() { + return envVars; + } + + public void setEnvVars(List envVars) { + this.envVars = envVars; + } + + @Editable(order=400, description="Specify command to check readiness of the service. This command will " + + "be interpretated by cmd.exe on Windows images, and by shell on Linux images. It will be " + + "executed repeatedly until a zero code is returned to indicate service ready") + @Interpolative(variableSuggester="suggestVariables") + @NotEmpty + public String getReadinessCheckCommand() { + return readinessCheckCommand; + } + + public void setReadinessCheckCommand(String readinessCheckCommand) { + this.readinessCheckCommand = readinessCheckCommand; + } + + @SuppressWarnings("unused") + private static List suggestVariables(String matchWith) { + return BuildSpec.suggestVariables(matchWith, false, false, false); + } + + public Map toMap() { + Map serviceMap = new HashMap<>(); + + serviceMap.put("name", getName()); + serviceMap.put("image", getImage()); + serviceMap.put("readinessCheckCommand", getReadinessCheckCommand()); + serviceMap.put("arguments", getArguments()); + Map envVars = new HashMap<>(); + for (EnvVar var: getEnvVars()) + envVars.put(var.getName(), var.getValue()); + serviceMap.put("envVars", (Serializable) envVars); + + return serviceMap; + } + +} diff --git a/server-core/src/main/java/io/onedev/server/buildspec/job/Job.java b/server-core/src/main/java/io/onedev/server/buildspec/job/Job.java index 5e8609d1a9..edb804fa5b 100644 --- a/server-core/src/main/java/io/onedev/server/buildspec/job/Job.java +++ b/server-core/src/main/java/io/onedev/server/buildspec/job/Job.java @@ -1,496 +1,473 @@ -package io.onedev.server.buildspec.job; - -import static io.onedev.server.model.Build.NAME_BRANCH; -import static io.onedev.server.model.Build.NAME_COMMIT; -import static io.onedev.server.model.Build.NAME_JOB; -import static io.onedev.server.model.Build.NAME_PULL_REQUEST; -import static io.onedev.server.model.Build.NAME_TAG; -import static io.onedev.server.search.entity.build.BuildQuery.getRuleName; -import static io.onedev.server.search.entity.build.BuildQueryLexer.And; -import static io.onedev.server.search.entity.build.BuildQueryLexer.InPipelineOf; -import static io.onedev.server.search.entity.build.BuildQueryLexer.Is; - 
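Note: the DataMigrator change earlier in this patch walks stored build spec YAML and drops the cpuRequirement/memoryRequirement keys that Service (above) and Job (below) no longer declare. A minimal standalone sketch of that node-walking idea using SnakeYAML directly; the class name and sample input are illustrative only, not part of the patch:

    import java.io.StringReader;
    import java.io.StringWriter;
    import java.util.Iterator;

    import org.yaml.snakeyaml.Yaml;
    import org.yaml.snakeyaml.nodes.MappingNode;
    import org.yaml.snakeyaml.nodes.Node;
    import org.yaml.snakeyaml.nodes.NodeTuple;
    import org.yaml.snakeyaml.nodes.ScalarNode;
    import org.yaml.snakeyaml.nodes.SequenceNode;

    public class DropResourceKeysDemo {

        public static void main(String[] args) {
            String spec = "jobs:\n- name: ci\n  cpuRequirement: 250\n  memoryRequirement: 256\n";
            Yaml yaml = new Yaml();
            Node root = yaml.compose(new StringReader(spec));
            // Walk the jobs sequence and drop the two obsolete keys, mirroring the migrator above
            for (NodeTuple specTuple: ((MappingNode) root).getValue()) {
                String key = ((ScalarNode) specTuple.getKeyNode()).getValue();
                if (key.equals("jobs")) {
                    for (Node item: ((SequenceNode) specTuple.getValueNode()).getValue()) {
                        for (Iterator<NodeTuple> it = ((MappingNode) item).getValue().iterator(); it.hasNext();) {
                            String jobKey = ((ScalarNode) it.next().getKeyNode()).getValue();
                            if (jobKey.equals("cpuRequirement") || jobKey.equals("memoryRequirement"))
                                it.remove();
                        }
                    }
                }
            }
            StringWriter migrated = new StringWriter();
            yaml.serialize(root, migrated);
            System.out.println(migrated); // cpuRequirement/memoryRequirement are gone
        }
    }

Working on the node tree instead of deserializing into the current classes is what lets the migrator tolerate specs written against older spec versions.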
-import java.io.Serializable; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - -import javax.annotation.Nullable; -import javax.validation.ConstraintValidatorContext; -import javax.validation.Valid; -import javax.validation.constraints.Min; -import javax.validation.constraints.NotEmpty; - -import org.apache.wicket.Component; -import org.eclipse.jgit.lib.ObjectId; - -import io.onedev.commons.codeassist.InputCompletion; -import io.onedev.commons.codeassist.InputStatus; -import io.onedev.commons.codeassist.InputSuggestion; -import io.onedev.server.OneDev; -import io.onedev.server.buildspec.BuildSpec; -import io.onedev.server.buildspec.BuildSpecAware; -import io.onedev.server.buildspec.NamedElement; -import io.onedev.server.buildspec.job.action.PostBuildAction; -import io.onedev.server.buildspec.job.projectdependency.ProjectDependency; -import io.onedev.server.buildspec.job.trigger.JobTrigger; -import io.onedev.server.buildspec.param.ParamUtils; -import io.onedev.server.buildspec.param.spec.ParamSpec; -import io.onedev.server.buildspec.step.Step; -import io.onedev.server.entitymanager.SettingManager; -import io.onedev.server.event.project.ProjectEvent; -import io.onedev.server.git.GitUtils; -import io.onedev.server.job.authorization.JobAuthorization; -import io.onedev.server.job.authorization.JobAuthorization.Context; -import io.onedev.server.model.Build; -import io.onedev.server.model.PullRequest; -import io.onedev.server.model.support.administration.jobexecutor.JobExecutor; -import io.onedev.server.util.ComponentContext; -import io.onedev.server.util.EditContext; -import io.onedev.server.util.criteria.Criteria; -import io.onedev.server.util.validation.Validatable; -import io.onedev.server.util.validation.annotation.ClassValidating; -import io.onedev.server.web.editable.annotation.ChoiceProvider; -import io.onedev.server.web.editable.annotation.Editable; -import io.onedev.server.web.editable.annotation.Interpolative; -import io.onedev.server.web.editable.annotation.RetryCondition; -import io.onedev.server.web.editable.annotation.SuggestionProvider; -import io.onedev.server.web.page.project.blob.ProjectBlobPage; -import io.onedev.server.web.util.SuggestionUtils; -import io.onedev.server.web.util.WicketUtils; - -@Editable -@ClassValidating -public class Job implements NamedElement, Serializable, Validatable { - - private static final long serialVersionUID = 1L; - - public static final String SELECTION_PREFIX = "jobs/"; - - public static final String PROP_NAME = "name"; - - public static final String PROP_JOB_DEPENDENCIES = "jobDependencies"; - - public static final String PROP_REQUIRED_SERVICES = "requiredServices"; - - public static final String PROP_TRIGGERS = "triggers"; - - public static final String PROP_STEPS = "steps"; - - public static final String PROP_RETRY_CONDITION = "retryCondition"; - - public static final String PROP_POST_BUILD_ACTIONS = "postBuildActions"; - - private String name; - - private String jobExecutor; - - private List steps = new ArrayList<>(); - - private List paramSpecs = new ArrayList<>(); - - private List jobDependencies = new ArrayList<>(); - - private List projectDependencies = new ArrayList<>(); - - private List requiredServices = new ArrayList<>(); - - private List triggers = new ArrayList<>(); - - private List caches = new ArrayList<>(); - - private int cpuRequirement = 250; - - private int memoryRequirement = 256; - - private long timeout = 
3600; - - private List postBuildActions = new ArrayList<>(); - - private String retryCondition = "never"; - - private int maxRetries = 3; - - private int retryDelay = 30; - - private transient Map paramSpecMap; - - @Editable(order=100, description="Specify name of the job") - @SuggestionProvider("getNameSuggestions") - @NotEmpty - @Override - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - @SuppressWarnings("unused") - private static List getNameSuggestions(InputStatus status) { - BuildSpec buildSpec = BuildSpec.get(); - if (buildSpec != null) { - List candidates = new ArrayList<>(buildSpec.getJobMap().keySet()); - buildSpec.getJobs().forEach(it->candidates.remove(it.getName())); - return BuildSpec.suggestOverrides(candidates, status); - } - return new ArrayList<>(); - } - - @Editable(order=200, placeholder="Use Any Applicable Executor", description="Optionally specify authorized executor " - + "for this job. Leave empty to use first authorized executor") - @Interpolative(literalSuggester="suggestJobExecutors", variableSuggester="suggestVariables") - public String getJobExecutor() { - return jobExecutor; - } - - public void setJobExecutor(String jobExecutor) { - this.jobExecutor = jobExecutor; - } - - @SuppressWarnings("unused") - private static List suggestJobExecutors(String matchWith) { - List applicableJobExecutors = new ArrayList<>(); - ProjectBlobPage page = (ProjectBlobPage) WicketUtils.getPage(); - String jobName = (String) EditContext.get().getInputValue(PROP_NAME); - if (jobName != null) { - Context context = new Context(page.getProject(), page.getBlobIdent().revision, jobName); - for (JobExecutor executor: OneDev.getInstance(SettingManager.class).getJobExecutors()) { - if (executor.isEnabled()) { - if (executor.getJobAuthorization() == null) { - applicableJobExecutors.add(executor.getName()); - } else { - if (JobAuthorization.parse(executor.getJobAuthorization()).matches(context)) - applicableJobExecutors.add(executor.getName()); - } - } - } - } - - return SuggestionUtils.suggest(applicableJobExecutors, matchWith); - } - - @Editable(order=200, description="Steps will be executed serially on same node, sharing the same job workspace") - public List getSteps() { - return steps; - } - - public void setSteps(List steps) { - this.steps = steps; - } - - @Editable(order=400, name="Parameter Specs", group="Params & Triggers", description="Optionally define parameter specifications of the job") - @Valid - public List getParamSpecs() { - return paramSpecs; - } - - public void setParamSpecs(List paramSpecs) { - this.paramSpecs = paramSpecs; - } - - @Editable(order=500, group="Params & Triggers", description="Use triggers to run the job automatically under certain conditions") - @Valid - public List getTriggers() { - return triggers; - } - - public void setTriggers(List triggers) { - this.triggers = triggers; - } - - @Editable(name="Job Dependencies", order=9110, group="Dependencies & Services", description="Job dependencies determines the order and " - + "concurrency when run different jobs. 
You may also specify artifacts to retrieve from upstream jobs") - @Valid - public List getJobDependencies() { - return jobDependencies; - } - - public void setJobDependencies(List jobDependencies) { - this.jobDependencies = jobDependencies; - } - - @Editable(name="Project Dependencies", order=9112, group="Dependencies & Services", description="Use project dependency to retrieve " - + "artifacts from other projects") - @Valid - public List getProjectDependencies() { - return projectDependencies; - } - - public void setProjectDependencies(List projectDependencies) { - this.projectDependencies = projectDependencies; - } - - @Editable(order=9114, group="Dependencies & Services", placeholder="No required services", - description="Optionally specify services required by this job. " - + "NOTE: Services are only supported by docker aware executors " - + "(server docker executor, remote docker executor, or kubernetes executor)") - @ChoiceProvider("getServiceChoices") - public List getRequiredServices() { - return requiredServices; - } - - public void setRequiredServices(List requiredServices) { - this.requiredServices = requiredServices; - } - - @SuppressWarnings("unused") - private static List getServiceChoices() { - List choices = new ArrayList<>(); - Component component = ComponentContext.get().getComponent(); - BuildSpecAware buildSpecAware = WicketUtils.findInnermost(component, BuildSpecAware.class); - if (buildSpecAware != null) { - BuildSpec buildSpec = buildSpecAware.getBuildSpec(); - if (buildSpec != null) { - choices.addAll(buildSpec.getServiceMap().values().stream() - .map(it->it.getName()).collect(Collectors.toList())); - } - } - return choices; - } - - @Editable(order=9400, group="More Settings", description="Specify condition to retry build upon failure") - @NotEmpty - @RetryCondition - public String getRetryCondition() { - return retryCondition; - } - - public void setRetryCondition(String retryCondition) { - this.retryCondition = retryCondition; - } - - @Editable(order=9410, group="More Settings", description="Maximum of retries before giving up") - @Min(value=1, message="This value should not be less than 1") - public int getMaxRetries() { - return maxRetries; - } - - public void setMaxRetries(int maxRetries) { - this.maxRetries = maxRetries; - } - - @Editable(order=9420, group="More Settings", description="Delay for the first retry in seconds. " - + "Delay of subsequent retries will be calculated using an exponential back-off " - + "based on this delay") - @Min(value=1, message="This value should not be less than 1") - public int getRetryDelay() { - return retryDelay; - } - - public void setRetryDelay(int retryDelay) { - this.retryDelay = retryDelay; - } - - @Editable(order=10050, name="CPU Requirement", group="More Settings", description="Specify CPU requirement of the job in millis. " - + "1000 millis means a single CPU core") - public int getCpuRequirement() { - return cpuRequirement; - } - - public void setCpuRequirement(int cpuRequirement) { - this.cpuRequirement = cpuRequirement; - } - - @Editable(order=10060, group="More Settings", description="Specify memory requirement of the job in mega bytes") - public int getMemoryRequirement() { - return memoryRequirement; - } - - public void setMemoryRequirement(int memoryRequirement) { - this.memoryRequirement = memoryRequirement; - } - - @Editable(order=10100, group="More Settings", description="Cache specific paths to speed up job execution. 
" - + "For instance for Java Maven projects executed by various docker executors, you may cache folder " - + "/root/.m2/repository to avoid downloading dependencies for subsequent executions.
" - + "WARNING: When using cache, malicious jobs running with same job executor " - + "can read or even pollute the cache intentionally using same cache key as yours. To avoid this " - + "issue, make sure job executor executing your job can only be used by trusted jobs via job " - + "authorization setting") - @Valid - public List getCaches() { - return caches; - } - - public void setCaches(List caches) { - this.caches = caches; - } - - @Editable(order=10500, group="More Settings", description="Specify timeout in seconds") - public long getTimeout() { - return timeout; - } - - public void setTimeout(long timeout) { - this.timeout = timeout; - } - - @Editable(order=10600, name="Post Build Actions", group="More Settings") - @Valid - public List getPostBuildActions() { - return postBuildActions; - } - - public void setPostBuildActions(List postBuildActions) { - this.postBuildActions = postBuildActions; - } - - @Nullable - public JobTriggerMatch getTriggerMatch(ProjectEvent event) { - for (JobTrigger trigger: getTriggers()) { - SubmitReason reason = trigger.matches(event, this); - if (reason != null) - return new JobTriggerMatch(trigger, reason); - } - return null; - } - - @Override - public boolean isValid(ConstraintValidatorContext context) { - boolean isValid = true; - - Set keys = new HashSet<>(); - Set paths = new HashSet<>(); - for (CacheSpec cache: caches) { - if (!keys.add(cache.getKey())) { - isValid = false; - context.buildConstraintViolationWithTemplate("Duplicate key (" + cache.getKey() + ")") - .addPropertyNode("caches").addConstraintViolation(); - } - if (!paths.add(cache.getPath())) { - isValid = false; - context.buildConstraintViolationWithTemplate("Duplicate path (" + cache.getPath() + ")") - .addPropertyNode("caches").addConstraintViolation(); - } - } - - Set dependencyJobNames = new HashSet<>(); - for (JobDependency dependency: jobDependencies) { - if (!dependencyJobNames.add(dependency.getJobName())) { - isValid = false; - context.buildConstraintViolationWithTemplate("Duplicate dependency (" + dependency.getJobName() + ")") - .addPropertyNode("jobDependencies").addConstraintViolation(); - } - } - - Set dependencyProjectPaths = new HashSet<>(); - for (ProjectDependency dependency: projectDependencies) { - if (!dependencyProjectPaths.add(dependency.getProjectPath())) { - isValid = false; - context.buildConstraintViolationWithTemplate("Duplicate dependency (" + dependency.getProjectPath() + ")") - .addPropertyNode("projectDependencies").addConstraintViolation(); - } - } - - Set paramSpecNames = new HashSet<>(); - for (ParamSpec paramSpec: paramSpecs) { - if (!paramSpecNames.add(paramSpec.getName())) { - isValid = false; - context.buildConstraintViolationWithTemplate("Duplicate parameter spec (" + paramSpec.getName() + ")") - .addPropertyNode("paramSpecs").addConstraintViolation(); - } - } - - if (getRetryCondition() != null) { - try { - io.onedev.server.buildspec.job.retrycondition.RetryCondition.parse(this, getRetryCondition()); - } catch (Exception e) { - String message = e.getMessage(); - if (message == null) - message = "Malformed retry condition"; - context.buildConstraintViolationWithTemplate(message) - .addPropertyNode(PROP_RETRY_CONDITION) - .addConstraintViolation(); - isValid = false; - } - } - - if (isValid) { - for (int triggerIndex=0; triggerIndex getParamSpecMap() { - if (paramSpecMap == null) - paramSpecMap = ParamUtils.getParamSpecMap(paramSpecs); - return paramSpecMap; - } - - public static String getBuildQuery(ObjectId commitId, String jobName, - @Nullable 
Build pipelineOf, @Nullable String refName, @Nullable PullRequest request) { - String query = "" - + Criteria.quote(NAME_COMMIT) + " " + getRuleName(Is) + " " + Criteria.quote(commitId.name()) - + " " + getRuleName(And) + " " - + Criteria.quote(NAME_JOB) + " " + getRuleName(Is) + " " + Criteria.quote(jobName); - if (pipelineOf != null) - query = query + " " + getRuleName(And) + " " + getRuleName(InPipelineOf) + " " + Criteria.quote("#" + pipelineOf.getNumber()); - if (request != null) { - query = query - + " " + getRuleName(And) + " " - + Criteria.quote(NAME_PULL_REQUEST) + " " + getRuleName(Is) + " " + Criteria.quote("#" + request.getNumber()); - } - if (refName != null) { - String branch = GitUtils.ref2branch(refName); - if (branch != null) { - query = query - + " " + getRuleName(And) + " " - + Criteria.quote(NAME_BRANCH) + " " + getRuleName(Is) + " " + Criteria.quote(branch); - } - String tag = GitUtils.ref2tag(refName); - if (tag != null) { - query = query - + " " + getRuleName(And) + " " - + Criteria.quote(NAME_TAG) + " " + getRuleName(Is) + " " + Criteria.quote(tag); - } - } - return query; - } - - public static List getChoices() { - List choices = new ArrayList<>(); - Component component = ComponentContext.get().getComponent(); - BuildSpecAware buildSpecAware = WicketUtils.findInnermost(component, BuildSpecAware.class); - if (buildSpecAware != null) { - BuildSpec buildSpec = buildSpecAware.getBuildSpec(); - if (buildSpec != null) { - choices.addAll(buildSpec.getJobMap().values().stream() - .map(it->it.getName()).collect(Collectors.toList())); - } - JobAware jobAware = WicketUtils.findInnermost(component, JobAware.class); - if (jobAware != null) { - Job job = jobAware.getJob(); - if (job != null) - choices.remove(job.getName()); - } - } - return choices; - } - - @SuppressWarnings("unused") - private static List suggestVariables(String matchWith) { - return BuildSpec.suggestVariables(matchWith, false, false, false); - } - -} +package io.onedev.server.buildspec.job; + +import static io.onedev.server.model.Build.NAME_BRANCH; +import static io.onedev.server.model.Build.NAME_COMMIT; +import static io.onedev.server.model.Build.NAME_JOB; +import static io.onedev.server.model.Build.NAME_PULL_REQUEST; +import static io.onedev.server.model.Build.NAME_TAG; +import static io.onedev.server.search.entity.build.BuildQuery.getRuleName; +import static io.onedev.server.search.entity.build.BuildQueryLexer.And; +import static io.onedev.server.search.entity.build.BuildQueryLexer.InPipelineOf; +import static io.onedev.server.search.entity.build.BuildQueryLexer.Is; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import javax.annotation.Nullable; +import javax.validation.ConstraintValidatorContext; +import javax.validation.Valid; +import javax.validation.constraints.Min; +import javax.validation.constraints.NotEmpty; + +import org.apache.wicket.Component; +import org.eclipse.jgit.lib.ObjectId; + +import io.onedev.commons.codeassist.InputCompletion; +import io.onedev.commons.codeassist.InputStatus; +import io.onedev.commons.codeassist.InputSuggestion; +import io.onedev.server.OneDev; +import io.onedev.server.buildspec.BuildSpec; +import io.onedev.server.buildspec.BuildSpecAware; +import io.onedev.server.buildspec.NamedElement; +import io.onedev.server.buildspec.job.action.PostBuildAction; +import 
io.onedev.server.buildspec.job.projectdependency.ProjectDependency; +import io.onedev.server.buildspec.job.trigger.JobTrigger; +import io.onedev.server.buildspec.param.ParamUtils; +import io.onedev.server.buildspec.param.spec.ParamSpec; +import io.onedev.server.buildspec.step.Step; +import io.onedev.server.entitymanager.SettingManager; +import io.onedev.server.event.project.ProjectEvent; +import io.onedev.server.git.GitUtils; +import io.onedev.server.job.authorization.JobAuthorization; +import io.onedev.server.job.authorization.JobAuthorization.Context; +import io.onedev.server.model.Build; +import io.onedev.server.model.PullRequest; +import io.onedev.server.model.support.administration.jobexecutor.JobExecutor; +import io.onedev.server.util.ComponentContext; +import io.onedev.server.util.EditContext; +import io.onedev.server.util.criteria.Criteria; +import io.onedev.server.util.validation.Validatable; +import io.onedev.server.util.validation.annotation.ClassValidating; +import io.onedev.server.web.editable.annotation.ChoiceProvider; +import io.onedev.server.web.editable.annotation.Editable; +import io.onedev.server.web.editable.annotation.Interpolative; +import io.onedev.server.web.editable.annotation.RetryCondition; +import io.onedev.server.web.editable.annotation.SuggestionProvider; +import io.onedev.server.web.page.project.blob.ProjectBlobPage; +import io.onedev.server.web.util.SuggestionUtils; +import io.onedev.server.web.util.WicketUtils; + +@Editable +@ClassValidating +public class Job implements NamedElement, Serializable, Validatable { + + private static final long serialVersionUID = 1L; + + public static final String SELECTION_PREFIX = "jobs/"; + + public static final String PROP_NAME = "name"; + + public static final String PROP_JOB_DEPENDENCIES = "jobDependencies"; + + public static final String PROP_REQUIRED_SERVICES = "requiredServices"; + + public static final String PROP_TRIGGERS = "triggers"; + + public static final String PROP_STEPS = "steps"; + + public static final String PROP_RETRY_CONDITION = "retryCondition"; + + public static final String PROP_POST_BUILD_ACTIONS = "postBuildActions"; + + private String name; + + private String jobExecutor; + + private List steps = new ArrayList<>(); + + private List paramSpecs = new ArrayList<>(); + + private List jobDependencies = new ArrayList<>(); + + private List projectDependencies = new ArrayList<>(); + + private List requiredServices = new ArrayList<>(); + + private List triggers = new ArrayList<>(); + + private List caches = new ArrayList<>(); + + private long timeout = 3600; + + private List postBuildActions = new ArrayList<>(); + + private String retryCondition = "never"; + + private int maxRetries = 3; + + private int retryDelay = 30; + + private transient Map paramSpecMap; + + @Editable(order=100, description="Specify name of the job") + @SuggestionProvider("getNameSuggestions") + @NotEmpty + @Override + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @SuppressWarnings("unused") + private static List getNameSuggestions(InputStatus status) { + BuildSpec buildSpec = BuildSpec.get(); + if (buildSpec != null) { + List candidates = new ArrayList<>(buildSpec.getJobMap().keySet()); + buildSpec.getJobs().forEach(it->candidates.remove(it.getName())); + return BuildSpec.suggestOverrides(candidates, status); + } + return new ArrayList<>(); + } + + @Editable(order=200, placeholder="Use Any Applicable Executor", description="Optionally specify authorized executor " + + 
"for this job. Leave empty to use first authorized executor") + @Interpolative(literalSuggester="suggestJobExecutors", variableSuggester="suggestVariables") + public String getJobExecutor() { + return jobExecutor; + } + + public void setJobExecutor(String jobExecutor) { + this.jobExecutor = jobExecutor; + } + + @SuppressWarnings("unused") + private static List suggestJobExecutors(String matchWith) { + List applicableJobExecutors = new ArrayList<>(); + ProjectBlobPage page = (ProjectBlobPage) WicketUtils.getPage(); + String jobName = (String) EditContext.get().getInputValue(PROP_NAME); + if (jobName != null) { + Context context = new Context(page.getProject(), page.getBlobIdent().revision, jobName); + for (JobExecutor executor: OneDev.getInstance(SettingManager.class).getJobExecutors()) { + if (executor.isEnabled()) { + if (executor.getJobAuthorization() == null) { + applicableJobExecutors.add(executor.getName()); + } else { + if (JobAuthorization.parse(executor.getJobAuthorization()).matches(context)) + applicableJobExecutors.add(executor.getName()); + } + } + } + } + + return SuggestionUtils.suggest(applicableJobExecutors, matchWith); + } + + @Editable(order=200, description="Steps will be executed serially on same node, sharing the same job workspace") + public List getSteps() { + return steps; + } + + public void setSteps(List steps) { + this.steps = steps; + } + + @Editable(order=400, name="Parameter Specs", group="Params & Triggers", description="Optionally define parameter specifications of the job") + @Valid + public List getParamSpecs() { + return paramSpecs; + } + + public void setParamSpecs(List paramSpecs) { + this.paramSpecs = paramSpecs; + } + + @Editable(order=500, group="Params & Triggers", description="Use triggers to run the job automatically under certain conditions") + @Valid + public List getTriggers() { + return triggers; + } + + public void setTriggers(List triggers) { + this.triggers = triggers; + } + + @Editable(name="Job Dependencies", order=9110, group="Dependencies & Services", description="Job dependencies determines the order and " + + "concurrency when run different jobs. You may also specify artifacts to retrieve from upstream jobs") + @Valid + public List getJobDependencies() { + return jobDependencies; + } + + public void setJobDependencies(List jobDependencies) { + this.jobDependencies = jobDependencies; + } + + @Editable(name="Project Dependencies", order=9112, group="Dependencies & Services", description="Use project dependency to retrieve " + + "artifacts from other projects") + @Valid + public List getProjectDependencies() { + return projectDependencies; + } + + public void setProjectDependencies(List projectDependencies) { + this.projectDependencies = projectDependencies; + } + + @Editable(order=9114, group="Dependencies & Services", placeholder="No required services", + description="Optionally specify services required by this job. 
" + + "NOTE: Services are only supported by docker aware executors " + + "(server docker executor, remote docker executor, or kubernetes executor)") + @ChoiceProvider("getServiceChoices") + public List getRequiredServices() { + return requiredServices; + } + + public void setRequiredServices(List requiredServices) { + this.requiredServices = requiredServices; + } + + @SuppressWarnings("unused") + private static List getServiceChoices() { + List choices = new ArrayList<>(); + Component component = ComponentContext.get().getComponent(); + BuildSpecAware buildSpecAware = WicketUtils.findInnermost(component, BuildSpecAware.class); + if (buildSpecAware != null) { + BuildSpec buildSpec = buildSpecAware.getBuildSpec(); + if (buildSpec != null) { + choices.addAll(buildSpec.getServiceMap().values().stream() + .map(it->it.getName()).collect(Collectors.toList())); + } + } + return choices; + } + + @Editable(order=9400, group="More Settings", description="Specify condition to retry build upon failure") + @NotEmpty + @RetryCondition + public String getRetryCondition() { + return retryCondition; + } + + public void setRetryCondition(String retryCondition) { + this.retryCondition = retryCondition; + } + + @Editable(order=9410, group="More Settings", description="Maximum of retries before giving up") + @Min(value=1, message="This value should not be less than 1") + public int getMaxRetries() { + return maxRetries; + } + + public void setMaxRetries(int maxRetries) { + this.maxRetries = maxRetries; + } + + @Editable(order=9420, group="More Settings", description="Delay for the first retry in seconds. " + + "Delay of subsequent retries will be calculated using an exponential back-off " + + "based on this delay") + @Min(value=1, message="This value should not be less than 1") + public int getRetryDelay() { + return retryDelay; + } + + public void setRetryDelay(int retryDelay) { + this.retryDelay = retryDelay; + } + + @Editable(order=10100, group="More Settings", description="Cache specific paths to speed up job execution. " + + "For instance for Java Maven projects executed by various docker executors, you may cache folder " + + "/root/.m2/repository to avoid downloading dependencies for subsequent executions.
" + + "WARNING: When using cache, malicious jobs running with same job executor " + + "can read or even pollute the cache intentionally using same cache key as yours. To avoid this " + + "issue, make sure job executor executing your job can only be used by trusted jobs via job " + + "authorization setting") + @Valid + public List getCaches() { + return caches; + } + + public void setCaches(List caches) { + this.caches = caches; + } + + @Editable(order=10500, group="More Settings", description="Specify timeout in seconds") + public long getTimeout() { + return timeout; + } + + public void setTimeout(long timeout) { + this.timeout = timeout; + } + + @Editable(order=10600, name="Post Build Actions", group="More Settings") + @Valid + public List getPostBuildActions() { + return postBuildActions; + } + + public void setPostBuildActions(List postBuildActions) { + this.postBuildActions = postBuildActions; + } + + @Nullable + public JobTriggerMatch getTriggerMatch(ProjectEvent event) { + for (JobTrigger trigger: getTriggers()) { + SubmitReason reason = trigger.matches(event, this); + if (reason != null) + return new JobTriggerMatch(trigger, reason); + } + return null; + } + + @Override + public boolean isValid(ConstraintValidatorContext context) { + boolean isValid = true; + + Set keys = new HashSet<>(); + Set paths = new HashSet<>(); + for (CacheSpec cache: caches) { + if (!keys.add(cache.getKey())) { + isValid = false; + context.buildConstraintViolationWithTemplate("Duplicate key (" + cache.getKey() + ")") + .addPropertyNode("caches").addConstraintViolation(); + } + if (!paths.add(cache.getPath())) { + isValid = false; + context.buildConstraintViolationWithTemplate("Duplicate path (" + cache.getPath() + ")") + .addPropertyNode("caches").addConstraintViolation(); + } + } + + Set dependencyJobNames = new HashSet<>(); + for (JobDependency dependency: jobDependencies) { + if (!dependencyJobNames.add(dependency.getJobName())) { + isValid = false; + context.buildConstraintViolationWithTemplate("Duplicate dependency (" + dependency.getJobName() + ")") + .addPropertyNode("jobDependencies").addConstraintViolation(); + } + } + + Set dependencyProjectPaths = new HashSet<>(); + for (ProjectDependency dependency: projectDependencies) { + if (!dependencyProjectPaths.add(dependency.getProjectPath())) { + isValid = false; + context.buildConstraintViolationWithTemplate("Duplicate dependency (" + dependency.getProjectPath() + ")") + .addPropertyNode("projectDependencies").addConstraintViolation(); + } + } + + Set paramSpecNames = new HashSet<>(); + for (ParamSpec paramSpec: paramSpecs) { + if (!paramSpecNames.add(paramSpec.getName())) { + isValid = false; + context.buildConstraintViolationWithTemplate("Duplicate parameter spec (" + paramSpec.getName() + ")") + .addPropertyNode("paramSpecs").addConstraintViolation(); + } + } + + if (getRetryCondition() != null) { + try { + io.onedev.server.buildspec.job.retrycondition.RetryCondition.parse(this, getRetryCondition()); + } catch (Exception e) { + String message = e.getMessage(); + if (message == null) + message = "Malformed retry condition"; + context.buildConstraintViolationWithTemplate(message) + .addPropertyNode(PROP_RETRY_CONDITION) + .addConstraintViolation(); + isValid = false; + } + } + + if (isValid) { + for (int triggerIndex=0; triggerIndex getParamSpecMap() { + if (paramSpecMap == null) + paramSpecMap = ParamUtils.getParamSpecMap(paramSpecs); + return paramSpecMap; + } + + public static String getBuildQuery(ObjectId commitId, String jobName, + @Nullable 
Build pipelineOf, @Nullable String refName, @Nullable PullRequest request) { + String query = "" + + Criteria.quote(NAME_COMMIT) + " " + getRuleName(Is) + " " + Criteria.quote(commitId.name()) + + " " + getRuleName(And) + " " + + Criteria.quote(NAME_JOB) + " " + getRuleName(Is) + " " + Criteria.quote(jobName); + if (pipelineOf != null) + query = query + " " + getRuleName(And) + " " + getRuleName(InPipelineOf) + " " + Criteria.quote("#" + pipelineOf.getNumber()); + if (request != null) { + query = query + + " " + getRuleName(And) + " " + + Criteria.quote(NAME_PULL_REQUEST) + " " + getRuleName(Is) + " " + Criteria.quote("#" + request.getNumber()); + } + if (refName != null) { + String branch = GitUtils.ref2branch(refName); + if (branch != null) { + query = query + + " " + getRuleName(And) + " " + + Criteria.quote(NAME_BRANCH) + " " + getRuleName(Is) + " " + Criteria.quote(branch); + } + String tag = GitUtils.ref2tag(refName); + if (tag != null) { + query = query + + " " + getRuleName(And) + " " + + Criteria.quote(NAME_TAG) + " " + getRuleName(Is) + " " + Criteria.quote(tag); + } + } + return query; + } + + public static List getChoices() { + List choices = new ArrayList<>(); + Component component = ComponentContext.get().getComponent(); + BuildSpecAware buildSpecAware = WicketUtils.findInnermost(component, BuildSpecAware.class); + if (buildSpecAware != null) { + BuildSpec buildSpec = buildSpecAware.getBuildSpec(); + if (buildSpec != null) { + choices.addAll(buildSpec.getJobMap().values().stream() + .map(it->it.getName()).collect(Collectors.toList())); + } + JobAware jobAware = WicketUtils.findInnermost(component, JobAware.class); + if (jobAware != null) { + Job job = jobAware.getJob(); + if (job != null) + choices.remove(job.getName()); + } + } + return choices; + } + + @SuppressWarnings("unused") + private static List suggestVariables(String matchWith) { + return BuildSpec.suggestVariables(matchWith, false, false, false); + } + +} diff --git a/server-core/src/main/java/io/onedev/server/entitymanager/AgentManager.java b/server-core/src/main/java/io/onedev/server/entitymanager/AgentManager.java index 3bf6686f7d..73759f5932 100644 --- a/server-core/src/main/java/io/onedev/server/entitymanager/AgentManager.java +++ b/server-core/src/main/java/io/onedev/server/entitymanager/AgentManager.java @@ -29,9 +29,9 @@ public interface AgentManager extends EntityManager { Map getAgentServers(); - List getOsNames(); + Collection getOsNames(); - List getOsArchs(); + Collection getOsArchs(); List query(EntityQuery agentQuery, int firstResult, int maxResults); diff --git a/server-core/src/main/java/io/onedev/server/entitymanager/impl/DefaultAgentManager.java b/server-core/src/main/java/io/onedev/server/entitymanager/impl/DefaultAgentManager.java index e19a69effb..3eab7319bb 100644 --- a/server-core/src/main/java/io/onedev/server/entitymanager/impl/DefaultAgentManager.java +++ b/server-core/src/main/java/io/onedev/server/entitymanager/impl/DefaultAgentManager.java @@ -1,39 +1,9 @@ package io.onedev.server.entitymanager.impl; -import java.io.IOException; -import java.io.InputStream; -import java.io.ObjectStreamException; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.UUID; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeoutException; -import java.util.stream.Collectors; - -import 
javax.inject.Inject; -import javax.inject.Singleton; -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.Root; - -import org.apache.commons.lang.SerializationUtils; -import org.apache.commons.lang3.StringUtils; -import org.eclipse.jetty.websocket.api.Session; -import org.hibernate.criterion.Restrictions; -import org.hibernate.query.Query; - import com.google.common.base.Splitter; import com.hazelcast.cluster.MembershipEvent; import com.hazelcast.cluster.MembershipListener; import com.hazelcast.core.HazelcastInstance; - -import edu.emory.mathcs.backport.java.util.Collections; import io.onedev.agent.AgentData; import io.onedev.agent.Message; import io.onedev.agent.MessageTypes; @@ -68,6 +38,25 @@ import io.onedev.server.search.entity.EntitySort.Direction; import io.onedev.server.search.entity.agent.AgentQuery; import io.onedev.server.util.criteria.Criteria; import io.onedev.server.util.validation.AttributeNameValidator; +import org.apache.commons.lang.SerializationUtils; +import org.apache.commons.lang3.StringUtils; +import org.eclipse.jetty.websocket.api.Session; +import org.hibernate.criterion.Restrictions; +import org.hibernate.query.Query; + +import javax.inject.Inject; +import javax.inject.Singleton; +import javax.persistence.criteria.CriteriaBuilder; +import javax.persistence.criteria.CriteriaQuery; +import javax.persistence.criteria.Root; +import java.io.IOException; +import java.io.InputStream; +import java.io.ObjectStreamException; +import java.io.Serializable; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeoutException; +import java.util.stream.Collectors; @Singleton public class DefaultAgentManager extends BaseEntityManager implements AgentManager, Serializable { @@ -200,8 +189,7 @@ public class DefaultAgentManager extends BaseEntityManager implements Age agent.setOsVersion(data.getOsInfo().getOsVersion()); agent.setOsArch(data.getOsInfo().getOsArch()); agent.setName(data.getName()); - agent.setCpu(data.getCpu()); - agent.setMemory(data.getMemory()); + agent.setCpus(data.getCpus()); agent.setTemporal(data.isTemporal()); agent.setIpAddress(data.getIpAddress()); save(agent); @@ -221,8 +209,7 @@ public class DefaultAgentManager extends BaseEntityManager implements Age agent.setOsVersion(data.getOsInfo().getOsVersion()); agent.setOsArch(data.getOsInfo().getOsArch()); agent.setIpAddress(data.getIpAddress()); - agent.setCpu(data.getCpu()); - agent.setMemory(data.getMemory()); + agent.setCpus(data.getCpus()); agent.setTemporal(data.isTemporal()); save(agent); attributeManager.syncAttributes(agent, data.getAttributes()); @@ -290,23 +277,19 @@ public class DefaultAgentManager extends BaseEntityManager implements Age @Override public Map getAgentServers() { - return new HashMap<>(agentServers); + return agentServers; } @Sessional @Override - public List getOsNames() { - List osNames = new ArrayList<>(this.osNames.keySet()); - Collections.sort(osNames); - return osNames; + public Collection getOsNames() { + return osNames.keySet(); } @Sessional @Override - public List getOsArchs() { - List osArchs = new ArrayList<>(this.osArchs.keySet()); - Collections.sort(osArchs); - return osArchs; + public Collection getOsArchs() { + return osArchs.keySet(); } private CriteriaQuery buildCriteriaQuery(org.hibernate.Session session, EntityQuery agentQuery) { diff --git a/server-core/src/main/java/io/onedev/server/job/AgentInfo.java 
b/server-core/src/main/java/io/onedev/server/job/AgentInfo.java deleted file mode 100644 index 9d16bef81c..0000000000 --- a/server-core/src/main/java/io/onedev/server/job/AgentInfo.java +++ /dev/null @@ -1,33 +0,0 @@ -package io.onedev.server.job; - -import org.eclipse.jetty.websocket.api.Session; - -import io.onedev.agent.AgentData; - -public class AgentInfo { - - private final Long id; - - private final AgentData data; - - private final Session session; - - public AgentInfo(Long id, AgentData data, Session session) { - this.id = id; - this.data = data; - this.session = session; - } - - public Long getId() { - return id; - } - - public AgentData getData() { - return data; - } - - public Session getSession() { - return session; - } - -} diff --git a/server-core/src/main/java/io/onedev/server/job/AgentRunnable.java b/server-core/src/main/java/io/onedev/server/job/AgentRunnable.java new file mode 100644 index 0000000000..9010baee04 --- /dev/null +++ b/server-core/src/main/java/io/onedev/server/job/AgentRunnable.java @@ -0,0 +1,9 @@ +package io.onedev.server.job; + +import java.io.Serializable; + +public interface AgentRunnable extends Serializable { + + void run(Long agentId); + +} diff --git a/server-core/src/main/java/io/onedev/server/job/CancellerAwareCancellationException.java b/server-core/src/main/java/io/onedev/server/job/CancellationException.java similarity index 60% rename from server-core/src/main/java/io/onedev/server/job/CancellerAwareCancellationException.java rename to server-core/src/main/java/io/onedev/server/job/CancellationException.java index 7f9aaf380f..fa61f27dc1 100644 --- a/server-core/src/main/java/io/onedev/server/job/CancellerAwareCancellationException.java +++ b/server-core/src/main/java/io/onedev/server/job/CancellationException.java @@ -2,13 +2,13 @@ package io.onedev.server.job; import javax.annotation.Nullable; -public class CancellerAwareCancellationException extends java.util.concurrent.CancellationException { +public class CancellationException extends java.util.concurrent.CancellationException { private static final long serialVersionUID = 1L; private final Long cancellerId; - public CancellerAwareCancellationException(@Nullable Long cancellerId) { + public CancellationException(@Nullable Long cancellerId) { this.cancellerId = cancellerId; } diff --git a/server-core/src/main/java/io/onedev/server/job/DefaultJobManager.java b/server-core/src/main/java/io/onedev/server/job/DefaultJobManager.java index 5244900fb0..96d969ab8a 100644 --- a/server-core/src/main/java/io/onedev/server/job/DefaultJobManager.java +++ b/server-core/src/main/java/io/onedev/server/job/DefaultJobManager.java @@ -1,87 +1,18 @@ package io.onedev.server.job; -import static io.onedev.k8shelper.KubernetesHelper.BUILD_VERSION; -import static io.onedev.k8shelper.KubernetesHelper.replacePlaceholders; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.io.ObjectStreamException; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Comparator; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.UUID; -import java.util.concurrent.Callable; -import java.util.concurrent.CancellationException; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import 
java.util.concurrent.Future; -import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.concurrent.locks.Lock; -import java.util.function.Consumer; -import java.util.function.Function; - -import javax.annotation.Nullable; -import javax.inject.Inject; -import javax.inject.Singleton; -import javax.servlet.http.HttpServletRequest; -import javax.validation.ConstraintViolation; -import javax.validation.Validator; -import javax.ws.rs.client.Client; -import javax.ws.rs.client.ClientBuilder; -import javax.ws.rs.client.Invocation; -import javax.ws.rs.client.WebTarget; -import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.Response; - -import org.apache.shiro.authz.UnauthorizedException; -import org.apache.shiro.subject.Subject; -import org.eclipse.jgit.lib.ObjectId; -import org.quartz.CronScheduleBuilder; -import org.quartz.ScheduleBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.google.common.base.Preconditions; import com.google.common.base.Throwables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; - import io.onedev.agent.job.FailedException; import io.onedev.commons.loader.ManagedSerializedForm; -import io.onedev.commons.utils.ExceptionUtils; -import io.onedev.commons.utils.ExplicitException; -import io.onedev.commons.utils.FileUtils; -import io.onedev.commons.utils.LockUtils; -import io.onedev.commons.utils.StringUtils; -import io.onedev.commons.utils.TaskLogger; -import io.onedev.k8shelper.Action; -import io.onedev.k8shelper.CacheAllocationRequest; -import io.onedev.k8shelper.CacheInstance; -import io.onedev.k8shelper.KubernetesHelper; -import io.onedev.k8shelper.LeafFacade; -import io.onedev.k8shelper.ServerSideFacade; +import io.onedev.commons.utils.*; +import io.onedev.k8shelper.*; import io.onedev.server.OneDev; import io.onedev.server.buildspec.BuildSpec; import io.onedev.server.buildspec.BuildSpecParseException; import io.onedev.server.buildspec.Service; -import io.onedev.server.buildspec.job.CacheSpec; -import io.onedev.server.buildspec.job.Job; -import io.onedev.server.buildspec.job.JobDependency; -import io.onedev.server.buildspec.job.JobExecutorDiscoverer; -import io.onedev.server.buildspec.job.JobTriggerMatch; -import io.onedev.server.buildspec.job.SubmitReason; +import io.onedev.server.buildspec.job.*; import io.onedev.server.buildspec.job.action.PostBuildAction; import io.onedev.server.buildspec.job.action.condition.ActionCondition; import io.onedev.server.buildspec.job.projectdependency.ProjectDependency; @@ -95,27 +26,16 @@ import io.onedev.server.buildspec.param.spec.SecretParam; import io.onedev.server.buildspec.step.ServerSideStep; import io.onedev.server.buildspec.step.Step; import io.onedev.server.cluster.ClusterManager; +import io.onedev.server.cluster.ClusterRunnable; import io.onedev.server.cluster.ClusterTask; -import io.onedev.server.entitymanager.AgentManager; -import io.onedev.server.entitymanager.BuildManager; -import io.onedev.server.entitymanager.BuildParamManager; -import io.onedev.server.entitymanager.ProjectManager; -import io.onedev.server.entitymanager.PullRequestManager; -import io.onedev.server.entitymanager.SettingManager; -import io.onedev.server.entitymanager.UserManager; +import io.onedev.server.entitymanager.*; import io.onedev.server.event.Listen; import io.onedev.server.event.ListenerRegistry; import io.onedev.server.event.project.DefaultBranchChanged; import 
io.onedev.server.event.project.ProjectEvent; import io.onedev.server.event.project.RefUpdated; import io.onedev.server.event.project.ScheduledTimeReaches; -import io.onedev.server.event.project.build.BuildEvent; -import io.onedev.server.event.project.build.BuildFinished; -import io.onedev.server.event.project.build.BuildPending; -import io.onedev.server.event.project.build.BuildRetrying; -import io.onedev.server.event.project.build.BuildRunning; -import io.onedev.server.event.project.build.BuildSubmitted; -import io.onedev.server.event.project.build.BuildUpdated; +import io.onedev.server.event.project.build.*; import io.onedev.server.event.project.pullrequest.PullRequestEvent; import io.onedev.server.event.system.SystemStarted; import io.onedev.server.event.system.SystemStopping; @@ -127,13 +47,8 @@ import io.onedev.server.job.authorization.JobAuthorization; import io.onedev.server.job.authorization.JobAuthorization.Context; import io.onedev.server.job.log.LogManager; import io.onedev.server.job.log.LogTask; -import io.onedev.server.model.Build; +import io.onedev.server.model.*; import io.onedev.server.model.Build.Status; -import io.onedev.server.model.BuildDependence; -import io.onedev.server.model.BuildParam; -import io.onedev.server.model.Project; -import io.onedev.server.model.PullRequest; -import io.onedev.server.model.User; import io.onedev.server.model.support.administration.jobexecutor.JobExecutor; import io.onedev.server.persistence.SessionManager; import io.onedev.server.persistence.TransactionManager; @@ -161,6 +76,37 @@ import io.onedev.server.util.script.identity.ScriptIdentity; import io.onedev.server.web.editable.EditableStringTransformer; import io.onedev.server.web.editable.EditableUtils; import io.onedev.server.web.editable.annotation.Interpolative; +import org.apache.shiro.authz.UnauthorizedException; +import org.apache.shiro.subject.Subject; +import org.eclipse.jgit.lib.ObjectId; +import org.quartz.CronScheduleBuilder; +import org.quartz.ScheduleBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nullable; +import javax.inject.Inject; +import javax.inject.Singleton; +import javax.servlet.http.HttpServletRequest; +import javax.validation.ConstraintViolation; +import javax.validation.Validator; +import javax.ws.rs.client.Client; +import javax.ws.rs.client.ClientBuilder; +import javax.ws.rs.client.Invocation; +import javax.ws.rs.client.WebTarget; +import javax.ws.rs.core.HttpHeaders; +import javax.ws.rs.core.Response; +import java.io.*; +import java.util.*; +import java.util.concurrent.*; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.concurrent.locks.Lock; +import java.util.function.Consumer; +import java.util.function.Function; + +import static io.onedev.k8shelper.KubernetesHelper.BUILD_VERSION; +import static io.onedev.k8shelper.KubernetesHelper.replacePlaceholders; @Singleton public class DefaultJobManager implements JobManager, Runnable, CodePullAuthorizationSource, Serializable { @@ -175,7 +121,7 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz private final Map> jobActions = new ConcurrentHashMap<>(); - private final Map jobExecutors = new ConcurrentHashMap<>(); + private final Map jobRunnables = new ConcurrentHashMap<>(); private final Map> scheduledTasks = new ConcurrentHashMap<>(); @@ -557,163 +503,142 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz String jobExecutorName = 
interpolator.interpolate(build.getJob().getJobExecutor()); JobExecutor jobExecutor = getJobExecutor(build, jobExecutorName, jobLogger); - if (jobExecutor != null) { - Long projectId = build.getProject().getId(); - String projectPath = build.getProject().getPath(); - String projectGitDir = storageManager.getProjectGitDir(build.getProject().getId()).getAbsolutePath(); - Long buildId = build.getId(); - Long buildNumber = build.getNumber(); - String refName = build.getRefName(); - ObjectId commitId = ObjectId.fromString(build.getCommitHash()); - BuildSpec buildSpec = build.getSpec(); - - AtomicInteger maxRetries = new AtomicInteger(0); - AtomicInteger retryDelay = new AtomicInteger(0); - List caches = new ArrayList<>(); - List services = new ArrayList<>(); - List actions = new ArrayList<>(); - - Job job; - - JobSecretAuthorizationContext.push(build.getJobSecretAuthorizationContext()); - Build.push(build); - try { - job = build.getJob(); - - for (Step step: job.getSteps()) { - step = interpolator.interpolateProperties(step); - actions.add(step.getAction(build, jobToken, build.getParamCombination())); - } - - for (CacheSpec cache: job.getCaches()) - caches.add(interpolator.interpolateProperties(cache)); - - for (String serviceName: job.getRequiredServices()) { - Service service = buildSpec.getServiceMap().get(serviceName); - services.add(interpolator.interpolateProperties(service)); - } - - maxRetries.set(job.getMaxRetries()); - retryDelay.set(job.getRetryDelay()); - } finally { - Build.pop(); - JobSecretAuthorizationContext.pop(); - } + Long projectId = build.getProject().getId(); + String projectPath = build.getProject().getPath(); + String projectGitDir = storageManager.getProjectGitDir(build.getProject().getId()).getAbsolutePath(); + Long buildId = build.getId(); + Long buildNumber = build.getNumber(); + String refName = build.getRefName(); + ObjectId commitId = ObjectId.fromString(build.getCommitHash()); + BuildSpec buildSpec = build.getSpec(); - int cpuRequirement = 0, memoryRequirement = 0; - cpuRequirement = job.getCpuRequirement(); - memoryRequirement = job.getMemoryRequirement(); + AtomicInteger maxRetries = new AtomicInteger(0); + AtomicInteger retryDelay = new AtomicInteger(0); + List caches = new ArrayList<>(); + List services = new ArrayList<>(); + List actions = new ArrayList<>(); + + Job job; + + JobSecretAuthorizationContext.push(build.getJobSecretAuthorizationContext()); + Build.push(build); + try { + job = build.getJob(); + + for (Step step: job.getSteps()) { + step = interpolator.interpolateProperties(step); + actions.add(step.getAction(build, jobToken, build.getParamCombination())); + } + + for (CacheSpec cache: job.getCaches()) + caches.add(interpolator.interpolateProperties(cache)); for (String serviceName: job.getRequiredServices()) { Service service = buildSpec.getServiceMap().get(serviceName); - if (service != null) { - cpuRequirement += service.getCpuRequirement(); - memoryRequirement += service.getMemoryRequirement(); - } + services.add(interpolator.interpolateProperties(service)); } - Map resourceRequirements = new HashMap<>(); - resourceRequirements.put(ResourceAllocator.CPU, cpuRequirement); - resourceRequirements.put(ResourceAllocator.MEMORY, memoryRequirement); + + maxRetries.set(job.getMaxRetries()); + retryDelay.set(job.getRetryDelay()); + } finally { + Build.pop(); + JobSecretAuthorizationContext.pop(); + } - AtomicReference executionRef = new AtomicReference<>(null); - executionRef.set(new JobExecution(executorService.submit(new Runnable() { + AtomicReference 
executionRef = new AtomicReference<>(null); + executionRef.set(new JobExecution(executorService.submit(new Runnable() { - @Override - public void run() { - AtomicInteger retried = new AtomicInteger(0); - while (true) { - JobContext jobContext = new JobContext(jobToken, jobExecutor, projectId, projectPath, - projectGitDir, buildId, buildNumber, actions, refName, commitId, caches, services, - resourceRequirements, retried.get()); - // Store original job actions as the copy in job context will be fetched from cluster and - // some transient fields (such as step object in ServerSideFacade) will not be preserved - jobActions.put(jobToken, actions); - jobContexts.put(jobToken, jobContext); - logManager.addJobLogger(jobToken, jobLogger); - serverStepThreads.put(jobToken, new ArrayList<>()); - try { - jobLogger.log("Waiting for resources..."); - resourceAllocator.run( - new JobRunnable(jobToken), jobExecutor.getAgentRequirement(), resourceRequirements); - break; - } catch (Throwable e) { - if (retried.getAndIncrement() < maxRetries.get() && sessionManager.call(new Callable() { + @Override + public void run() { + AtomicInteger retried = new AtomicInteger(0); + while (true) { + JobContext jobContext = new JobContext(jobToken, jobExecutor, projectId, projectPath, + projectGitDir, buildId, buildNumber, actions, refName, commitId, caches, services, + retried.get()); + // Store original job actions as the copy in job context will be fetched from cluster and + // some transient fields (such as step object in ServerSideFacade) will not be preserved + jobActions.put(jobToken, actions); + jobContexts.put(jobToken, jobContext); + logManager.addJobLogger(jobToken, jobLogger); + serverStepThreads.put(jobToken, new ArrayList<>()); + try { + jobExecutor.execute(jobContext); + break; + } catch (Throwable e) { + if (retried.getAndIncrement() < maxRetries.get() && sessionManager.call(new Callable() { + + @Override + public Boolean call() { + RetryCondition retryCondition = RetryCondition.parse(job, job.getRetryCondition()); + + AtomicReference errorMessage = new AtomicReference<>(null); + log(e, new TaskLogger() { + + @Override + public void log(String message, String sessionId) { + errorMessage.set(message); + } + + }); + return retryCondition.matches(new RetryContext(buildManager.load(buildId), errorMessage.get())); + } + + })) { + log(e, jobLogger); + jobLogger.warning("Job will be retried after a while..."); + transactionManager.run(new Runnable() { @Override - public Boolean call() { - RetryCondition retryCondition = RetryCondition.parse(job, job.getRetryCondition()); - - AtomicReference errorMessage = new AtomicReference<>(null); - log(e, new TaskLogger() { - - @Override - public void log(String message, String sessionId) { - errorMessage.set(message); - } - - }); - return retryCondition.matches(new RetryContext(buildManager.load(buildId), errorMessage.get())); + public void run() { + Build build = buildManager.load(buildId); + build.setRunningDate(null); + build.setPendingDate(null); + build.setRetryDate(new Date()); + build.setStatus(Status.WAITING); + listenerRegistry.post(new BuildRetrying(build)); + buildManager.save(build); } - })) { - log(e, jobLogger); - jobLogger.warning("Job will be retried after a while..."); - transactionManager.run(new Runnable() { + }); + try { + Thread.sleep(retryDelay.get() * (long)(Math.pow(2, retried.get())) * 1000L); + } catch (InterruptedException e2) { + throw new RuntimeException(e2); + } + transactionManager.run(new Runnable() { - @Override - public void run() { - Build 
build = buildManager.load(buildId); - build.setRunningDate(null); - build.setPendingDate(null); - build.setRetryDate(new Date()); - build.setStatus(Build.Status.WAITING); - listenerRegistry.post(new BuildRetrying(build)); - buildManager.save(build); - } - - }); - try { - Thread.sleep(retryDelay.get() * (long)(Math.pow(2, retried.get())) * 1000L); - } catch (InterruptedException e2) { - throw new RuntimeException(e2); + @Override + public void run() { + JobExecution execution = executionRef.get(); + if (execution != null) + execution.updateBeginTime(); + Build build = buildManager.load(buildId); + build.setPendingDate(new Date()); + build.setStatus(Status.PENDING); + listenerRegistry.post(new BuildPending(build)); + buildManager.save(build); } - transactionManager.run(new Runnable() { - - @Override - public void run() { - JobExecution execution = executionRef.get(); - if (execution != null) - execution.updateBeginTime(); - Build build = buildManager.load(buildId); - build.setPendingDate(new Date()); - build.setStatus(Build.Status.PENDING); - listenerRegistry.post(new BuildPending(build)); - buildManager.save(build); - } - - }); - } else { - throw ExceptionUtils.unchecked(e); - } - } finally { - Collection threads = serverStepThreads.remove(jobToken); - synchronized (threads) { - for (Thread thread: threads) - thread.interrupt(); - } - logManager.removeJobLogger(jobToken); - jobContexts.remove(jobToken); - jobActions.remove(jobToken); + + }); + } else { + throw ExceptionUtils.unchecked(e); } - } - } - - }), job.getTimeout()*1000L)); + } finally { + Collection threads = serverStepThreads.remove(jobToken); + synchronized (threads) { + for (Thread thread: threads) + thread.interrupt(); + } + logManager.removeJobLogger(jobToken); + jobContexts.remove(jobToken); + jobActions.remove(jobToken); + } + } + } - return executionRef.get(); - } else { - throw new ExplicitException("No applicable job executor"); - } + }), job.getTimeout()*1000L)); + + return executionRef.get(); } private void log(Throwable e, TaskLogger logger) { @@ -896,9 +821,9 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz public Void call() throws Exception { JobContext jobContext = getJobContext(buildId); if (jobContext != null) { - JobExecutor jobExecutor = jobExecutors.get(jobContext.getJobToken()); - if (jobExecutor != null) - jobExecutor.resume(jobContext); + JobRunnable jobRunnable = jobRunnables.get(jobContext.getJobToken()); + if (jobRunnable != null) + jobRunnable.resume(jobContext); } return null; } @@ -925,9 +850,9 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz @Override public Void call() throws Exception { JobContext jobContext = getJobContext(jobToken, true); - JobExecutor jobExecutor = jobExecutors.get(jobContext.getJobToken()); - if (jobExecutor != null) { - Shell shell = jobExecutor.openShell(jobContext, terminal); + JobRunnable jobRunnable = jobRunnables.get(jobContext.getJobToken()); + if (jobRunnable != null) { + Shell shell = jobRunnable.openShell(jobContext, terminal); jobShells.put(terminal.getSessionId(), shell); } else { throw new ExplicitException("Job shell not ready"); @@ -1181,7 +1106,7 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz } catch (InterruptedException e) { } } - scheduledTasks.values().stream().forEach(it1->it1.stream().forEach(it2->taskScheduler.unschedule(it2))); + scheduledTasks.values().forEach(it1-> it1.forEach(taskScheduler::unschedule)); scheduledTasks.clear(); } @@ -1294,9 
+1219,9 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz jobLogger.log("Job finished"); } catch (TimeoutException e) { build.setStatus(Build.Status.TIMED_OUT); - } catch (CancellationException e) { - if (e instanceof CancellerAwareCancellationException) { - Long cancellerId = ((CancellerAwareCancellationException) e).getCancellerId(); + } catch (java.util.concurrent.CancellationException e) { + if (e instanceof CancellationException) { + Long cancellerId = ((CancellationException) e).getCancellerId(); if (cancellerId != null) build.setCanceller(userManager.load(cancellerId)); } @@ -1308,7 +1233,7 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz jobLogger.error(explicitException.getMessage()); else if (ExceptionUtils.find(e, FailedException.class) == null) jobLogger.error("Error running job", e); - } catch (InterruptedException e) { + } catch (InterruptedException ignored) { } finally { build.setFinishDate(new Date()); buildManager.save(build); @@ -1449,28 +1374,51 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz }); } - private void runJobLocal(String jobToken, AgentInfo agentInfo) { - JobContext jobContext = getJobContext(jobToken, true); - Long buildId = jobContext.getBuildId(); - - transactionManager.run(new Runnable() { + @Override + public void runJob(UUID serverUUID, ClusterRunnable runnable) { + Future future = null; + try { + future = clusterManager.submitToServer(serverUUID, new ClusterTask() { - @Override - public void run() { - Build build = buildManager.load(buildId); - build.setStatus(Build.Status.RUNNING); - build.setRunningDate(new Date()); - if (agentInfo != null) - build.setAgent(agentManager.load(agentInfo.getId())); - buildManager.save(build); - listenerRegistry.post(new BuildRunning(build)); + private static final long serialVersionUID = 1L; + + @Override + public Void call() throws Exception { + runnable.run(); + return null; + } + + }); + + // future.get() here does not respond to thread interruption + while (!future.isDone()) + Thread.sleep(1000); + future.get(); // call get() to throw possible execution exceptions + } catch (InterruptedException e) { + if (future != null) + future.cancel(true); + throw new RuntimeException(e); + } catch (ExecutionException e) { + throw new RuntimeException(e); + } + } + + @Override + public void runJobLocal(JobContext jobContext, JobRunnable runnable) { + while (thread == null) { + try { + Thread.sleep(100); + } catch (InterruptedException e) { + throw new RuntimeException(e); } + } - }); + Long buildId = jobContext.getBuildId(); + String jobToken = jobContext.getJobToken(); jobServers.put(jobToken, clusterManager.getLocalServerUUID()); - JobExecutor jobExecutor = jobContext.getJobExecutor(); - jobExecutors.put(jobToken, jobExecutor); + + jobRunnables.put(jobToken, runnable); try { TaskLogger jobLogger = logManager.getJobLogger(jobToken); if (jobLogger == null) { @@ -1478,21 +1426,21 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz @Override public void log(String message, String sessionId) { - projectManager.runOnProjectServer(jobContext.getProjectId(), new LogTask(jobToken, message, sessionId)); + projectManager.runOnProjectServer(jobContext.getProjectId(), new LogTask(jobToken, message, sessionId)); } - + }; logManager.addJobLogger(jobToken, jobLogger); try { - jobExecutor.execute(jobContext, jobLogger, agentInfo); + runnable.run(jobLogger); } finally { logManager.removeJobLogger(jobToken); } 
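/* Interruption-aware wait used by runJob above: Future.get() on a task submitted to
 * another cluster member would not respond to thread interruption, so the loop polls
 * isDone() once per second, letting InterruptedException surface from sleep(); the
 * catch block then cancels the remote task. Condensed pattern (future is assumed
 * already submitted):
 *
 *   while (!future.isDone())
 *       Thread.sleep(1000);   // InterruptedException propagates here
 *   future.get();             // rethrows any remote execution failure
 */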
} else { - jobExecutor.execute(jobContext, jobLogger, agentInfo); + runnable.run(jobLogger); } } finally { - jobExecutors.remove(jobToken); + jobRunnables.remove(jobToken); jobServers.remove(jobToken); } } @@ -1639,21 +1587,4 @@ public class DefaultJobManager implements JobManager, Runnable, CodePullAuthoriz } } - private static class JobRunnable implements ResourceRunnable { - - private static final long serialVersionUID = 1L; - - private final String jobToken; - - public JobRunnable(String jobToken) { - this.jobToken = jobToken; - } - - @Override - public void run(AgentInfo agentInfo) { - OneDev.getInstance(DefaultJobManager.class).runJobLocal(jobToken, agentInfo); - } - - } - } \ No newline at end of file diff --git a/server-core/src/main/java/io/onedev/server/job/DefaultResourceAllocator.java b/server-core/src/main/java/io/onedev/server/job/DefaultResourceAllocator.java index 6c74ab0bf7..aaf339e2f6 100644 --- a/server-core/src/main/java/io/onedev/server/job/DefaultResourceAllocator.java +++ b/server-core/src/main/java/io/onedev/server/job/DefaultResourceAllocator.java @@ -1,210 +1,139 @@ package io.onedev.server.job; -import java.io.ObjectStreamException; -import java.io.Serializable; -import java.util.Collection; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Future; -import java.util.stream.Collectors; - -import javax.inject.Inject; -import javax.inject.Singleton; - -import org.eclipse.jetty.websocket.api.Session; -import org.hibernate.query.Query; - import com.hazelcast.cluster.Member; import com.hazelcast.cluster.MembershipEvent; import com.hazelcast.cluster.MembershipListener; -import com.hazelcast.core.EntryEvent; -import com.hazelcast.core.EntryListener; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.map.IMap; -import com.hazelcast.map.MapEvent; import com.hazelcast.replicatedmap.ReplicatedMap; - -import io.onedev.agent.AgentData; import io.onedev.commons.loader.ManagedSerializedForm; import io.onedev.commons.utils.ExplicitException; import io.onedev.server.OneDev; -import io.onedev.server.ServerConfig; import io.onedev.server.cluster.ClusterManager; +import io.onedev.server.cluster.ClusterRunnable; import io.onedev.server.cluster.ClusterTask; import io.onedev.server.entitymanager.AgentManager; import io.onedev.server.event.Listen; import io.onedev.server.event.agent.AgentConnected; import io.onedev.server.event.agent.AgentDisconnected; import io.onedev.server.event.entity.EntityPersisted; -import io.onedev.server.event.entity.EntityRemoved; import io.onedev.server.event.system.SystemStarted; +import io.onedev.server.model.AbstractEntity; import io.onedev.server.model.Agent; -import io.onedev.server.persistence.SessionManager; import io.onedev.server.persistence.TransactionManager; -import io.onedev.server.persistence.annotation.Sessional; import io.onedev.server.persistence.annotation.Transactional; -import io.onedev.server.persistence.dao.Dao; import io.onedev.server.search.entity.agent.AgentQuery; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import oshi.SystemInfo; + +import javax.inject.Inject; +import javax.inject.Singleton; +import java.io.ObjectStreamException; +import java.io.Serializable; +import java.util.*; +import java.util.stream.Collectors; @Singleton public class 
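/* Allocator layout after this rewrite: each server/agent advertises a single CPU count
 * via the cluster-replicated maps serverCpus/agentCpus, while current consumption is
 * tracked in the distributed maps serverUsed/agentUsed under keys of the form
 * "<ownerId>:<resourceHolder>". Waiters block on serverAllocSync/agentAllocSync and are
 * woken cluster-wide through notifyServerAlloc()/notifyAgentAlloc() whenever capacity
 * may have been freed. */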
DefaultResourceAllocator implements ResourceAllocator, Serializable { + private static final Logger logger = LoggerFactory.getLogger(DefaultResourceAllocator.class); + private final AgentManager agentManager; private final ClusterManager clusterManager; - private final ExecutorService executorService; - - private final ServerConfig serverConfig; - - private final Map queryCaches = new HashMap<>(); - - private final SessionManager sessionManager; - private final TransactionManager transactionManager; - private final Dao dao; + private final JobManager jobManager; - private volatile ReplicatedMap> serverResourceQuotas; + private final Object serverAllocSync = new Object(); - private volatile IMap> serverResourceUsages; + private final Object agentAllocSync = new Object(); - private volatile ReplicatedMap> agentResourceQuotas; + private volatile ReplicatedMap serverCpus; + + private volatile ReplicatedMap agentCpus; - private volatile IMap> agentResourceUsages; + private volatile IMap serverUsed; - private volatile IMap agentPaused; + private volatile IMap agentUsed; + + private volatile IMap agentDisconnecting; @Inject - public DefaultResourceAllocator(Dao dao, AgentManager agentManager, - SessionManager sessionManager, TransactionManager transactionManager, - ClusterManager clusterManager, ServerConfig serverConfig, - ExecutorService executorService) { - this.dao = dao; + public DefaultResourceAllocator(AgentManager agentManager, TransactionManager transactionManager, + ClusterManager clusterManager, JobManager jobManager) { this.agentManager = agentManager; - this.sessionManager = sessionManager; this.transactionManager = transactionManager; this.clusterManager = clusterManager; - this.serverConfig = serverConfig; - this.executorService = executorService; + this.jobManager = jobManager; } public Object writeReplace() throws ObjectStreamException { return new ManagedSerializedForm(ResourceAllocator.class); } - private EntryListener newResourceChangeListener() { - return new EntryListener() { - - private void notifyResourceChange() { - synchronized (DefaultResourceAllocator.this) { - DefaultResourceAllocator.this.notifyAll(); - } - } - - @Override - public void entryAdded(EntryEvent event) { - notifyResourceChange(); - } - - @Override - public void entryUpdated(EntryEvent event) { - notifyResourceChange(); - } - - @Override - public void entryRemoved(EntryEvent event) { - notifyResourceChange(); - } - - @Override - public void entryEvicted(EntryEvent event) { - notifyResourceChange(); - } - - @Override - public void entryExpired(EntryEvent event) { - notifyResourceChange(); - } - - @Override - public void mapCleared(MapEvent event) { - notifyResourceChange(); - } - - @Override - public void mapEvicted(MapEvent event) { - notifyResourceChange(); - } - - }; - } - - @SuppressWarnings("unchecked") @Transactional @Listen(10) public void on(SystemStarted event) { HazelcastInstance hazelcastInstance = clusterManager.getHazelcastInstance(); - serverResourceQuotas = hazelcastInstance.getReplicatedMap("serverResourceQuotas"); - serverResourceQuotas.addEntryListener(newResourceChangeListener()); - - Map resourceCounts = new HashMap<>(); - resourceCounts.put(CPU, serverConfig.getServerCpu()); - resourceCounts.put(MEMORY, serverConfig.getServerMemory()); + serverCpus = hazelcastInstance.getReplicatedMap("serverCpus"); UUID localServerUUID = clusterManager.getLocalServerUUID(); - serverResourceQuotas.put(localServerUUID, resourceCounts); - - serverResourceUsages = 
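/* CPU capacity detection (just below): the server's logical processor count is probed
 * via the oshi library, falling back to 4 when the probe fails. The same probe in
 * isolation, assuming oshi-core on the classpath:
 *
 *   static int detectCpus() {
 *       try {
 *           return new oshi.SystemInfo().getHardware()
 *                   .getProcessor().getLogicalProcessorCount();
 *       } catch (Exception e) {
 *           return 4; // conservative default when the host cannot be inspected
 *       }
 *   }
 */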
hazelcastInstance.getMap("serverResourceUsages"); - serverResourceUsages.put(localServerUUID, new HashMap<>()); - serverResourceUsages.addEntryListener(newResourceChangeListener(), false); - - agentResourceQuotas = hazelcastInstance.getReplicatedMap("agentResourceQuotas"); - agentResourceQuotas.addEntryListener(newResourceChangeListener()); - - agentResourceUsages = hazelcastInstance.getMap("agentResourceUsages"); - agentResourceUsages.addEntryListener(newResourceChangeListener(), false); - - agentPaused = hazelcastInstance.getMap("agentPaused"); - agentPaused.addEntryListener(newResourceChangeListener(), false); - - if (clusterManager.isLeaderServer()) { - Query query = dao.getSession().createQuery(String.format("select id, %s from Agent", Agent.PROP_PAUSED)); - for (Object[] fields: (List)query.list()) - agentPaused.put((Long)fields[0], (Boolean)fields[1]); + try { + serverCpus.put( + localServerUUID, + new SystemInfo().getHardware().getProcessor().getLogicalProcessorCount()); + } catch (Exception e) { + logger.debug("Error calling oshi", e); + serverCpus.put(localServerUUID, 4); } + + agentCpus = hazelcastInstance.getReplicatedMap("agentCpus"); + serverUsed = hazelcastInstance.getMap("serverUsed"); + agentUsed = hazelcastInstance.getMap("agentUsed"); + agentDisconnecting = hazelcastInstance.getMap("agentDisconnecting"); hazelcastInstance.getCluster().addMembershipListener(new MembershipListener() { @Override public void memberRemoved(MembershipEvent membershipEvent) { if (clusterManager.isLeaderServer()) { - serverResourceQuotas.remove(membershipEvent.getMember().getUuid()); - serverResourceUsages.remove(membershipEvent.getMember().getUuid()); + UUID removedServerUUID = membershipEvent.getMember().getUuid(); + serverCpus.remove(removedServerUUID); + + Set keysToRemove = new HashSet<>(); + for (var key: serverUsed.keySet()) { + if (key.startsWith(removedServerUUID.toString() + ":")) + keysToRemove.add(key); + } + for (var keyToRemove: keysToRemove) + serverUsed.remove(keyToRemove); Set agentIdsToRemove = new HashSet<>(); for (var entry: agentManager.getAgentServers().entrySet()) { - if (entry.getValue().equals(membershipEvent.getMember().getUuid())) + if (entry.getValue().equals(removedServerUUID)) agentIdsToRemove.add(entry.getKey()); } - for (Long agentId: agentIdsToRemove) { - agentResourceQuotas.remove(agentId); - agentResourceUsages.remove(agentId); + + keysToRemove.clear(); + for (var agentId: agentIdsToRemove) { + agentCpus.remove(agentId); + for (var key: agentUsed.keySet()) { + if (key.startsWith(agentId + ":")) + keysToRemove.add(key); + } } + for (var keyToRemove: keysToRemove) + agentUsed.remove(keyToRemove); } } @Override public void memberAdded(MembershipEvent membershipEvent) { - + notifyServerAlloc(); } }); @@ -215,27 +144,46 @@ public class DefaultResourceAllocator implements ResourceAllocator, Serializable @Listen public void on(AgentConnected event) { Long agentId = event.getAgent().getId(); - sessionManager.runAsyncAfterCommit(new Runnable() { + Integer agentCpus = event.getAgent().getCpus(); + transactionManager.runAfterCommit(new Runnable() { @Override public void run() { - // Synchronize at very start of the method to make sure it is not possible for db connection - // to wait for synchronization block - synchronized (DefaultResourceAllocator.this) { - Agent agent = agentManager.load(agentId); - agentResourceQuotas.put(agentId, agent.getResources()); - agentResourceUsages.put(agentId, new HashMap<>()); - - for (QueryCache cache: queryCaches.values()) { - if 
(cache.query.matches(agent)) - cache.result.add(agentId); - } - } + DefaultResourceAllocator.this.agentCpus.put(agentId, agentCpus); + agentDisconnecting.remove(agentId); + notifyAgentAlloc(); } }); } + private void notifyServerAlloc() { + clusterManager.submitToAllServers((ClusterTask) () -> { + synchronized (serverAllocSync) { + serverAllocSync.notifyAll(); + } + return null; + }); + } + + private void notifyAgentAlloc() { + clusterManager.submitToAllServers((ClusterTask) () -> { + synchronized (agentAllocSync) { + agentAllocSync.notifyAll(); + } + return null; + }); + } + + @Listen + public void on(EntityPersisted event) { + if (event.getEntity() instanceof Agent) { + Agent agent = (Agent) event.getEntity(); + if (!agent.isPaused()) + notifyAgentAlloc(); + } + } + @Transactional @Listen public void on(AgentDisconnected event) { @@ -244,178 +192,25 @@ public class DefaultResourceAllocator implements ResourceAllocator, Serializable @Override public void run() { - // Run in a separate thread to make sure it is not possible for db connection to - // wait for synchronization block - executorService.execute(new Runnable() { - - @Override - public void run() { - agentResourceQuotas.remove(agentId); - agentResourceUsages.remove(agentId); - - synchronized (DefaultResourceAllocator.this) { - for (QueryCache cache: queryCaches.values()) - cache.result.remove(agentId); - } - } - - }); + agentCpus.remove(agentId); + + Set keysToRemove = new HashSet<>(); + for (var key: agentUsed.keySet()) { + if (key.startsWith(agentId + ":")) + keysToRemove.add(key); + } + for (var keyToRemove: keysToRemove) + agentUsed.remove(keyToRemove); } }); } - @Transactional - @Listen - public void on(EntityPersisted event) { - if (event.getEntity() instanceof Agent) { - Long agentId = event.getEntity().getId(); - boolean paused = ((Agent)event.getEntity()).isPaused(); - transactionManager.runAfterCommit(new Runnable() { - - @Override - public void run() { - agentPaused.put(agentId, paused); - } - - }); - } - } - - @Transactional - @Listen - public void on(EntityRemoved event) { - if (event.getEntity() instanceof Agent) { - Long agentId = event.getEntity().getId(); - transactionManager.runAfterCommit(new Runnable() { - - @Override - public void run() { - synchronized (DefaultResourceAllocator.this) { - agentPaused.remove(agentId); - } - } - }); - } - } - - private int getAllocationScore(Map resourceQuotas, Map resourceUsages, - Map resourceRequirements) { - for (Map.Entry entry: resourceRequirements.entrySet()) { - Integer totalCount = resourceQuotas.get(entry.getKey()); - if (totalCount == null) - totalCount = 0; - Integer usedCount = resourceUsages.get(entry.getKey()); - if (usedCount == null) - usedCount = 0; - if (usedCount + entry.getValue() > totalCount) - return 0; - } - - Integer cpuTotal = resourceQuotas.get(CPU); - if (cpuTotal == null) - cpuTotal = 0; - Integer memoryTotal = resourceQuotas.get(MEMORY); - if (memoryTotal == null) - memoryTotal = 0; - - Integer cpuUsed = resourceUsages.get(CPU); - if (cpuUsed == null) - cpuUsed = 0; - Integer cpuRequired = resourceRequirements.get(CPU); - if (cpuRequired == null) - cpuRequired = 0; - cpuUsed += cpuRequired; - if (cpuUsed == 0) - cpuUsed = 1; - - Integer memoryUsed = resourceUsages.get(CPU); - if (memoryUsed == null) - memoryUsed = 0; - Integer memoryRequired = resourceRequirements.get(CPU); - if (memoryRequired == null) - memoryRequired = 0; - memoryUsed += memoryRequired; - if (memoryUsed == 0) - memoryUsed = 1; - - int score = cpuTotal*400/cpuUsed + 
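/* Note on the scoring being removed here: it weighted CPU four times over memory
 * (cpuTotal*400/cpuUsed + memoryTotal*100/memoryUsed), and the memory branch above
 * actually read the CPU key (resourceUsages.get(CPU), resourceRequirements.get(CPU)),
 * so memory never truly influenced the result. The replacement collapses scoring to a
 * single CPU dimension. */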
memoryTotal*100/memoryUsed; - if (score <= 0) - score = 1; - return score; - } - - private UUID allocateServer(Map resourceRequirements) { - UUID allocatedServerUUID = null; - - synchronized(this) { - while (true) { - int maxScore = 0; - for (Member server: clusterManager.getHazelcastInstance().getCluster().getMembers()) { - var totalResourceCounts = serverResourceQuotas.get(server.getUuid()); - if (totalResourceCounts != null) { - var usedResourceCounts = serverResourceUsages.get(server.getUuid()); - if (usedResourceCounts == null) - usedResourceCounts = new HashMap<>(); - int score = getAllocationScore(totalResourceCounts, usedResourceCounts, resourceRequirements); - if (score > maxScore) { - allocatedServerUUID = server.getUuid(); - maxScore = score; - } - } - } - if (allocatedServerUUID != null) - break; - try { - wait(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - } - - return allocatedServerUUID; - } - - private Long allocateAgent(AgentQuery agentQuery, Map resourceRequirements) { - Set agentIds = agentManager.query(agentQuery, 0, Integer.MAX_VALUE) - .stream().map(it->it.getId()).collect(Collectors.toSet()); - Long allocatedAgentId = 0L; - - synchronized(this) { - String uuid = UUID.randomUUID().toString(); - queryCaches.put(uuid, new QueryCache(agentQuery, agentIds)); - try { - while (true) { - int maxScore = 0; - for (Long agentId: agentIds) { - Map totalResourceCounts = agentResourceQuotas.get(agentId); - Boolean paused = agentPaused.get(agentId); - if (totalResourceCounts != null && paused != null && !paused) { - var usedResourceCounts = agentResourceUsages.get(agentId); - if (usedResourceCounts == null) - usedResourceCounts = new HashMap<>(); - - int score = getAllocationScore(totalResourceCounts, usedResourceCounts, resourceRequirements); - if (score > maxScore) { - allocatedAgentId = agentId; - maxScore = score; - } - } - } - if (allocatedAgentId != 0) - break; - try { - wait(); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - } finally { - queryCaches.remove(uuid); - } - } - return allocatedAgentId; + private int getAllocationScore(int total, int used, int required) { + if (used + required <= total) + return total * 100 / (used + required); + else + return 0; } @Transactional @@ -423,139 +218,167 @@ public class DefaultResourceAllocator implements ResourceAllocator, Serializable agentManager.load(agentId).setLastUsedDate(new Date()); } - private static class QueryCache { - - AgentQuery query; - - Collection result; - - QueryCache(AgentQuery query, Collection result) { - this.query = query; - this.result = result; - } - - } - @Override - public void waitingForAgentResourceToBeReleased(Long agentId) { - synchronized (this) { - Map usedResourceCounts = agentResourceUsages.remove(agentId); - if (usedResourceCounts != null) { - while (usedResourceCounts.values().stream().anyMatch(it->it>0)) { - try { - wait(); - } catch (InterruptedException e) { - } + public void wantToDisconnectAgent(Long agentId) { + agentDisconnecting.put(agentId, agentId); + while (true) { + boolean idle = true; + for (var entry : agentUsed.entrySet()) { + if (entry.getKey().startsWith(agentId + ":") && entry.getValue() > 0) { + idle = false; + break; + } + } + if (idle) { + break; + } else { + try { + Thread.sleep(1000); + } catch (InterruptedException e) { + throw new RuntimeException(e); } } } } - - private void increaseResourceCounts(Map resourceCounts, Map increment) { - for (Map.Entry entry: increment.entrySet()) { - Integer count = 
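/* getAllocationScore above: a candidate qualifies only when used + required <= total,
 * and its score is total * 100 / (used + required), the inverse of post-allocation
 * utilization. With required = 2: an 8-cpu node with 2 in use scores 8*100/4 = 200, a
 * 4-cpu idle node scores 4*100/2 = 200 (equally loaded in relative terms), while a
 * busier node scores lower and an over-committed one scores 0 and is skipped. */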
resourceCounts.get(entry.getKey()); - Integer newCount = (count != null? count + entry.getValue(): entry.getValue()); - resourceCounts.put(entry.getKey(), newCount >= 0? newCount: 0); - } - } - private synchronized void increaseResourceUsages(UUID serverUUID, Map increment) { - Map resourceCounts = serverResourceUsages.get(serverUUID); - if (resourceCounts == null) - resourceCounts = new HashMap<>(); - increaseResourceCounts(resourceCounts, increment); - serverResourceUsages.put(serverUUID, resourceCounts); - } - - private synchronized void increaseResourceUsages(Long agentId, Map increment) { - Map resourceCounts = agentResourceUsages.get(agentId); - if (resourceCounts == null) - resourceCounts = new HashMap<>(); - increaseResourceCounts(resourceCounts, increment); - agentResourceUsages.put(agentId, resourceCounts); - } - - private Map makeNegative(Map map) { - Map negative = new HashMap<>(); - for (Map.Entry entry: map.entrySet()) - negative.put(entry.getKey(), entry.getValue() * -1); - return negative; - } - private AgentManager getAgentManager() { return OneDev.getInstance(AgentManager.class); } - - @Sessional - protected AgentData getAgentData(Long agentId) { - return getAgentManager().load(agentId).getAgentData(); + + private int getEffectiveTotal(Map cpuMap, T key, int total) { + Integer effectiveTotal = total; + if (effectiveTotal == 0) + effectiveTotal = cpuMap.get(key); + if (effectiveTotal == null) + effectiveTotal = 0; + return effectiveTotal; } - @Override - public void run(ResourceRunnable runnable, AgentQuery agentQuery, Map resourceRequirements) { - Future future = null; - try { - if (agentQuery != null) { - Long agentId = allocateAgent(agentQuery, resourceRequirements); - UUID serverUUID = getAgentManager().getAgentServers().get(agentId); - if (serverUUID == null) - throw new ExplicitException("Can not find server managing allocated agent, please retry later"); + private T allocate(Collection pool, Map cpuMap, Map usedMap, + String resourceHolder, int total, int required) { + T allocated = null; + int maxScore = 0; + for (var each: pool) { + int effectiveTotal = getEffectiveTotal(cpuMap, each, total); + Integer used = usedMap.get(each + ":" + resourceHolder); + if (used == null) + used = 0; + int score = getAllocationScore(effectiveTotal, used, required); - future = clusterManager.submitToServer(serverUUID, new ClusterTask() { - - private static final long serialVersionUID = 1L; - - @Override - public Void call() throws Exception { - updateLastUsedDate(agentId); - - AgentData agentData = getAgentData(agentId); - Session agentSession = getAgentManager().getAgentSession(agentId); - if (agentSession == null) - throw new ExplicitException("Allocated agent not connected to current server, please retry later"); - - increaseResourceUsages(agentId, resourceRequirements); - try { - runnable.run(new AgentInfo(agentId, agentData, agentSession)); - } finally { - increaseResourceUsages(agentId, makeNegative(resourceRequirements)); - } - return null; - } - - }); - } else { - UUID serverUUID = allocateServer(resourceRequirements); - future = clusterManager.submitToServer(serverUUID, new ClusterTask() { - - private static final long serialVersionUID = 1L; - - @Override - public Void call() throws Exception { - UUID localServerUUID = clusterManager.getLocalServerUUID(); - increaseResourceUsages(localServerUUID, resourceRequirements); - try { - runnable.run(null); - } finally { - increaseResourceUsages(localServerUUID, makeNegative(resourceRequirements)); - } - return null; - } - - }); + if 
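/* allocate() only nominates the highest-scoring candidate; the reservation itself is
 * made by acquire() below via optimistic compare-and-swap on the usage map: read the
 * current value, then replace(key, prev, prev + required) (or putIfAbsent for a fresh
 * key), backing off briefly when a concurrent updater wins. A hypothetical standalone
 * rendering of that loop (IMap implements ConcurrentMap, so these operations are
 * cluster-wide atomic):
 *
 *   static boolean tryAcquire(java.util.concurrent.ConcurrentMap<String, Integer> used,
 *                             String key, int total, int required) throws InterruptedException {
 *       while (true) {
 *           Integer prev = used.get(key);
 *           if (prev == null) {
 *               if (required > total) return false;
 *               if (used.putIfAbsent(key, required) == null) return true;
 *           } else {
 *               if (prev + required > total) return false;
 *               if (used.replace(key, prev, prev + required)) return true;
 *           }
 *           Thread.sleep(100); // lost a CAS race; retry shortly
 *       }
 *   }
 */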
(score > maxScore) { + allocated = each; + maxScore = score; + } + } + return allocated; + } + + private boolean acquire(IMap used, String key, int total, int required) { + while (true) { + Integer prevValue = used.get(key); + if (prevValue != null) { + if (prevValue + required <= total) { + if (used.replace(key, prevValue, prevValue + required)) + return true; + } else { + return false; + } + } else { + if (required <= total) { + if (used.putIfAbsent(key, required) == null) + return true; + } else { + return false; + } + } + try { + Thread.sleep(100); + } catch (InterruptedException e) { + throw new RuntimeException(e); } - - // future.get() here does not respond to thread interruption - while (!future.isDone()) - Thread.sleep(1000); - future.get(); // call get() to throw possible execution exceptions - } catch (InterruptedException e) { - if (future != null) - future.cancel(true); - throw new RuntimeException(e); - } catch (ExecutionException e) { - throw new RuntimeException(e); } } + private void release(IMap used, String key, int required) { + while (true) { + int prevValue = used.get(key); + if (used.replace(key, prevValue, prevValue - required)) + break; + try { + Thread.sleep(100); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public void runServerJob(String resourceHolder, int total, int required, + ClusterRunnable runnable) { + UUID serverUUID; + synchronized (serverAllocSync) { + while (true) { + Collection serverUUIDs = clusterManager.getHazelcastInstance().getCluster().getMembers() + .stream().map(Member::getUuid).collect(Collectors.toSet()); + serverUUID = allocate(serverUUIDs, serverCpus, serverUsed, + resourceHolder, total, required); + if (serverUUID != null) { + int effectiveTotal = getEffectiveTotal(serverCpus, serverUUID, total); + if (acquire(serverUsed, serverUUID + ":" + resourceHolder, effectiveTotal, required)) + break; + } + try { + serverAllocSync.wait(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + } + try { + jobManager.runJob(serverUUID, runnable); + } finally { + release(serverUsed, serverUUID + ":" + resourceHolder, required); + notifyServerAlloc(); + } + } + + @Override + public void runAgentJob(AgentQuery agentQuery, String resourceHolder, + int total, int required, AgentRunnable runnable) { + Long agentId; + synchronized (agentAllocSync) { + while (true) { + Collection agentIds = agentManager.query(agentQuery, 0, Integer.MAX_VALUE) + .stream().filter(it-> it.isOnline() && !it.isPaused()) + .map(AbstractEntity::getId) + .collect(Collectors.toSet()); + agentIds.removeAll(agentDisconnecting.keySet()); + agentId = allocate(agentIds, agentCpus, agentUsed, resourceHolder, total, required); + if (agentId != null) { + int effectiveTotal = getEffectiveTotal(agentCpus, agentId, total); + if (acquire(agentUsed, agentId + ":" + resourceHolder, effectiveTotal, required)) + break; + } + try { + agentAllocSync.wait(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + } + + try { + updateLastUsedDate(agentId); + UUID serverUUID = getAgentManager().getAgentServers().get(agentId); + if (serverUUID == null) + throw new ExplicitException("Can not find server managing allocated agent, please retry later"); + + Long finalAgentId = agentId; + jobManager.runJob(serverUUID, () -> runnable.run(finalAgentId)); + } finally { + release(agentUsed, agentId + ":" + resourceHolder, required); + notifyAgentAlloc(); + } + } + } diff --git 
a/server-core/src/main/java/io/onedev/server/job/JobContext.java b/server-core/src/main/java/io/onedev/server/job/JobContext.java index 595e2ebb43..cb1e98859a 100644 --- a/server-core/src/main/java/io/onedev/server/job/JobContext.java +++ b/server-core/src/main/java/io/onedev/server/job/JobContext.java @@ -1,17 +1,15 @@ package io.onedev.server.job; -import java.io.Serializable; -import java.util.Collection; -import java.util.List; -import java.util.Map; - -import org.eclipse.jgit.lib.ObjectId; - import io.onedev.k8shelper.Action; import io.onedev.k8shelper.LeafFacade; import io.onedev.server.buildspec.Service; import io.onedev.server.buildspec.job.CacheSpec; import io.onedev.server.model.support.administration.jobexecutor.JobExecutor; +import org.eclipse.jgit.lib.ObjectId; + +import java.io.Serializable; +import java.util.Collection; +import java.util.List; public class JobContext implements Serializable { @@ -41,14 +39,12 @@ public class JobContext implements Serializable { private final List services; - private final Map resourceRequirements; - private final int retried; public JobContext(String jobToken, JobExecutor jobExecutor, Long projectId, String projectPath, String projectGitDir, Long buildId, Long buildNumber, List actions, String refName, ObjectId commitId, Collection caches, - List services, Map resourceRequirements, int retried) { + List services, int retried) { this.jobToken = jobToken; this.jobExecutor = jobExecutor; this.projectId = projectId; @@ -61,7 +57,6 @@ public class JobContext implements Serializable { this.commitId = commitId; this.cacheSpecs = caches; this.services = services; - this.resourceRequirements = resourceRequirements; this.retried = retried; } @@ -97,10 +92,6 @@ public class JobContext implements Serializable { return services; } - public Map getResourceRequirements() { - return resourceRequirements; - } - public Long getProjectId() { return projectId; } diff --git a/server-core/src/main/java/io/onedev/server/job/JobExecution.java b/server-core/src/main/java/io/onedev/server/job/JobExecution.java index cabcad94bb..ccab0e1998 100644 --- a/server-core/src/main/java/io/onedev/server/job/JobExecution.java +++ b/server-core/src/main/java/io/onedev/server/job/JobExecution.java @@ -47,7 +47,7 @@ public class JobExecution { if (isTimedout()) throw new TimeoutException(); else if (cancellerId != null) - throw new CancellerAwareCancellationException(cancellerId); + throw new CancellationException(cancellerId); else future.get(); } diff --git a/server-core/src/main/java/io/onedev/server/job/JobManager.java b/server-core/src/main/java/io/onedev/server/job/JobManager.java index 5e7052513d..42b210e2d2 100644 --- a/server-core/src/main/java/io/onedev/server/job/JobManager.java +++ b/server-core/src/main/java/io/onedev/server/job/JobManager.java @@ -3,9 +3,12 @@ package io.onedev.server.job; import java.io.File; import java.util.List; import java.util.Map; +import java.util.UUID; import javax.annotation.Nullable; +import io.onedev.server.cluster.ClusterRunnable; +import io.onedev.server.model.support.administration.jobexecutor.JobExecutor; import org.eclipse.jgit.lib.ObjectId; import io.onedev.commons.utils.TaskLogger; @@ -34,6 +37,10 @@ public interface JobManager { void cancel(Build build); void resume(Build build); + + void runJob(UUID serverUUID, ClusterRunnable runnable); + + void runJobLocal(JobContext jobContext, JobRunnable runnable); WebShell openShell(Long buildId, Terminal terminal); diff --git 
a/server-core/src/main/java/io/onedev/server/job/JobRunnable.java b/server-core/src/main/java/io/onedev/server/job/JobRunnable.java new file mode 100644 index 0000000000..ad9f9e1dd5 --- /dev/null +++ b/server-core/src/main/java/io/onedev/server/job/JobRunnable.java @@ -0,0 +1,17 @@ +package io.onedev.server.job; + +import io.onedev.commons.utils.TaskLogger; +import io.onedev.server.terminal.Shell; +import io.onedev.server.terminal.Terminal; + +import java.io.Serializable; + +public interface JobRunnable extends Serializable { + + void run(TaskLogger jobLogger); + + void resume(JobContext jobContext); + + Shell openShell(JobContext jobContext, Terminal terminal); + +} diff --git a/server-core/src/main/java/io/onedev/server/job/ResourceAllocator.java b/server-core/src/main/java/io/onedev/server/job/ResourceAllocator.java index f01fc8cb34..2e04e3d662 100644 --- a/server-core/src/main/java/io/onedev/server/job/ResourceAllocator.java +++ b/server-core/src/main/java/io/onedev/server/job/ResourceAllocator.java @@ -1,20 +1,15 @@ -package io.onedev.server.job; - -import java.util.Map; - -import javax.annotation.Nullable; - -import io.onedev.server.search.entity.agent.AgentQuery; - -public interface ResourceAllocator { - - static final String CPU = "cpu"; - - static final String MEMORY = "memory"; - - void run(ResourceRunnable runnable, @Nullable AgentQuery agentQuery, - Map resourceRequirements); - - void waitingForAgentResourceToBeReleased(Long agentId); - -} +package io.onedev.server.job; + +import io.onedev.server.cluster.ClusterRunnable; +import io.onedev.server.search.entity.agent.AgentQuery; + +public interface ResourceAllocator { + + void runServerJob(String resourceHolder, int total, int required, ClusterRunnable runnable); + + void runAgentJob(AgentQuery agentQuery, String resourceHolder, int total, int required, + AgentRunnable runnable); + + void wantToDisconnectAgent(Long agentId); + +} diff --git a/server-core/src/main/java/io/onedev/server/job/ResourceRunnable.java b/server-core/src/main/java/io/onedev/server/job/ResourceRunnable.java deleted file mode 100644 index 15ee1b4e52..0000000000 --- a/server-core/src/main/java/io/onedev/server/job/ResourceRunnable.java +++ /dev/null @@ -1,11 +0,0 @@ -package io.onedev.server.job; - -import java.io.Serializable; - -import javax.annotation.Nullable; - -public interface ResourceRunnable extends Serializable { - - void run(@Nullable AgentInfo agentInfo); - -} diff --git a/server-core/src/main/java/io/onedev/server/migration/DataMigrator.java b/server-core/src/main/java/io/onedev/server/migration/DataMigrator.java index 0058b9e0e9..02f7c587b4 100644 --- a/server-core/src/main/java/io/onedev/server/migration/DataMigrator.java +++ b/server-core/src/main/java/io/onedev/server/migration/DataMigrator.java @@ -4580,7 +4580,21 @@ public class DataMigrator { "io.onedev.server.git.location."); curlConfigElement.addAttribute("class", clazz); } + } else if (key.equals("PERFORMANCE")) { + Element valueElement = element.element("value"); + if (valueElement != null) { + int cpuIntensiveTaskConcurrency; + try { + HardwareAbstractionLayer hardware = new SystemInfo().getHardware(); + cpuIntensiveTaskConcurrency = hardware.getProcessor().getLogicalProcessorCount(); + } catch (Exception e) { + cpuIntensiveTaskConcurrency = 4; + } + valueElement.addElement("cpuIntensiveTaskConcurrency") + .setText(String.valueOf(cpuIntensiveTaskConcurrency)); + } } + } dom.writeToFile(file, false); } else if (file.getName().startsWith("Projects.xml")) { @@ -4598,6 +4612,15 @@ public 
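/* Migration notes: the PERFORMANCE hunk above seeds cpuIntensiveTaskConcurrency from
 * the host's logical processor count (via oshi, defaulting to 4 on failure); the
 * Agents.xml hunk just below drops the memory element and renames cpu to cpus,
 * dividing the stored value by 1000 -- evidently converting millicores to whole cores,
 * e.g. a persisted 4000 becomes 4. */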
class DataMigrator { } } dom.writeToFile(file, false); + } else if (file.getName().startsWith("Agents.xml")) { + VersionedXmlDoc dom = VersionedXmlDoc.fromFile(file); + for (Element element : dom.getRootElement().elements()) { + element.element("memory").detach(); + Element cpuElement = element.element("cpu"); + cpuElement.setName("cpus"); + cpuElement.setText(String.valueOf(Integer.parseInt(cpuElement.getTextTrim())/1000)); + } + dom.writeToFile(file, false); } } } diff --git a/server-core/src/main/java/io/onedev/server/model/Agent.java b/server-core/src/main/java/io/onedev/server/model/Agent.java index 9ec533b2f3..c1ed182d61 100644 --- a/server-core/src/main/java/io/onedev/server/model/Agent.java +++ b/server-core/src/main/java/io/onedev/server/model/Agent.java @@ -1,52 +1,25 @@ package io.onedev.server.model; -import static io.onedev.server.model.Agent.PROP_CPU; -import static io.onedev.server.model.Agent.PROP_IP_ADDRESS; -import static io.onedev.server.model.Agent.PROP_LAST_USED_DATE; -import static io.onedev.server.model.Agent.PROP_MEMORY; -import static io.onedev.server.model.Agent.PROP_NAME; -import static io.onedev.server.model.Agent.PROP_OS_ARCH; -import static io.onedev.server.model.Agent.PROP_OS_NAME; -import static io.onedev.server.model.Agent.PROP_OS_VERSION; -import static io.onedev.server.model.Agent.PROP_PAUSED; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import javax.persistence.CascadeType; -import javax.persistence.Column; -import javax.persistence.Entity; -import javax.persistence.FetchType; -import javax.persistence.Index; -import javax.persistence.JoinColumn; -import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; -import javax.persistence.Table; - -import org.hibernate.annotations.Cache; -import org.hibernate.annotations.CacheConcurrencyStrategy; - import com.google.common.collect.Lists; import com.google.common.collect.Sets; - import io.onedev.agent.AgentData; import io.onedev.k8shelper.OsInfo; import io.onedev.server.OneDev; import io.onedev.server.entitymanager.AgentManager; -import io.onedev.server.job.ResourceAllocator; import io.onedev.server.util.CollectionUtils; +import org.hibernate.annotations.Cache; +import org.hibernate.annotations.CacheConcurrencyStrategy; + +import javax.persistence.*; +import java.util.*; + +import static io.onedev.server.model.Agent.*; @Entity @Table(indexes={ @Index(columnList="o_token_id"), @Index(columnList=PROP_IP_ADDRESS), @Index(columnList=PROP_PAUSED), @Index(columnList=PROP_NAME), @Index(columnList=PROP_OS_NAME), @Index(columnList=PROP_OS_VERSION), - @Index(columnList=PROP_CPU), @Index(columnList=PROP_MEMORY), @Index(columnList=PROP_OS_ARCH), @Index(columnList=PROP_LAST_USED_DATE)}) @Cache(usage=CacheConcurrencyStrategy.READ_WRITE) public class Agent extends AbstractEntity { @@ -61,8 +34,6 @@ public class Agent extends AbstractEntity { public static final String PROP_BUILDS = "builds"; - public static final String PROP_ATTRIBUTES = "attributes"; - public static final String NAME_IP_ADDRESS = "Ip Address"; public static final String PROP_IP_ADDRESS = "ipAddress"; @@ -79,30 +50,20 @@ public class Agent extends AbstractEntity { public static final String PROP_OS_ARCH = "osArch"; - public static final String NAME_CPU = "CPU"; - - public static final String PROP_CPU = "cpu"; - - public static final String NAME_MEMORY = "Memory"; - - public static final String PROP_MEMORY = 
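/* Agent entity (this hunk): the separate cpu/memory columns, their indexes, and the
 * getResources() map are folded into a single cpus field, and CPU/Memory disappear
 * from the queryable and orderable field lists accordingly. */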
"memory"; - public static final String PROP_LAST_USED_DATE = "lastUsedDate"; public static final Set ALL_FIELDS = Sets.newHashSet( - NAME_NAME, NAME_IP_ADDRESS, NAME_OS_NAME, NAME_OS_VERSION, NAME_OS_ARCH, NAME_CPU, NAME_MEMORY); + NAME_NAME, NAME_IP_ADDRESS, NAME_OS_NAME, NAME_OS_VERSION, NAME_OS_ARCH); public static final List QUERY_FIELDS = Lists.newArrayList( - NAME_NAME, NAME_IP_ADDRESS, NAME_OS_NAME, NAME_OS_VERSION, NAME_OS_ARCH, NAME_CPU, NAME_MEMORY); + NAME_NAME, NAME_IP_ADDRESS, NAME_OS_NAME, NAME_OS_VERSION, NAME_OS_ARCH); public static final Map ORDER_FIELDS = CollectionUtils.newLinkedHashMap( NAME_NAME, PROP_NAME, NAME_IP_ADDRESS, PROP_IP_ADDRESS, NAME_OS_NAME, PROP_OS_NAME, NAME_OS_VERSION, PROP_OS_VERSION, - NAME_OS_ARCH, PROP_OS_ARCH, - NAME_CPU, PROP_CPU, - NAME_MEMORY, PROP_MEMORY); + NAME_OS_ARCH, PROP_OS_ARCH); @ManyToOne(fetch=FetchType.LAZY) @JoinColumn(nullable=false) @@ -129,9 +90,7 @@ public class Agent extends AbstractEntity { @Column(nullable=false) private String osArch; - private int cpu; - - private int memory; + private int cpus; private boolean temporal; @@ -197,20 +156,12 @@ public class Agent extends AbstractEntity { this.paused = paused; } - public int getCpu() { - return cpu; + public int getCpus() { + return cpus; } - public void setCpu(int cpu) { - this.cpu = cpu; - } - - public int getMemory() { - return memory; - } - - public void setMemory(int memory) { - this.memory = memory; + public void setCpus(int cpus) { + this.cpus = cpus; } public boolean isTemporal() { @@ -250,16 +201,9 @@ public class Agent extends AbstractEntity { return online; } - public Map getResources() { - Map resources = new HashMap<>(); - resources.put(ResourceAllocator.CPU, cpu); - resources.put(ResourceAllocator.MEMORY, memory); - return resources; - } - public AgentData getAgentData() { return new AgentData(getToken().getValue(), new OsInfo(osName, osVersion, osArch), - name, ipAddress, cpu, memory, temporal, getAttributeMap()); + name, ipAddress, cpus, temporal, getAttributeMap()); } } diff --git a/server-core/src/main/java/io/onedev/server/model/support/administration/PerformanceSetting.java b/server-core/src/main/java/io/onedev/server/model/support/administration/PerformanceSetting.java index 3096bbdf97..1519441030 100644 --- a/server-core/src/main/java/io/onedev/server/model/support/administration/PerformanceSetting.java +++ b/server-core/src/main/java/io/onedev/server/model/support/administration/PerformanceSetting.java @@ -2,19 +2,48 @@ package io.onedev.server.model.support.administration; import java.io.Serializable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import io.onedev.server.web.editable.annotation.Editable; +import oshi.SystemInfo; +import oshi.hardware.HardwareAbstractionLayer; @Editable public class PerformanceSetting implements Serializable { private static final long serialVersionUID = 1; + private static final Logger logger= LoggerFactory.getLogger(PerformanceSetting.class); + + private int cpuIntensiveTaskConcurrency; + private int maxGitLFSFileSize = 4096; private int maxUploadFileSize = 20; private int maxCodeSearchEntries = 100; + public PerformanceSetting() { + try { + HardwareAbstractionLayer hardware = new SystemInfo().getHardware(); + cpuIntensiveTaskConcurrency = hardware.getProcessor().getLogicalProcessorCount(); + } catch (Exception e) { + logger.debug("Error calling oshi", e); + cpuIntensiveTaskConcurrency = 4; + } + } + + @Editable(order=100, name="CPU Intensive Task Concurrency", description="Specify max concurrent CPU 
intensive " + + "tasks, such as Git repository pull/push, repository index, etc.") + public int getCpuIntensiveTaskConcurrency() { + return cpuIntensiveTaskConcurrency; + } + + public void setCpuIntensiveTaskConcurrency(int cpuIntensiveTaskConcurrency) { + this.cpuIntensiveTaskConcurrency = cpuIntensiveTaskConcurrency; + } + @Editable(order=600, name="Max Git LFS File Size (MB)", description="Specify max git LFS file size in mega bytes") public int getMaxGitLFSFileSize() { return maxGitLFSFileSize; diff --git a/server-core/src/main/java/io/onedev/server/model/support/administration/jobexecutor/JobExecutor.java b/server-core/src/main/java/io/onedev/server/model/support/administration/jobexecutor/JobExecutor.java index 6ada8e3ce1..b5f0cdafb7 100644 --- a/server-core/src/main/java/io/onedev/server/model/support/administration/jobexecutor/JobExecutor.java +++ b/server-core/src/main/java/io/onedev/server/model/support/administration/jobexecutor/JobExecutor.java @@ -1,207 +1,213 @@ -package io.onedev.server.model.support.administration.jobexecutor; - -import static java.nio.charset.StandardCharsets.UTF_8; - -import java.io.File; -import java.io.IOException; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import javax.annotation.Nullable; -import javax.validation.constraints.NotEmpty; -import javax.ws.rs.core.Response; - -import com.google.common.base.Splitter; -import com.google.common.base.Throwables; - -import io.onedev.commons.loader.ExtensionPoint; -import io.onedev.commons.utils.FileUtils; -import io.onedev.commons.utils.TaskLogger; -import io.onedev.server.OneDev; -import io.onedev.server.ServerConfig; -import io.onedev.server.job.AgentInfo; -import io.onedev.server.job.JobContext; -import io.onedev.server.search.entity.agent.AgentQuery; -import io.onedev.server.terminal.Shell; -import io.onedev.server.terminal.Terminal; -import io.onedev.server.terminal.TerminalManager; -import io.onedev.server.util.ExceptionUtils; -import io.onedev.server.util.PKCS12CertExtractor; -import io.onedev.server.util.usage.Usage; -import io.onedev.server.util.validation.annotation.DnsName; -import io.onedev.server.web.editable.annotation.Editable; -import io.onedev.server.web.editable.annotation.JobAuthorization; -import io.onedev.server.web.editable.annotation.ShowCondition; - -@ExtensionPoint -@Editable -public abstract class JobExecutor implements Serializable { - - private static final long serialVersionUID = 1L; - - private boolean enabled = true; - - private String name; - - private String jobAuthorization; - - private boolean shellAccessEnabled; - - private boolean sitePublishEnabled; - - private int cacheTTL = 7; - - public boolean isEnabled() { - return enabled; - } - - public void setEnabled(boolean enabled) { - this.enabled = enabled; - } - - @Editable(order=10) - @DnsName //this name may be used as namespace/network prefixes, so put a strict constraint - @NotEmpty - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - @Editable(order=20, description="Enable this to allow project managers to open web terminal to running builds. " - + "WARNING: Users with shell access can take control of the node used by " - + "the executor. 
You should configure job authorization below to make sure the executor can only be " - + "used by trusted jobs if this option is enabled") - @ShowCondition("isTerminalSupported") - public boolean isShellAccessEnabled() { - return shellAccessEnabled; - } - - public void setShellAccessEnabled(boolean shellAccessEnabled) { - this.shellAccessEnabled = shellAccessEnabled; - } - - @Editable(order=30, description="Enable this to allow to run site publish step. OneDev will serve project " - + "site files as is. To avoid XSS attack, make sure this executor can only be used by trusted jobs") - public boolean isSitePublishEnabled() { - return sitePublishEnabled; - } - - public void setSitePublishEnabled(boolean sitePublishEnabled) { - this.sitePublishEnabled = sitePublishEnabled; - } - - @SuppressWarnings("unused") - private static boolean isTerminalSupported() { - return OneDev.getInstance(TerminalManager.class).isTerminalSupported(); - } - - @Editable(order=10000, placeholder="Can be used by any jobs", - description="Optionally specify jobs authorized to use this executor") - @JobAuthorization - @Nullable - public String getJobAuthorization() { - return jobAuthorization; - } - - public void setJobAuthorization(String jobAuthorization) { - this.jobAuthorization = jobAuthorization; - } - - @Editable(order=50000, group="More Settings", description="Specify job cache TTL (time to live) by days. " - + "OneDev may create multiple job caches even for same cache key to avoid cache conflicts when " - + "running jobs concurrently. This setting tells OneDev to remove caches inactive for specified " - + "time period to save disk space") - public int getCacheTTL() { - return cacheTTL; - } - - public void setCacheTTL(int cacheTTL) { - this.cacheTTL = cacheTTL; - } - - @Nullable - public abstract AgentQuery getAgentRequirement(); - - public abstract void execute(JobContext jobContext, TaskLogger jobLogger, @Nullable AgentInfo agentInfo); - - public abstract void resume(JobContext jobContext); - - public abstract Shell openShell(JobContext jobContext, Terminal terminal); - - public boolean isPlaceholderAllowed() { - return true; - } - - public Usage onDeleteProject(String projectPath) { - Usage usage = new Usage(); - if (jobAuthorization != null - && io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization).isUsingProject(projectPath)) { - usage.add("job requirement" ); - } - return usage; - } - - public void onMoveProject(String oldPath, String newPath) { - if (jobAuthorization != null) { - io.onedev.server.job.authorization.JobAuthorization parsedJobAuthorization = - io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization); - parsedJobAuthorization.onMoveProject(oldPath, newPath); - jobAuthorization = parsedJobAuthorization.toString(); - } - } - - public Usage onDeleteUser(String userName) { - Usage usage = new Usage(); - if (jobAuthorization != null - && io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization).isUsingUser(userName)) { - usage.add("job authorization" ); - } - return usage; - } - - public void onRenameUser(String oldName, String newName) { - if (jobAuthorization != null) { - io.onedev.server.job.authorization.JobAuthorization parsedJobAuthorization = - io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization); - parsedJobAuthorization.onRenameUser(oldName, newName); - jobAuthorization = parsedJobAuthorization.toString(); - } - } - - protected List getTrustCertContent() { - List trustCertContent = new ArrayList<>(); - 
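/* Executor contract change: the abstract getAgentRequirement / execute(JobContext,
 * TaskLogger, AgentInfo) / resume / openShell removed above shrink to a single
 * execute(JobContext); executors now claim capacity themselves through
 * ResourceAllocator.runAgentJob()/runServerJob(), and resume/openShell live on the
 * JobRunnable registered via JobManager.runJobLocal(). A hypothetical skeleton under
 * the new contract -- the agentQuery field, getName() as the resource-holder key, and
 * the 0/1 totals are all illustrative assumptions:
 *
 *   @Override
 *   public void execute(JobContext jobContext) {
 *       ResourceAllocator allocator = OneDev.getInstance(ResourceAllocator.class);
 *       // total 0 falls back to the agent's advertised cpu count; require 1 unit
 *       allocator.runAgentJob(agentQuery, getName(), 0, 1, agentId ->
 *           OneDev.getInstance(JobManager.class).runJobLocal(jobContext, new JobRunnable() {
 *               @Override
 *               public void run(TaskLogger jobLogger) {
 *                   notifyJobRunning(jobContext.getBuildId(), agentId);
 *                   // ... drive the build on the allocated agent ...
 *               }
 *               @Override
 *               public void resume(JobContext jobContext) {}
 *               @Override
 *               public Shell openShell(JobContext jobContext, Terminal terminal) {
 *                   throw new UnsupportedOperationException();
 *               }
 *           }));
 *   }
 */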
ServerConfig serverConfig = OneDev.getInstance(ServerConfig.class); - File keystoreFile = serverConfig.getKeystoreFile(); - if (keystoreFile != null) { - String password = serverConfig.getKeystorePassword(); - for (Map.Entry entry: new PKCS12CertExtractor(keystoreFile, password).extact().entrySet()) - trustCertContent.addAll(Splitter.on('\n').trimResults().splitToList(entry.getValue())); - } - if (serverConfig.getTrustCertsDir() != null) { - for (File file: serverConfig.getTrustCertsDir().listFiles()) { - if (file.isFile()) { - try { - trustCertContent.addAll(FileUtils.readLines(file, UTF_8)); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - } - } - return trustCertContent; - } - - protected String getErrorMessage(Exception exception) { - Response response = ExceptionUtils.buildResponse(exception); - if (response != null) - return response.getEntity().toString(); - else - return Throwables.getStackTraceAsString(exception); - } - -} +package io.onedev.server.model.support.administration.jobexecutor; + +import com.google.common.base.Splitter; +import com.google.common.base.Throwables; +import io.onedev.commons.loader.ExtensionPoint; +import io.onedev.commons.utils.FileUtils; +import io.onedev.server.OneDev; +import io.onedev.server.ServerConfig; +import io.onedev.server.entitymanager.AgentManager; +import io.onedev.server.entitymanager.BuildManager; +import io.onedev.server.event.ListenerRegistry; +import io.onedev.server.event.project.build.BuildRunning; +import io.onedev.server.job.JobContext; +import io.onedev.server.model.Build; +import io.onedev.server.persistence.TransactionManager; +import io.onedev.server.terminal.TerminalManager; +import io.onedev.server.util.ExceptionUtils; +import io.onedev.server.util.PKCS12CertExtractor; +import io.onedev.server.util.usage.Usage; +import io.onedev.server.util.validation.annotation.DnsName; +import io.onedev.server.web.editable.annotation.Editable; +import io.onedev.server.web.editable.annotation.JobAuthorization; +import io.onedev.server.web.editable.annotation.ShowCondition; + +import javax.annotation.Nullable; +import javax.validation.constraints.NotEmpty; +import javax.ws.rs.core.Response; +import java.io.File; +import java.io.IOException; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.Map; + +import static java.nio.charset.StandardCharsets.UTF_8; + +@ExtensionPoint +@Editable +public abstract class JobExecutor implements Serializable { + + private static final long serialVersionUID = 1L; + + private boolean enabled = true; + + private String name; + + private String jobAuthorization; + + private boolean shellAccessEnabled; + + private boolean sitePublishEnabled; + + private int cacheTTL = 7; + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + @Editable(order=10) + @DnsName //this name may be used as namespace/network prefixes, so put a strict constraint + @NotEmpty + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @Editable(order=20, description="Enable this to allow project managers to open web terminal to running builds. " + + "WARNING: Users with shell access can take control of the node used by " + + "the executor. 
You should configure job authorization below to make sure the executor can only be " + + "used by trusted jobs if this option is enabled") + @ShowCondition("isTerminalSupported") + public boolean isShellAccessEnabled() { + return shellAccessEnabled; + } + + public void setShellAccessEnabled(boolean shellAccessEnabled) { + this.shellAccessEnabled = shellAccessEnabled; + } + + @Editable(order=30, description="Enable this to allow to run site publish step. OneDev will serve project " + + "site files as is. To avoid XSS attack, make sure this executor can only be used by trusted jobs") + public boolean isSitePublishEnabled() { + return sitePublishEnabled; + } + + public void setSitePublishEnabled(boolean sitePublishEnabled) { + this.sitePublishEnabled = sitePublishEnabled; + } + + @SuppressWarnings("unused") + private static boolean isTerminalSupported() { + return OneDev.getInstance(TerminalManager.class).isTerminalSupported(); + } + + @Editable(order=10000, placeholder="Can be used by any jobs", + description="Optionally specify jobs authorized to use this executor") + @JobAuthorization + @Nullable + public String getJobAuthorization() { + return jobAuthorization; + } + + public void setJobAuthorization(String jobAuthorization) { + this.jobAuthorization = jobAuthorization; + } + + @Editable(order=50000, group="More Settings", description="Specify job cache TTL (time to live) by days. " + + "OneDev may create multiple job caches even for same cache key to avoid cache conflicts when " + + "running jobs concurrently. This setting tells OneDev to remove caches inactive for specified " + + "time period to save disk space") + public int getCacheTTL() { + return cacheTTL; + } + + public void setCacheTTL(int cacheTTL) { + this.cacheTTL = cacheTTL; + } + + public abstract void execute(JobContext jobContext); + + public boolean isPlaceholderAllowed() { + return true; + } + + public Usage onDeleteProject(String projectPath) { + Usage usage = new Usage(); + if (jobAuthorization != null + && io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization).isUsingProject(projectPath)) { + usage.add("job requirement" ); + } + return usage; + } + + public void onMoveProject(String oldPath, String newPath) { + if (jobAuthorization != null) { + io.onedev.server.job.authorization.JobAuthorization parsedJobAuthorization = + io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization); + parsedJobAuthorization.onMoveProject(oldPath, newPath); + jobAuthorization = parsedJobAuthorization.toString(); + } + } + + public Usage onDeleteUser(String userName) { + Usage usage = new Usage(); + if (jobAuthorization != null + && io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization).isUsingUser(userName)) { + usage.add("job authorization" ); + } + return usage; + } + + public void onRenameUser(String oldName, String newName) { + if (jobAuthorization != null) { + io.onedev.server.job.authorization.JobAuthorization parsedJobAuthorization = + io.onedev.server.job.authorization.JobAuthorization.parse(jobAuthorization); + parsedJobAuthorization.onRenameUser(oldName, newName); + jobAuthorization = parsedJobAuthorization.toString(); + } + } + + protected void notifyJobRunning(Long buildId, @Nullable Long agentId) { + OneDev.getInstance(TransactionManager.class).run(() -> { + BuildManager buildManager = OneDev.getInstance(BuildManager.class); + Build build = buildManager.load(buildId); + build.setStatus(Build.Status.RUNNING); + build.setRunningDate(new Date()); + if (agentId != null) + 
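/* notifyJobRunning (this helper) takes over the status flip formerly done inside
 * DefaultJobManager.runJobLocal: once an executor has actually secured resources it
 * marks the build RUNNING in one transaction, stamps the running date, pins the agent
 * when one was allocated, and posts BuildRunning -- so a build is no longer reported
 * as running before allocation succeeds. */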
build.setAgent(OneDev.getInstance(AgentManager.class).load(agentId)); + buildManager.save(build); + OneDev.getInstance(ListenerRegistry.class).post(new BuildRunning(build)); + }); + } + + protected List<String> getTrustCertContent() { + List<String> trustCertContent = new ArrayList<>(); + ServerConfig serverConfig = OneDev.getInstance(ServerConfig.class); + File keystoreFile = serverConfig.getKeystoreFile(); + if (keystoreFile != null) { + String password = serverConfig.getKeystorePassword(); + for (Map.Entry<String, String> entry: new PKCS12CertExtractor(keystoreFile, password).extact().entrySet()) + trustCertContent.addAll(Splitter.on('\n').trimResults().splitToList(entry.getValue())); + } + if (serverConfig.getTrustCertsDir() != null) { + for (File file: serverConfig.getTrustCertsDir().listFiles()) { + if (file.isFile()) { + try { + trustCertContent.addAll(FileUtils.readLines(file, UTF_8)); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + } + return trustCertContent; + } + + protected String getErrorMessage(Exception exception) { + Response response = ExceptionUtils.buildResponse(exception); + if (response != null) + return response.getEntity().toString(); + else + return Throwables.getStackTraceAsString(exception); + } + +} diff --git a/server-core/src/main/java/io/onedev/server/search/entity/agent/AgentQuery.java b/server-core/src/main/java/io/onedev/server/search/entity/agent/AgentQuery.java index 5382324e1f..72d6548f9d 100644 --- a/server-core/src/main/java/io/onedev/server/search/entity/agent/AgentQuery.java +++ b/server-core/src/main/java/io/onedev/server/search/entity/agent/AgentQuery.java @@ -1,29 +1,5 @@ package io.onedev.server.search.entity.agent; -import static io.onedev.server.model.Agent.NAME_CPU; -import static io.onedev.server.model.Agent.NAME_IP_ADDRESS; -import static io.onedev.server.model.Agent.NAME_MEMORY; -import static io.onedev.server.model.Agent.NAME_NAME; -import static io.onedev.server.model.Agent.NAME_OS_NAME; -import static io.onedev.server.model.Agent.NAME_OS_ARCH; -import static io.onedev.server.model.Agent.NAME_OS_VERSION; -import static io.onedev.server.model.Agent.ORDER_FIELDS; -import static io.onedev.server.model.Agent.QUERY_FIELDS; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import javax.annotation.Nullable; - -import org.antlr.v4.runtime.BailErrorStrategy; -import org.antlr.v4.runtime.BaseErrorListener; -import org.antlr.v4.runtime.CharStream; -import org.antlr.v4.runtime.CharStreams; -import org.antlr.v4.runtime.CommonTokenStream; -import org.antlr.v4.runtime.RecognitionException; -import org.antlr.v4.runtime.Recognizer; - import io.onedev.commons.codeassist.AntlrUtils; import io.onedev.commons.utils.ExplicitException; import io.onedev.server.OneDev; @@ -32,20 +8,19 @@ import io.onedev.server.model.Agent; import io.onedev.server.search.entity.EntityQuery; import io.onedev.server.search.entity.EntitySort; import io.onedev.server.search.entity.EntitySort.Direction; -import io.onedev.server.search.entity.agent.AgentQueryParser.AndCriteriaContext; -import io.onedev.server.search.entity.agent.AgentQueryParser.CriteriaContext; -import io.onedev.server.search.entity.agent.AgentQueryParser.FieldOperatorValueCriteriaContext; -import io.onedev.server.search.entity.agent.AgentQueryParser.NotCriteriaContext; -import io.onedev.server.search.entity.agent.AgentQueryParser.OperatorCriteriaContext; -import io.onedev.server.search.entity.agent.AgentQueryParser.OperatorValueCriteriaContext; -import 
io.onedev.server.search.entity.agent.AgentQueryParser.OrCriteriaContext; -import io.onedev.server.search.entity.agent.AgentQueryParser.OrderContext; -import io.onedev.server.search.entity.agent.AgentQueryParser.ParensCriteriaContext; -import io.onedev.server.search.entity.agent.AgentQueryParser.QueryContext; +import io.onedev.server.search.entity.agent.AgentQueryParser.*; import io.onedev.server.util.criteria.AndCriteria; import io.onedev.server.util.criteria.Criteria; import io.onedev.server.util.criteria.NotCriteria; import io.onedev.server.util.criteria.OrCriteria; +import org.antlr.v4.runtime.*; + +import javax.annotation.Nullable; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import static io.onedev.server.model.Agent.*; public class AgentQuery extends EntityQuery<Agent> { @@ -154,19 +129,9 @@ public class AgentQuery extends EntityQuery<Agent> { return new OsArchCriteria(value); case NAME_IP_ADDRESS: return new IpAddressCriteria(value); - case NAME_CPU: - return new CpuCriteria(value, operator); - case NAME_MEMORY: - return new MemoryCriteria(value, operator); default: return new AttributeCriteria(fieldName, value); } - case AgentQueryLexer.IsGreaterThan: - case AgentQueryLexer.IsLessThan: - if (fieldName.equals(NAME_CPU)) - return new CpuCriteria(value, operator); - else - return new MemoryCriteria(value, operator); default: throw new IllegalStateException(); } @@ -227,15 +192,6 @@ public class AgentQuery extends EntityQuery<Agent> { Collection<String> attributeNames = OneDev.getInstance(AgentAttributeManager.class).getAttributeNames(); if (!QUERY_FIELDS.contains(fieldName) && !attributeNames.contains(fieldName)) throw new ExplicitException("Attribute not found: " + fieldName); - switch (operator) { - case AgentQueryLexer.IsGreaterThan: - case AgentQueryLexer.IsLessThan: - if (!fieldName.equals(NAME_CPU) - && !fieldName.equals(NAME_MEMORY)) { - throw newOperatorException(fieldName, operator); - } - break; - } } public static String getRuleName(int rule) { diff --git a/server-core/src/main/java/io/onedev/server/search/entity/agent/CpuCriteria.java b/server-core/src/main/java/io/onedev/server/search/entity/agent/CpuCriteria.java deleted file mode 100644 index 2e8078eec9..0000000000 --- a/server-core/src/main/java/io/onedev/server/search/entity/agent/CpuCriteria.java +++ /dev/null @@ -1,54 +0,0 @@ -package io.onedev.server.search.entity.agent; - -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.From; -import javax.persistence.criteria.Path; -import javax.persistence.criteria.Predicate; - -import io.onedev.server.model.Agent; -import io.onedev.server.search.entity.EntityQuery; -import io.onedev.server.util.criteria.Criteria; - -public class CpuCriteria extends Criteria<Agent> { - - private static final long serialVersionUID = 1L; - - private int value; - - private int operator; - - public CpuCriteria(String value, int operator) { - this.value = EntityQuery.getIntValue(value); - this.operator = operator; - } - - @Override - public Predicate getPredicate(CriteriaQuery<?> query, From<Agent, Agent> from, CriteriaBuilder builder) { - Path<Integer> attribute = from.get(Agent.PROP_CPU); - if (operator == AgentQueryLexer.IsGreaterThan) - return builder.greaterThan(attribute, value); - else if (operator == AgentQueryLexer.IsLessThan) - return builder.lessThan(attribute, value); - else - return builder.equal(attribute, value); - } - - @Override - public boolean matches(Agent agent) { - if (operator == AgentQueryLexer.IsGreaterThan) - 
return agent.getCpu() > value; - else if (operator == AgentQueryLexer.IsLessThan) - return agent.getCpu() < value; - else - return agent.getCpu() < value; - } - - @Override - public String toStringWithoutParens() { - return quote(Agent.NAME_CPU) + " " - + AgentQuery.getRuleName(operator) + " " - + quote(String.valueOf(value)); - } - -} diff --git a/server-core/src/main/java/io/onedev/server/search/entity/agent/MemoryCriteria.java b/server-core/src/main/java/io/onedev/server/search/entity/agent/MemoryCriteria.java deleted file mode 100644 index 5085cc578a..0000000000 --- a/server-core/src/main/java/io/onedev/server/search/entity/agent/MemoryCriteria.java +++ /dev/null @@ -1,54 +0,0 @@ -package io.onedev.server.search.entity.agent; - -import javax.persistence.criteria.CriteriaBuilder; -import javax.persistence.criteria.CriteriaQuery; -import javax.persistence.criteria.From; -import javax.persistence.criteria.Path; -import javax.persistence.criteria.Predicate; - -import io.onedev.server.model.Agent; -import io.onedev.server.search.entity.EntityQuery; -import io.onedev.server.util.criteria.Criteria; - -public class MemoryCriteria extends Criteria { - - private static final long serialVersionUID = 1L; - - private int value; - - private int operator; - - public MemoryCriteria(String value, int operator) { - this.value = EntityQuery.getIntValue(value); - this.operator = operator; - } - - @Override - public Predicate getPredicate(CriteriaQuery query, From from, CriteriaBuilder builder) { - Path attribute = from.get(Agent.PROP_MEMORY); - if (operator == AgentQueryLexer.IsGreaterThan) - return builder.greaterThan(attribute, value); - else if (operator == AgentQueryLexer.IsLessThan) - return builder.lessThan(attribute, value); - else - return builder.equal(attribute, value); - } - - @Override - public boolean matches(Agent agent) { - if (operator == AgentQueryLexer.IsGreaterThan) - return agent.getMemory() > value; - else if (operator == AgentQueryLexer.IsLessThan) - return agent.getMemory() < value; - else - return agent.getMemory() < value; - } - - @Override - public String toStringWithoutParens() { - return quote(Agent.NAME_MEMORY) + " " - + AgentQuery.getRuleName(operator) + " " - + quote(String.valueOf(value)); - } - -} diff --git a/server-core/src/main/java/io/onedev/server/util/concurrent/DefaultWorkExecutor.java b/server-core/src/main/java/io/onedev/server/util/concurrent/DefaultWorkExecutor.java index 7117de796c..d6a5ddeae9 100644 --- a/server-core/src/main/java/io/onedev/server/util/concurrent/DefaultWorkExecutor.java +++ b/server-core/src/main/java/io/onedev/server/util/concurrent/DefaultWorkExecutor.java @@ -19,27 +19,32 @@ import javax.inject.Inject; import javax.inject.Singleton; import io.onedev.server.ServerConfig; +import io.onedev.server.entitymanager.SettingManager; import io.onedev.server.security.SecurityUtils; @Singleton public class DefaultWorkExecutor implements WorkExecutor { + private final SettingManager settingManager; + private final ExecutorService executorService; private final Map>> runnings = new HashMap<>(); private final Map>> waitings = new HashMap<>(); - private final int concurrency; - @Inject - public DefaultWorkExecutor(ExecutorService executorService, ServerConfig serverConfig) { + public DefaultWorkExecutor(ExecutorService executorService, SettingManager settingManager) { this.executorService = executorService; - concurrency = serverConfig.getServerCpu() / 1000; + this.settingManager = settingManager; + } + + private int getConcurrency() { + return 
settingManager.getPerformanceSetting().getCpuIntensiveTaskConcurrency(); } private synchronized void check() { - if (concurrency > runnings.size()) { + if (getConcurrency() > runnings.size()) { Map averagePriorities = new HashMap<>(); for (Map.Entry>> entry: waitings.entrySet()) { int totalPriorities = 0; @@ -64,7 +69,7 @@ public class DefaultWorkExecutor implements WorkExecutor { runningsOfGroup.add(future.callable); } runnings.put(groupId, runningsOfGroup); - if (runnings.size() == concurrency) + if (runnings.size() == getConcurrency()) break; } } diff --git a/server-core/src/main/java/io/onedev/server/web/behavior/AgentQueryBehavior.java b/server-core/src/main/java/io/onedev/server/web/behavior/AgentQueryBehavior.java index 9463b56d2c..74b0bec16a 100644 --- a/server-core/src/main/java/io/onedev/server/web/behavior/AgentQueryBehavior.java +++ b/server-core/src/main/java/io/onedev/server/web/behavior/AgentQueryBehavior.java @@ -1,13 +1,7 @@ package io.onedev.server.web.behavior; -import java.util.ArrayList; -import java.util.List; - -import org.apache.commons.lang3.StringUtils; - import com.google.common.base.Optional; import com.google.common.base.Preconditions; - import io.onedev.commons.codeassist.FenceAware; import io.onedev.commons.codeassist.InputSuggestion; import io.onedev.commons.codeassist.grammar.LexerRuleRefElementSpec; @@ -26,6 +20,11 @@ import io.onedev.server.util.DateUtils; import io.onedev.server.web.behavior.inputassist.ANTLRAssistBehavior; import io.onedev.server.web.behavior.inputassist.InputAssistBehavior; import io.onedev.server.web.util.SuggestionUtils; +import org.apache.commons.lang3.StringUtils; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; @SuppressWarnings("serial") public class AgentQueryBehavior extends ANTLRAssistBehavior { @@ -46,6 +45,7 @@ public class AgentQueryBehavior extends ANTLRAssistBehavior { @Override protected List match(String matchWith) { + AgentManager agentManager = OneDev.getInstance(AgentManager.class); AgentAttributeManager attributeManager = OneDev.getInstance(AgentAttributeManager.class); if ("criteriaField".equals(spec.getLabel())) { List fields = new ArrayList<>(Agent.QUERY_FIELDS); @@ -73,14 +73,19 @@ public class AgentQueryBehavior extends ANTLRAssistBehavior { String fieldName = AgentQuery.getValue(fieldElements.get(0).getMatchedText()); try { AgentQuery.checkField(fieldName, operator); - if (fieldName.equals(Agent.NAME_OS_NAME)) - return SuggestionUtils.suggest(OneDev.getInstance(AgentManager.class).getOsNames(), matchWith); - else if (fieldName.equals(Agent.NAME_NAME)) + if (fieldName.equals(Agent.NAME_OS_NAME)) { + var osNames = new ArrayList<>(agentManager.getOsNames()); + Collections.sort(osNames); + return SuggestionUtils.suggest(osNames, matchWith); + } else if (fieldName.equals(Agent.NAME_NAME)) { return SuggestionUtils.suggestAgents(matchWith); - else if (fieldName.equals(Agent.NAME_OS_ARCH)) - return SuggestionUtils.suggest(OneDev.getInstance(AgentManager.class).getOsArchs(), matchWith); - else + } else if (fieldName.equals(Agent.NAME_OS_ARCH)) { + var osArchs = new ArrayList<>(agentManager.getOsArchs()); + Collections.sort(osArchs); + return SuggestionUtils.suggest(osArchs, matchWith); + } else { return null; + } } catch (ExplicitException ex) { } } diff --git a/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentListPanel.java b/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentListPanel.java index 7956e99edc..77cdb1dda7 
100644 --- a/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentListPanel.java +++ b/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentListPanel.java @@ -846,46 +846,6 @@ class AgentListPanel extends Panel { }); - columns.add(new AbstractColumn(Model.of("CPU")) { - - @Override - public void populateItem(Item> cellItem, String componentId, IModel rowModel) { - Agent agent = rowModel.getObject(); - cellItem.add(new Label(componentId, agent.getCpu())); - } - - @Override - public String getCssClass() { - return "d-none d-xl-table-cell"; - } - - @Override - public Component getHeader(String componentId) { - return new Fragment(componentId, "cpuHeaderFrag", AgentListPanel.this); - } - - }); - - columns.add(new AbstractColumn(Model.of("Memory")) { - - @Override - public void populateItem(Item> cellItem, String componentId, IModel rowModel) { - Agent agent = rowModel.getObject(); - cellItem.add(new Label(componentId, agent.getMemory())); - } - - @Override - public String getCssClass() { - return "d-none d-xl-table-cell"; - } - - @Override - public Component getHeader(String componentId) { - return new Fragment(componentId, "memoryHeaderFrag", AgentListPanel.this); - } - - }); - columns.add(new AbstractColumn(Model.of("Temporal")) { @Override diff --git a/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentOverviewPage.html b/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentOverviewPage.html index e242ab676c..c8bbcbb1e4 100644 --- a/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentOverviewPage.html +++ b/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentOverviewPage.html @@ -10,8 +10,6 @@ IP Address OS Version OS Arch - CPU - Memory Temporal Status @@ -32,12 +30,6 @@ - - - - - - diff --git a/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentOverviewPage.java b/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentOverviewPage.java index 308468a62e..6f98d1effc 100644 --- a/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentOverviewPage.java +++ b/server-core/src/main/java/io/onedev/server/web/page/admin/buildsetting/agent/AgentOverviewPage.java @@ -94,8 +94,6 @@ public class AgentOverviewPage extends AgentDetailPage { add(new Label("ipAddress", getAgent().getIpAddress())); add(new Label("osVersion", getAgent().getOsVersion())); add(new Label("osArch", getAgent().getOsArch())); - add(new Label("cpu", getAgent().getCpu())); - add(new Label("memory", getAgent().getMemory())); add(new Label("temporal", getAgent().isTemporal())); add(new AgentStatusBadge("status", agentModel)); diff --git a/server-plugin/pom.xml b/server-plugin/pom.xml index 52ee124a0b..8d490929c8 100644 --- a/server-plugin/pom.xml +++ b/server-plugin/pom.xml @@ -6,7 +6,7 @@ io.onedev server - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-archetype/pom.xml b/server-plugin/server-plugin-archetype/pom.xml index b14f605c78..94e39ffc71 100644 --- a/server-plugin/server-plugin-archetype/pom.xml +++ b/server-plugin/server-plugin-archetype/pom.xml @@ -6,7 +6,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-authenticator-ldap/pom.xml b/server-plugin/server-plugin-authenticator-ldap/pom.xml index 58ab25e794..5318f2cae5 100644 --- a/server-plugin/server-plugin-authenticator-ldap/pom.xml +++ 
b/server-plugin/server-plugin-authenticator-ldap/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.authenticator.ldap.LdapModule diff --git a/server-plugin/server-plugin-buildspec-gradle/pom.xml b/server-plugin/server-plugin-buildspec-gradle/pom.xml index e2e249b47a..a64afa7fdc 100644 --- a/server-plugin/server-plugin-buildspec-gradle/pom.xml +++ b/server-plugin/server-plugin-buildspec-gradle/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.buildspec.gradle.GradleModule diff --git a/server-plugin/server-plugin-buildspec-maven/pom.xml b/server-plugin/server-plugin-buildspec-maven/pom.xml index 96f9291c70..ddad176363 100644 --- a/server-plugin/server-plugin-buildspec-maven/pom.xml +++ b/server-plugin/server-plugin-buildspec-maven/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.buildspec.maven.MavenModule diff --git a/server-plugin/server-plugin-buildspec-node/pom.xml b/server-plugin/server-plugin-buildspec-node/pom.xml index 6c2e12a8f8..59779fa5b7 100644 --- a/server-plugin/server-plugin-buildspec-node/pom.xml +++ b/server-plugin/server-plugin-buildspec-node/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.buildspec.node.NodePluginModule diff --git a/server-plugin/server-plugin-executor-kubernetes/pom.xml b/server-plugin/server-plugin-executor-kubernetes/pom.xml index c8807e2480..32ef9e3c60 100644 --- a/server-plugin/server-plugin-executor-kubernetes/pom.xml +++ b/server-plugin/server-plugin-executor-kubernetes/pom.xml @@ -6,7 +6,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.executor.kubernetes.KubernetesModule diff --git a/server-plugin/server-plugin-executor-kubernetes/src/main/java/io/onedev/server/plugin/executor/kubernetes/KubernetesExecutor.java b/server-plugin/server-plugin-executor-kubernetes/src/main/java/io/onedev/server/plugin/executor/kubernetes/KubernetesExecutor.java index d6884a258d..448c9b39fa 100644 --- a/server-plugin/server-plugin-executor-kubernetes/src/main/java/io/onedev/server/plugin/executor/kubernetes/KubernetesExecutor.java +++ b/server-plugin/server-plugin-executor-kubernetes/src/main/java/io/onedev/server/plugin/executor/kubernetes/KubernetesExecutor.java @@ -1,45 +1,5 @@ package io.onedev.server.plugin.executor.kubernetes; -import static io.onedev.k8shelper.KubernetesHelper.ENV_JOB_TOKEN; -import static io.onedev.k8shelper.KubernetesHelper.ENV_OS_INFO; -import static io.onedev.k8shelper.KubernetesHelper.ENV_SERVER_URL; -import static io.onedev.k8shelper.KubernetesHelper.IMAGE_REPO_PREFIX; -import static io.onedev.k8shelper.KubernetesHelper.LOG_END_MESSAGE; -import static io.onedev.k8shelper.KubernetesHelper.parseStepPosition; -import static io.onedev.k8shelper.KubernetesHelper.stringifyStepPosition; - -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.IOException; -import java.io.Serializable; -import java.nio.charset.StandardCharsets; -import java.time.Instant; -import java.time.format.DateTimeFormatter; -import java.time.format.DateTimeParseException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicReference; - -import javax.annotation.Nullable; 
-import javax.validation.constraints.NotEmpty; - -import org.apache.commons.codec.binary.Base64; -import org.apache.commons.codec.binary.Hex; -import org.apache.commons.lang.SerializationUtils; -import org.apache.commons.lang3.SystemUtils; -import org.apache.commons.text.WordUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.yaml.snakeyaml.Yaml; - import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -47,44 +7,27 @@ import com.google.common.base.Preconditions; import com.google.common.base.Splitter; import com.google.common.collect.Lists; import com.google.common.collect.Sets; - import io.onedev.agent.job.FailedException; -import io.onedev.commons.utils.ExceptionUtils; -import io.onedev.commons.utils.ExplicitException; -import io.onedev.commons.utils.FileUtils; -import io.onedev.commons.utils.StringUtils; -import io.onedev.commons.utils.TaskLogger; +import io.onedev.commons.utils.*; import io.onedev.commons.utils.command.Commandline; import io.onedev.commons.utils.command.ExecutionResult; import io.onedev.commons.utils.command.LineConsumer; -import io.onedev.k8shelper.Action; -import io.onedev.k8shelper.BuildImageFacade; -import io.onedev.k8shelper.CommandFacade; -import io.onedev.k8shelper.CompositeFacade; -import io.onedev.k8shelper.ExecuteCondition; -import io.onedev.k8shelper.KubernetesHelper; -import io.onedev.k8shelper.LeafFacade; -import io.onedev.k8shelper.LeafVisitor; -import io.onedev.k8shelper.OsContainer; -import io.onedev.k8shelper.OsExecution; -import io.onedev.k8shelper.OsInfo; -import io.onedev.k8shelper.RegistryLoginFacade; -import io.onedev.k8shelper.RunContainerFacade; +import io.onedev.k8shelper.*; import io.onedev.server.OneDev; import io.onedev.server.ServerConfig; import io.onedev.server.buildspec.Service; import io.onedev.server.buildspec.job.EnvVar; +import io.onedev.server.cluster.ClusterManager; import io.onedev.server.entitymanager.SettingManager; -import io.onedev.server.job.AgentInfo; import io.onedev.server.job.JobContext; -import io.onedev.server.job.ResourceAllocator; +import io.onedev.server.job.JobManager; +import io.onedev.server.job.JobRunnable; import io.onedev.server.model.support.RegistryLogin; import io.onedev.server.model.support.administration.jobexecutor.JobExecutor; import io.onedev.server.model.support.administration.jobexecutor.NodeSelectorEntry; import io.onedev.server.model.support.administration.jobexecutor.ServiceLocator; import io.onedev.server.model.support.inputspec.SecretInput; import io.onedev.server.plugin.executor.kubernetes.KubernetesExecutor.TestData; -import io.onedev.server.search.entity.agent.AgentQuery; import io.onedev.server.terminal.CommandlineShell; import io.onedev.server.terminal.Shell; import io.onedev.server.terminal.Terminal; @@ -94,6 +37,32 @@ import io.onedev.server.web.editable.annotation.Editable; import io.onedev.server.web.editable.annotation.Horizontal; import io.onedev.server.web.editable.annotation.OmitName; import io.onedev.server.web.util.Testable; +import org.apache.commons.codec.binary.Base64; +import org.apache.commons.codec.binary.Hex; +import org.apache.commons.lang.SerializationUtils; +import org.apache.commons.lang3.RandomUtils; +import org.apache.commons.lang3.SystemUtils; +import org.apache.commons.text.WordUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +import javax.annotation.Nullable; +import 
javax.validation.constraints.NotEmpty; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.IOException; +import java.io.Serializable; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; +import java.util.*; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; + +import static io.onedev.k8shelper.KubernetesHelper.*; @Editable(order=KubernetesExecutor.ORDER, description="This executor runs build jobs as pods in a kubernetes cluster. " + "No agents are required." @@ -106,8 +75,6 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData> { + @Editable(order=400, description = "Specify cpu request for jobs using this executor. " + + "Check Kubernetes resource management for details") + @NotEmpty + public String getCpuRequest() { + return cpuRequest; + } + + public void setCpuRequest(String cpuRequest) { + this.cpuRequest = cpuRequest; + } + + @Editable(order=500, description = "Specify memory request for jobs using this executor. " + + "Check Kubernetes resource management for details") + @NotEmpty + public String getMemoryRequest() { + return memoryRequest; + } + + public void setMemoryRequest(String memoryRequest) { + this.memoryRequest = memoryRequest; + } + public void setMountContainerSock(boolean mountContainerSock) { + this.mountContainerSock = mountContainerSock; + } + @Editable(order=24990, group="More Settings", placeholder = "No limit", description = "" + + "Optionally specify cpu limit for jobs using this executor. " + + "Check Kubernetes resource management for details") + public String getCpuLimit() { + return cpuLimit; + } + + public void setCpuLimit(String cpuLimit) { + this.cpuLimit = cpuLimit; + } + + @Editable(order=24995, group="More Settings", placeholder = "No limit", description = "" + + "Optionally specify memory limit for jobs using this executor. " + + "Check Kubernetes resource management for details") + public String getMemoryLimit() { + return memoryLimit; + } + + public void setMemoryLimit(String memoryLimit) { + this.memoryLimit = memoryLimit; + } + @Editable(order=25000, group="More Settings", description="Optionally specify where to run service pods " + "specified in job. The first matching locator will be used. 
If no locator matches, " + "the node selector of the executor will be used") @@ -212,13 +231,103 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData> { + @Override + public void execute(JobContext jobContext) { + var servers = new ArrayList<>(OneDev.getInstance(ClusterManager.class) + .getHazelcastInstance().getCluster().getMembers()); + var serverUUID = servers.get(RandomUtils.nextInt(0, servers.size())).getUuid(); + + getJobManager().runJob(serverUUID, ()-> { + getJobManager().runJobLocal(jobContext, new JobRunnable() { - @Override - public void execute(JobContext jobContext, TaskLogger jobLogger, AgentInfo agentInfo) { - execute(jobLogger, jobContext); + private static final long serialVersionUID = 1L; + + @Override + public void run(TaskLogger jobLogger) { + execute(jobLogger, jobContext); + } + + @Override + public void resume(JobContext jobContext) { + if (osInfo != null) { + Commandline kubectl = newKubeCtl(); + kubectl.addArgs("exec", "job", "--container", "sidecar", "--namespace", getNamespace(jobContext), "--"); + if (osInfo.isLinux()) + kubectl.addArgs("touch", "/onedev-build/continue"); + else + kubectl.addArgs("cmd", "/c", "copy", "NUL", "C:\\onedev-build\\continue"); + kubectl.execute(new LineConsumer() { + + @Override + public void consume(String line) { + logger.debug(line); + } + + }, new LineConsumer() { + + @Override + public void consume(String line) { + logger.error("Kubernetes: " + line); + } + + }).checkReturnCode(); + } + } + + @Override + public Shell openShell(JobContext jobContext, Terminal terminal) { + String containerNameCopy = containerName; + if (osInfo != null && containerNameCopy != null) { + Commandline kubectl = newKubeCtl(); + kubectl.addArgs("exec", "-it", POD_NAME, "-c", containerNameCopy, + "--namespace", getNamespace(jobContext), "--"); + + String workingDir; + if (containerNameCopy.startsWith("step-")) { + List<Integer> stepPosition = parseStepPosition(containerNameCopy.substring("step-".length())); + LeafFacade step = Preconditions.checkNotNull(jobContext.getStep(stepPosition)); + if (step instanceof RunContainerFacade) + workingDir = ((RunContainerFacade)step).getContainer(osInfo).getWorkingDir(); + else if (osInfo.isLinux()) + workingDir = "/onedev-build/workspace"; + else + workingDir = "C:\\onedev-build\\workspace"; + } else if (osInfo.isLinux()) { + workingDir = "/onedev-build/workspace"; + } else { + workingDir = "C:\\onedev-build\\workspace"; + } + + String[] shell = null; + if (containerNameCopy.startsWith("step-")) { + List<Integer> stepPosition = parseStepPosition(containerNameCopy.substring("step-".length())); + LeafFacade step = Preconditions.checkNotNull(jobContext.getStep(stepPosition)); + if (step instanceof CommandFacade) + shell = ((CommandFacade)step).getShell(osInfo.isWindows(), workingDir); + } + if (shell == null) { + if (workingDir != null) { + if (osInfo.isLinux()) + shell = new String[]{"sh", "-c", String.format("cd '%s' && sh", workingDir)}; + else + shell = new String[]{"cmd", "/c", String.format("cd %s && cmd", workingDir)}; + } else if (osInfo.isLinux()) { + shell = new String[]{"sh"}; + } else { + shell = new String[]{"cmd"}; + } + } + kubectl.addArgs(shell); + return new CommandlineShell(terminal, kubectl); + } else { + throw new ExplicitException("Shell not ready"); + } + } + }); + }); + } + + private JobManager getJobManager() { + return OneDev.getInstance(JobManager.class); } private String getNamespace(@Nullable JobContext jobContext) { @@ -230,83 +339,6 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData> { - List<Integer> stepPosition = 
parseStepPosition(containerNameCopy.substring("step-".length())); - LeafFacade step = Preconditions.checkNotNull(jobContext.getStep(stepPosition)); - if (step instanceof RunContainerFacade) - workingDir = ((RunContainerFacade)step).getContainer(osInfo).getWorkingDir(); - else if (osInfo.isLinux()) - workingDir = "/onedev-build/workspace"; - else - workingDir = "C:\\onedev-build\\workspace"; - } else if (osInfo.isLinux()) { - workingDir = "/onedev-build/workspace"; - } else { - workingDir = "C:\\onedev-build\\workspace"; - } - - String[] shell = null; - if (containerNameCopy.startsWith("step-")) { - List stepPosition = parseStepPosition(containerNameCopy.substring("step-".length())); - LeafFacade step = Preconditions.checkNotNull(jobContext.getStep(stepPosition)); - if (step instanceof CommandFacade) - shell = ((CommandFacade)step).getShell(osInfo.isWindows(), workingDir); - } - if (shell == null) { - if (workingDir != null) { - if (osInfo.isLinux()) - shell = new String[]{"sh", "-c", String.format("cd '%s' && sh", workingDir)}; - else - shell = new String[]{"cmd", "/c", String.format("cd %s && cmd", workingDir)}; - } else if (osInfo.isLinux()) { - shell = new String[]{"sh"}; - } else { - shell = new String[]{"cmd"}; - } - } - kubectl.addArgs(shell); - return new CommandlineShell(terminal, kubectl); - } else { - throw new ExplicitException("Shell not ready"); - } - } - @Override public boolean isPlaceholderAllowed() { return false; @@ -419,7 +451,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable resourcesSpec = CollectionUtils.newLinkedHashMap( + "requests", CollectionUtils.newLinkedHashMap( + "cpu", getCpuRequest(), + "memory", getMemoryRequest())); + Map limitsSpec = new LinkedHashMap<>(); + if (getCpuLimit() != null) + limitsSpec.put("cpu", getCpuLimit()); + if (getMemoryLimit() != null) + limitsSpec.put("memory", getMemoryLimit()); + if (!limitsSpec.isEmpty()) + resourcesSpec.put("limits", limitsSpec); + containerSpec.put("resources", resourcesSpec); List> envs = new ArrayList<>(); for (EnvVar envVar: jobService.getEnvVars()) { envs.add(CollectionUtils.newLinkedHashMap( @@ -804,7 +842,6 @@ public class KubernetesExecutor extends JobExecutor implements Testable requestsSpec = CollectionUtils.newLinkedHashMap( + "cpu", "0", + "memory", "0"); + Map limitsSpec = new LinkedHashMap<>(); + if (getCpuLimit() != null) + limitsSpec.put("cpu", getCpuLimit()); + if (getMemoryLimit() != null) + limitsSpec.put("memory", getMemoryLimit()); + if (!limitsSpec.isEmpty()) { + stepContainerSpec.put( + "resources", CollectionUtils.newLinkedHashMap( + "limits", limitsSpec, + "requests", requestsSpec)); + } + containerSpecs.add(stepContainerSpec); return null; @@ -1063,11 +1115,9 @@ public class KubernetesExecutor extends JobExecutor implements Testable nodeNameRef = new AtomicReference<>(null); - watchPod(namespace, POD_NAME, new AbortChecker() { + watchPod(namespace, new AbortChecker() { @Override public Abort check(String nodeName, Collection containerStatusNodes) { @@ -1140,6 +1190,9 @@ public class KubernetesExecutor extends JobExecutor implements Testable containerStatusNodes) { @@ -1191,10 +1244,10 @@ public class KubernetesExecutor extends JobExecutor implements Testable containerStatusNodes) { @@ -1333,7 +1386,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable abortRef = new AtomicReference<>(null); StringBuilder json = new StringBuilder(); - kubectl.addArgs("get", "pod", podName, "-n", namespace, "--watch", "-o", "json"); - - 
kubectl.timeout(POD_WATCH_TIMEOUT); + kubectl.addArgs("get", "pod", POD_NAME, "-n", namespace, "--watch", "-o", "json"); Thread thread = Thread.currentThread(); diff --git a/server-plugin/server-plugin-executor-remotedocker/pom.xml b/server-plugin/server-plugin-executor-remotedocker/pom.xml index 48ae347c7a..07d143498e 100644 --- a/server-plugin/server-plugin-executor-remotedocker/pom.xml +++ b/server-plugin/server-plugin-executor-remotedocker/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-executor-remotedocker/src/main/java/io/onedev/server/plugin/executor/remotedocker/RemoteDockerExecutor.java b/server-plugin/server-plugin-executor-remotedocker/src/main/java/io/onedev/server/plugin/executor/remotedocker/RemoteDockerExecutor.java index 3d5cd344ca..d9b6431f2b 100644 --- a/server-plugin/server-plugin-executor-remotedocker/src/main/java/io/onedev/server/plugin/executor/remotedocker/RemoteDockerExecutor.java +++ b/server-plugin/server-plugin-executor-remotedocker/src/main/java/io/onedev/server/plugin/executor/remotedocker/RemoteDockerExecutor.java @@ -1,16 +1,5 @@ package io.onedev.server.plugin.executor.remotedocker; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.concurrent.TimeoutException; - -import org.eclipse.jetty.websocket.api.Session; - -import io.onedev.agent.AgentData; import io.onedev.agent.Message; import io.onedev.agent.MessageTypes; import io.onedev.agent.WebsocketUtils; @@ -21,13 +10,12 @@ import io.onedev.commons.utils.TaskLogger; import io.onedev.server.OneDev; import io.onedev.server.buildspec.Service; import io.onedev.server.cluster.ClusterManager; -import io.onedev.server.job.AgentInfo; -import io.onedev.server.job.JobContext; -import io.onedev.server.job.ResourceAllocator; -import io.onedev.server.job.ResourceRunnable; +import io.onedev.server.entitymanager.AgentManager; +import io.onedev.server.job.*; import io.onedev.server.job.log.LogManager; import io.onedev.server.job.log.LogTask; import io.onedev.server.model.support.RegistryLogin; +import io.onedev.server.persistence.SessionManager; import io.onedev.server.plugin.executor.serverdocker.ServerDockerExecutor; import io.onedev.server.search.entity.agent.AgentQuery; import io.onedev.server.terminal.AgentShell; @@ -35,6 +23,15 @@ import io.onedev.server.terminal.Shell; import io.onedev.server.terminal.Terminal; import io.onedev.server.util.CollectionUtils; import io.onedev.server.web.editable.annotation.Editable; +import io.onedev.server.web.editable.annotation.Numeric; +import org.eclipse.jetty.websocket.api.Session; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.TimeoutException; @Editable(order=210, description="This executor runs build jobs as docker containers on remote machines via agents") public class RemoteDockerExecutor extends ServerDockerExecutor { @@ -42,7 +39,7 @@ public class RemoteDockerExecutor extends ServerDockerExecutor { private static final long serialVersionUID = 1L; private String agentQuery; - + private transient volatile Session agentSession; @Editable(order=390, name="Agent Selector", placeholder="Any agent", @@ -56,41 +53,104 @@ public class RemoteDockerExecutor extends ServerDockerExecutor { this.agentQuery = agentQuery; } + @Editable(order=450, placeholder = "Number of agent cpu", description = 
"" + + "Specify max number of jobs/services this executor can run concurrently on each matched agent") + @Numeric @Override - public AgentQuery getAgentRequirement() { - return AgentQuery.parse(agentQuery, true); + public String getConcurrency() { + return super.getConcurrency(); + } + + @Override + public void setConcurrency(String concurrency) { + super.setConcurrency(concurrency); + } + + private AgentManager getAgentManager() { + return OneDev.getInstance(AgentManager.class); + } + + private JobManager getJobManager() { + return OneDev.getInstance(JobManager.class); + } + + private SessionManager getSessionManager() { + return OneDev.getInstance(SessionManager.class); + } + + private int getConcurrencyNumber() { + if (getConcurrency() != null) + return Integer.parseInt(getConcurrency()); + else + return 0; } @Override - public void execute(JobContext jobContext, TaskLogger jobLogger, AgentInfo agentInfo) { - jobLogger.log(String.format("Executing job (executor: %s, agent: %s)...", - getName(), agentInfo.getData().getName())); + public void execute(JobContext jobContext) { + AgentRunnable runnable = (agentId) -> { + getJobManager().runJobLocal(jobContext, new JobRunnable() { + + @Override + public void run(TaskLogger jobLogger) { + notifyJobRunning(jobContext.getBuildId(), agentId); + + var agentData = getSessionManager().call( + () -> getAgentManager().load(agentId).getAgentData()); - List> registryLogins = new ArrayList<>(); - for (RegistryLogin login: getRegistryLogins()) { - registryLogins.add(CollectionUtils.newHashMap( - "url", login.getRegistryUrl(), - "userName", login.getUserName(), - "password", login.getPassword())); - } - - List> services = new ArrayList<>(); - for (Service service: jobContext.getServices()) - services.add(service.toMap()); + agentSession = getAgentManager().getAgentSession(agentId); + if (agentSession == null) + throw new ExplicitException("Allocated agent not connected to current server, please retry later"); - String jobToken = jobContext.getJobToken(); - List trustCertContent = getTrustCertContent(); - DockerJobData jobData = new DockerJobData(jobToken, getName(), jobContext.getProjectPath(), - jobContext.getProjectId(), jobContext.getRefName(), jobContext.getCommitId().name(), - jobContext.getBuildNumber(), jobContext.getActions(), jobContext.getRetried(), - services, registryLogins, isMountDockerSock(), getDockerSockPath(), trustCertContent, getRunOptions()); + jobLogger.log(String.format("Executing job (executor: %s, agent: %s)...", + getName(), agentData.getName())); + + List> registryLogins = new ArrayList<>(); + for (RegistryLogin login : getRegistryLogins()) { + registryLogins.add(CollectionUtils.newHashMap( + "url", login.getRegistryUrl(), + "userName", login.getUserName(), + "password", login.getPassword())); + } + + List> services = new ArrayList<>(); + for (Service service : jobContext.getServices()) + services.add(service.toMap()); + + String jobToken = jobContext.getJobToken(); + List trustCertContent = getTrustCertContent(); + DockerJobData jobData = new DockerJobData(jobToken, getName(), jobContext.getProjectPath(), + jobContext.getProjectId(), jobContext.getRefName(), jobContext.getCommitId().name(), + jobContext.getBuildNumber(), jobContext.getActions(), jobContext.getRetried(), + services, registryLogins, isMountDockerSock(), getDockerSockPath(), trustCertContent, + getCpuLimit(), getMemoryLimit(), getRunOptions()); + + try { + WebsocketUtils.call(agentSession, jobData, 0); + } catch (InterruptedException | TimeoutException e) { + new 
Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); + } + } + + @Override + public void resume(JobContext jobContext) { + if (agentSession != null ) + new Message(MessageTypes.RESUME_JOB, jobContext.getJobToken()).sendBy(agentSession); + } + + @Override + public Shell openShell(JobContext jobContext, Terminal terminal) { + if (agentSession != null) + return new AgentShell(terminal, agentSession, jobContext.getJobToken()); + else + throw new ExplicitException("Shell not ready"); + } + + }); + }; - agentSession = agentInfo.getSession(); - try { - WebsocketUtils.call(agentSession, jobData, 0); - } catch (InterruptedException | TimeoutException e) { - new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); - } + getResourceAllocator().runAgentJob( + AgentQuery.parse(agentQuery, true), getName(), getConcurrencyNumber(), + jobContext.getServices().size()+1, runnable); } private LogManager getLogManager() { @@ -112,105 +172,68 @@ public class RemoteDockerExecutor extends ServerDockerExecutor { try { UUID localServerUUID = getClusterManager().getLocalServerUUID(); jobLogger.log("Waiting for resources..."); - getResourceAllocator().run( - new TestRunnable(jobToken, this, testData, localServerUUID), - getAgentRequirement(), new HashMap<>()); + + AgentRunnable runnable = agentId -> { + TaskLogger currentJobLogger = new TaskLogger() { + + @Override + public void log(String message, String sessionId) { + getClusterManager().runOnServer( + localServerUUID, + new LogTask(jobToken, message, sessionId)); + } + + }; + + var agentData = getSessionManager().call( + () -> getAgentManager().load(agentId).getAgentData()); + + Session agentSession = getAgentManager().getAgentSession(agentId); + if (agentSession == null) + throw new ExplicitException("Allocated agent not connected to current server, please retry later"); + + currentJobLogger.log(String.format("Testing on agent '%s'...", agentData.getName())); + + List> registryLogins = new ArrayList<>(); + for (RegistryLogin login: getRegistryLogins()) { + registryLogins.add(CollectionUtils.newHashMap( + "url", login.getRegistryUrl(), + "userName", login.getUserName(), + "password", login.getPassword())); + } + + TestDockerJobData jobData = new TestDockerJobData(getName(), jobToken, + testData.getDockerImage(), registryLogins, getRunOptions()); + + if (getLogManager().getJobLogger(jobToken) == null) { + getLogManager().addJobLogger(jobToken, currentJobLogger); + try { + WebsocketUtils.call(agentSession, jobData, 0); + } catch (InterruptedException | TimeoutException e) { + new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); + } finally { + getLogManager().removeJobLogger(jobToken); + } + } else { + try { + WebsocketUtils.call(agentSession, jobData, 0); + } catch (InterruptedException | TimeoutException e) { + new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); + } + } + }; + + getResourceAllocator().runAgentJob( + AgentQuery.parse(agentQuery, true), getName(), + getConcurrencyNumber(), 1, runnable); } finally { getLogManager().removeJobLogger(jobToken); } } - private void testLocal(String jobToken, AgentInfo agentInfo, - TestData testData, UUID dispatcherServerUUID) { - TaskLogger jobLogger = new TaskLogger() { - - @Override - public void log(String message, String sessionId) { - getClusterManager().runOnServer( - dispatcherServerUUID, - new LogTask(jobToken, message, sessionId)); - } - - }; - - AgentData agentData = agentInfo.getData(); - Session agentSession = agentInfo.getSession(); - - 
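
Aside: the flow above is the heart of this change. Executors no longer receive a pre-allocated AgentInfo; instead they hand an AgentRunnable to ResourceAllocator.runAgentJob together with an agent query, the executor name, a per-agent concurrency cap, and the number of slots the run occupies. A condensed caller sketch follows, using only the types visible in this patch (AgentRunnable, ResourceAllocator, AgentManager); the exact blocking and queueing semantics of runAgentJob are assumptions, and the surrounding executor fields (agentQuery, getConcurrencyNumber) are those of the enclosing executor class:

    AgentRunnable runnable = agentId -> {
        // agentId identifies the allocated agent; resolve its live websocket session
        Session agentSession = OneDev.getInstance(AgentManager.class).getAgentSession(agentId);
        if (agentSession == null)
            throw new ExplicitException("Allocated agent not connected to current server, please retry later");
        // ... build the job payload and push it over agentSession ...
    };
    OneDev.getInstance(ResourceAllocator.class).runAgentJob(
            AgentQuery.parse(agentQuery, true), // which agents are eligible
            getName(),                          // executor name, used to account occupied slots
            getConcurrencyNumber(),             // per-agent cap; 0 presumably defaults to the agent CPU count
            1,                                  // slots this run occupies
            runnable);

Note that test(...) passes 1 for the slot count while execute(...) passes jobContext.getServices().size() + 1, since each service container occupies a slot of its own.
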
jobLogger.log(String.format("Testing on agent '%s'...", agentData.getName())); - - List> registryLogins = new ArrayList<>(); - for (RegistryLogin login: getRegistryLogins()) { - registryLogins.add(CollectionUtils.newHashMap( - "url", login.getRegistryUrl(), - "userName", login.getUserName(), - "password", login.getPassword())); - } - - TestDockerJobData jobData = new TestDockerJobData(getName(), jobToken, - testData.getDockerImage(), registryLogins, getRunOptions()); - - if (getLogManager().getJobLogger(jobToken) == null) { - getLogManager().addJobLogger(jobToken, jobLogger); - try { - WebsocketUtils.call(agentSession, jobData, 0); - } catch (InterruptedException | TimeoutException e) { - new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); - } finally { - getLogManager().removeJobLogger(jobToken); - } - } else { - try { - WebsocketUtils.call(agentSession, jobData, 0); - } catch (InterruptedException | TimeoutException e) { - new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); - } - } - } - - @Override - public void resume(JobContext jobContext) { - if (agentSession != null ) - new Message(MessageTypes.RESUME_JOB, jobContext.getJobToken()).sendBy(agentSession); - } - - @Override - public Shell openShell(JobContext jobContext, Terminal terminal) { - if (agentSession != null) - return new AgentShell(terminal, agentSession, jobContext.getJobToken()); - else - throw new ExplicitException("Shell not ready"); - } - @Override public String getDockerExecutable() { return super.getDockerExecutable(); } - private static class TestRunnable implements ResourceRunnable { - - private static final long serialVersionUID = 1L; - - private final String jobToken; - - private final RemoteDockerExecutor jobExecutor; - - private final TestData testData; - - private final UUID dispatcherServerUUID; - - public TestRunnable(String jobToken, RemoteDockerExecutor jobExecutor, - TestData testData, UUID dispatcherServerUUID) { - this.jobToken = jobToken; - this.jobExecutor = jobExecutor; - this.testData = testData; - this.dispatcherServerUUID = dispatcherServerUUID; - } - - @Override - public void run(AgentInfo agentInfo) { - jobExecutor.testLocal(jobToken, agentInfo, testData, dispatcherServerUUID); - } - - } - } \ No newline at end of file diff --git a/server-plugin/server-plugin-executor-remoteshell/pom.xml b/server-plugin/server-plugin-executor-remoteshell/pom.xml index 11dfa4f94c..3e87125a4c 100644 --- a/server-plugin/server-plugin-executor-remoteshell/pom.xml +++ b/server-plugin/server-plugin-executor-remoteshell/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-executor-remoteshell/src/main/java/io/onedev/server/plugin/executor/remoteshell/RemoteShellExecutor.java b/server-plugin/server-plugin-executor-remoteshell/src/main/java/io/onedev/server/plugin/executor/remoteshell/RemoteShellExecutor.java index 333bfab07d..2a23a1f937 100644 --- a/server-plugin/server-plugin-executor-remoteshell/src/main/java/io/onedev/server/plugin/executor/remoteshell/RemoteShellExecutor.java +++ b/server-plugin/server-plugin-executor-remoteshell/src/main/java/io/onedev/server/plugin/executor/remoteshell/RemoteShellExecutor.java @@ -1,14 +1,5 @@ package io.onedev.server.plugin.executor.remoteshell; -import java.io.File; -import java.util.HashMap; -import java.util.List; -import java.util.UUID; -import java.util.concurrent.TimeoutException; - -import org.eclipse.jetty.websocket.api.Session; - -import io.onedev.agent.AgentData; import 
io.onedev.agent.Message; import io.onedev.agent.MessageTypes; import io.onedev.agent.WebsocketUtils; @@ -19,12 +10,11 @@ import io.onedev.commons.utils.TaskLogger; import io.onedev.server.OneDev; import io.onedev.server.buildspec.job.CacheSpec; import io.onedev.server.cluster.ClusterManager; -import io.onedev.server.job.AgentInfo; -import io.onedev.server.job.JobContext; -import io.onedev.server.job.ResourceAllocator; -import io.onedev.server.job.ResourceRunnable; +import io.onedev.server.entitymanager.AgentManager; +import io.onedev.server.job.*; import io.onedev.server.job.log.LogManager; import io.onedev.server.job.log.LogTask; +import io.onedev.server.persistence.SessionManager; import io.onedev.server.plugin.executor.servershell.ServerShellExecutor; import io.onedev.server.search.entity.agent.AgentQuery; import io.onedev.server.terminal.AgentShell; @@ -32,6 +22,13 @@ import io.onedev.server.terminal.Shell; import io.onedev.server.terminal.Terminal; import io.onedev.server.web.editable.annotation.Editable; import io.onedev.server.web.editable.annotation.Horizontal; +import io.onedev.server.web.editable.annotation.Numeric; +import org.eclipse.jetty.websocket.api.Session; + +import java.io.File; +import java.util.List; +import java.util.UUID; +import java.util.concurrent.TimeoutException; @Editable(order=500, name="Remote Shell Executor", description="" + "This executor runs build jobs with remote machines' shell facility via agents
" @@ -58,40 +55,92 @@ public class RemoteShellExecutor extends ServerShellExecutor { this.agentQuery = agentQuery; } + @Editable(order=1000, placeholder = "Number of agent cpu", description = "" + + "Specify max number of jobs this executor can run concurrently on " + + "each matched agent") + @Numeric @Override - public AgentQuery getAgentRequirement() { - return AgentQuery.parse(agentQuery, true); + public String getConcurrency() { + return super.getConcurrency(); + } + + @Override + public void setConcurrency(String concurrency) { + super.setConcurrency(concurrency); + } + + private int getConcurrencyNumber() { + if (getConcurrency() != null) + return Integer.parseInt(getConcurrency()); + else + return 0; } @Override - public void execute(JobContext jobContext, TaskLogger jobLogger, AgentInfo agentInfo) { - jobLogger.log(String.format("Executing job (executor: %s, agent: %s)...", - getName(), agentInfo.getData().getName())); + public void execute(JobContext jobContext) { + AgentRunnable runnable = (agentId) -> { + getJobManager().runJobLocal(jobContext, new JobRunnable() { - if (!jobContext.getServices().isEmpty()) { - throw new ExplicitException("This job requires services, which can only be supported " - + "by docker aware executors"); - } - - for (CacheSpec cacheSpec: jobContext.getCacheSpecs()) { - if (new File(cacheSpec.getPath()).isAbsolute()) { - throw new ExplicitException("Shell executor does not support " - + "absolute cache path: " + cacheSpec.getPath()); - } - } - - String jobToken = jobContext.getJobToken(); - List trustCertContent = getTrustCertContent(); - ShellJobData jobData = new ShellJobData(jobToken, getName(), jobContext.getProjectPath(), - jobContext.getProjectId(), jobContext.getRefName(), jobContext.getCommitId().name(), - jobContext.getBuildNumber(), jobContext.getActions(), trustCertContent); + private static final long serialVersionUID = 1L; - agentSession = agentInfo.getSession(); - try { - WebsocketUtils.call(agentSession, jobData, 0); - } catch (InterruptedException | TimeoutException e) { - new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); - } + @Override + public void run(TaskLogger jobLogger) { + notifyJobRunning(jobContext.getBuildId(), agentId); + + var agentData = getSessionManager().call( + () -> getAgentManager().load(agentId).getAgentData()); + + agentSession = getAgentManager().getAgentSession(agentId); + if (agentSession == null) + throw new ExplicitException("Allocated agent not connected to current server, please retry later"); + + jobLogger.log(String.format("Executing job (executor: %s, agent: %s)...", + getName(), agentData.getName())); + + if (!jobContext.getServices().isEmpty()) { + throw new ExplicitException("This job requires services, which can only be supported " + + "by docker aware executors"); + } + + for (CacheSpec cacheSpec : jobContext.getCacheSpecs()) { + if (new File(cacheSpec.getPath()).isAbsolute()) { + throw new ExplicitException("Shell executor does not support " + + "absolute cache path: " + cacheSpec.getPath()); + } + } + + String jobToken = jobContext.getJobToken(); + List trustCertContent = getTrustCertContent(); + ShellJobData jobData = new ShellJobData(jobToken, getName(), jobContext.getProjectPath(), + jobContext.getProjectId(), jobContext.getRefName(), jobContext.getCommitId().name(), + jobContext.getBuildNumber(), jobContext.getActions(), trustCertContent); + + try { + WebsocketUtils.call(agentSession, jobData, 0); + } catch (InterruptedException | TimeoutException e) { + new 
Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); + } + } + + @Override + public void resume(JobContext jobContext) { + if (agentSession != null) + new Message(MessageTypes.RESUME_JOB, jobContext.getJobToken()).sendBy(agentSession); + } + + @Override + public Shell openShell(JobContext jobContext, Terminal terminal) { + if (agentSession != null) + return new AgentShell(terminal, agentSession, jobContext.getJobToken()); + else + throw new ExplicitException("Shell not ready"); + } + + }); + }; + + getResourceAllocator().runAgentJob(AgentQuery.parse(agentQuery, true), getName(), + getConcurrencyNumber(), 1, runnable); } private LogManager getLogManager() { @@ -102,101 +151,75 @@ public class RemoteShellExecutor extends ServerShellExecutor { return OneDev.getInstance(ClusterManager.class); } + public JobManager getJobManager() { + return OneDev.getInstance(JobManager.class); + } + private ResourceAllocator getResourceAllocator() { return OneDev.getInstance(ResourceAllocator.class); } + private AgentManager getAgentManager() { + return OneDev.getInstance(AgentManager.class); + } + + private SessionManager getSessionManager() { + return OneDev.getInstance(SessionManager.class); + } + @Override public void test(TestData testData, TaskLogger jobLogger) { String jobToken = UUID.randomUUID().toString(); - UUID localServerUUID = getClusterManager().getLocalServerUUID(); getLogManager().addJobLogger(jobToken, jobLogger); try { + UUID localServerUUID = getClusterManager().getLocalServerUUID(); jobLogger.log("Waiting for resources..."); - getResourceAllocator().run( - new TestRunnable(jobToken, this, testData, localServerUUID), - getAgentRequirement(), new HashMap<>()); + AgentRunnable runnable = agentId -> { + TaskLogger currentJobLogger = new TaskLogger() { + + @Override + public void log(String message, String sessionId) { + getClusterManager().runOnServer( + localServerUUID, + new LogTask(jobToken, message, sessionId)); + } + + }; + + var agentData = getSessionManager().call( + () -> getAgentManager().load(agentId).getAgentData()); + + Session agentSession = getAgentManager().getAgentSession(agentId); + if (agentSession == null) + throw new ExplicitException("Allocated agent not connected to current server, please retry later"); + + currentJobLogger.log(String.format("Testing on agent '%s'...", agentData.getName())); + + TestShellJobData jobData = new TestShellJobData(jobToken, testData.getCommands()); + + if (getLogManager().getJobLogger(jobToken) == null) { + getLogManager().addJobLogger(jobToken, currentJobLogger); + try { + WebsocketUtils.call(agentSession, jobData, 0); + } catch (InterruptedException | TimeoutException e) { + new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); + } finally { + getLogManager().removeJobLogger(jobToken); + } + } else { + try { + WebsocketUtils.call(agentSession, jobData, 0); + } catch (InterruptedException | TimeoutException e) { + new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); + } + } + }; + + getResourceAllocator().runAgentJob(AgentQuery.parse(agentQuery, true), getName(), + getConcurrencyNumber(), 1, runnable); } finally { getLogManager().removeJobLogger(jobToken); } } - private void testLocal(String jobToken, AgentInfo agentInfo, - TestData testData, UUID dispatcherMemberUUID) { - TaskLogger jobLogger = new TaskLogger() { - - @Override - public void log(String message, String sessionId) { - getClusterManager().runOnServer( - dispatcherMemberUUID, - new LogTask(jobToken, message, sessionId)); - } - - }; - - 
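
One idiom recurs in every remote executor touched by this patch: a synchronous WebsocketUtils.call(...) whose wait, if interrupted or timed out, is converted into a CANCEL_JOB message so the agent side tears the job down as well. Condensed below with names exactly as they appear in the surrounding diff; the meaning of the 0 timeout argument is an assumption:

    try {
        // push the job data to the agent and block until the remote job finishes
        // (0 presumably means no client-side timeout)
        WebsocketUtils.call(agentSession, jobData, 0);
    } catch (InterruptedException | TimeoutException e) {
        // interruption typically means the build was cancelled on the server;
        // propagate that to the agent so the remote job stops as well
        new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession);
    }
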
AgentData agentData = agentInfo.getData(); - Session agentSession = agentInfo.getSession(); - jobLogger.log(String.format("Testing on agent '%s'...", agentData.getName())); - - TestShellJobData jobData = new TestShellJobData(jobToken, testData.getCommands()); - - if (getLogManager().getJobLogger(jobToken) == null) { - getLogManager().addJobLogger(jobToken, jobLogger); - try { - WebsocketUtils.call(agentSession, jobData, 0); - } catch (InterruptedException | TimeoutException e) { - new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); - } finally { - getLogManager().removeJobLogger(jobToken); - } - } else { - try { - WebsocketUtils.call(agentSession, jobData, 0); - } catch (InterruptedException | TimeoutException e) { - new Message(MessageTypes.CANCEL_JOB, jobToken).sendBy(agentSession); - } - } - } - - @Override - public void resume(JobContext jobContext) { - if (agentSession != null) - new Message(MessageTypes.RESUME_JOB, jobContext.getJobToken()).sendBy(agentSession); - } - - @Override - public Shell openShell(JobContext jobContext, Terminal terminal) { - if (agentSession != null) - return new AgentShell(terminal, agentSession, jobContext.getJobToken()); - else - throw new ExplicitException("Shell not ready"); - } - - private static class TestRunnable implements ResourceRunnable { - - private static final long serialVersionUID = 1L; - - private final String jobToken; - - private final RemoteShellExecutor jobExecutor; - - private final TestData testData; - - private final UUID dispatcherServerUUID; - - public TestRunnable(String jobToken, RemoteShellExecutor jobExecutor, - TestData testData, UUID dispatcherServerUUID) { - this.jobToken = jobToken; - this.jobExecutor = jobExecutor; - this.testData = testData; - this.dispatcherServerUUID = dispatcherServerUUID; - } - - @Override - public void run(AgentInfo agentInfo) { - jobExecutor.testLocal(jobToken, agentInfo, testData, dispatcherServerUUID); - } - - } - } \ No newline at end of file diff --git a/server-plugin/server-plugin-executor-serverdocker/pom.xml b/server-plugin/server-plugin-executor-serverdocker/pom.xml index 1159d713c3..d988b3f26a 100644 --- a/server-plugin/server-plugin-executor-serverdocker/pom.xml +++ b/server-plugin/server-plugin-executor-serverdocker/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.executor.serverdocker.ServerDockerModule diff --git a/server-plugin/server-plugin-executor-serverdocker/src/main/java/io/onedev/server/plugin/executor/serverdocker/ServerDockerExecutor.java b/server-plugin/server-plugin-executor-serverdocker/src/main/java/io/onedev/server/plugin/executor/serverdocker/ServerDockerExecutor.java index 46b22d13de..a977641cee 100644 --- a/server-plugin/server-plugin-executor-serverdocker/src/main/java/io/onedev/server/plugin/executor/serverdocker/ServerDockerExecutor.java +++ b/server-plugin/server-plugin-executor-serverdocker/src/main/java/io/onedev/server/plugin/executor/serverdocker/ServerDockerExecutor.java @@ -1,88 +1,50 @@ package io.onedev.server.plugin.executor.serverdocker; -import static io.onedev.agent.DockerExecutorUtils.createNetwork; -import static io.onedev.agent.DockerExecutorUtils.deleteDir; -import static io.onedev.agent.DockerExecutorUtils.deleteNetwork; -import static io.onedev.agent.DockerExecutorUtils.isUseProcessIsolation; -import static io.onedev.agent.DockerExecutorUtils.newDockerKiller; -import static io.onedev.agent.DockerExecutorUtils.startService; -import static io.onedev.k8shelper.KubernetesHelper.cloneRepository; 
-import static io.onedev.k8shelper.KubernetesHelper.installGitCert; -import static io.onedev.k8shelper.KubernetesHelper.stringifyStepPosition; - -import java.io.File; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.atomic.AtomicReference; - -import javax.annotation.Nullable; -import javax.validation.ConstraintValidatorContext; -import javax.validation.constraints.NotEmpty; - -import org.apache.commons.lang3.SystemUtils; - import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.hazelcast.cluster.Member; - import io.onedev.agent.DockerExecutorUtils; import io.onedev.agent.ExecutorUtils; import io.onedev.agent.job.FailedException; import io.onedev.commons.bootstrap.Bootstrap; import io.onedev.commons.loader.AppLoader; -import io.onedev.commons.utils.ExplicitException; -import io.onedev.commons.utils.FileUtils; -import io.onedev.commons.utils.PathUtils; -import io.onedev.commons.utils.StringUtils; -import io.onedev.commons.utils.TaskLogger; +import io.onedev.commons.utils.*; import io.onedev.commons.utils.command.Commandline; import io.onedev.commons.utils.command.ExecutionResult; import io.onedev.commons.utils.command.LineConsumer; -import io.onedev.k8shelper.BuildImageFacade; -import io.onedev.k8shelper.CacheAllocationRequest; -import io.onedev.k8shelper.CacheInstance; -import io.onedev.k8shelper.CheckoutFacade; -import io.onedev.k8shelper.CloneInfo; -import io.onedev.k8shelper.CommandFacade; -import io.onedev.k8shelper.CompositeFacade; -import io.onedev.k8shelper.JobCache; -import io.onedev.k8shelper.KubernetesHelper; -import io.onedev.k8shelper.LeafFacade; -import io.onedev.k8shelper.LeafHandler; -import io.onedev.k8shelper.OsContainer; -import io.onedev.k8shelper.OsExecution; -import io.onedev.k8shelper.OsInfo; -import io.onedev.k8shelper.RunContainerFacade; -import io.onedev.k8shelper.ServerSideFacade; +import io.onedev.k8shelper.*; import io.onedev.server.OneDev; import io.onedev.server.buildspec.Service; import io.onedev.server.cluster.ClusterManager; +import io.onedev.server.cluster.ClusterRunnable; import io.onedev.server.git.location.GitLocation; -import io.onedev.server.job.AgentInfo; import io.onedev.server.job.JobContext; import io.onedev.server.job.JobManager; +import io.onedev.server.job.JobRunnable; +import io.onedev.server.job.ResourceAllocator; import io.onedev.server.model.support.RegistryLogin; import io.onedev.server.model.support.administration.jobexecutor.JobExecutor; import io.onedev.server.plugin.executor.serverdocker.ServerDockerExecutor.TestData; -import io.onedev.server.search.entity.agent.AgentQuery; import io.onedev.server.terminal.CommandlineShell; import io.onedev.server.terminal.Shell; import io.onedev.server.terminal.Terminal; import io.onedev.server.util.EditContext; import io.onedev.server.util.validation.Validatable; import io.onedev.server.util.validation.annotation.ClassValidating; -import io.onedev.server.web.editable.annotation.Editable; -import io.onedev.server.web.editable.annotation.Horizontal; -import io.onedev.server.web.editable.annotation.OmitName; -import io.onedev.server.web.editable.annotation.ShowCondition; +import io.onedev.server.web.editable.annotation.*; import io.onedev.server.web.util.Testable; +import org.apache.commons.lang3.SystemUtils; + +import javax.annotation.Nullable; +import 
javax.validation.ConstraintValidatorContext; +import javax.validation.constraints.NotEmpty; +import java.io.File; +import java.io.Serializable; +import java.util.*; +import java.util.concurrent.atomic.AtomicReference; + +import static io.onedev.agent.DockerExecutorUtils.*; +import static io.onedev.k8shelper.KubernetesHelper.*; @Editable(order=ServerDockerExecutor.ORDER, name="Server Docker Executor", description="This executor runs build jobs as docker containers on OneDev server") @@ -106,6 +68,12 @@ public class ServerDockerExecutor extends JobExecutor implements Testable" + "WARNING: Malicious jobs can take control of whole OneDev " @@ -152,8 +131,31 @@ public class ServerDockerExecutor extends JobExecutor implements Testable--cpus " + + "of relevant containers") + public String getCpuLimit() { + return cpuLimit; + } + + public void setCpuLimit(String cpuLimit) { + this.cpuLimit = cpuLimit; + } + + @Editable(order=50020, group="More Settings", placeholder = "No limit", description = "" + + "Optionally specify memory limit of jobs/services using this executor. This will be " + + "used as option --memory " + + "of relevant containers") + public String getMemoryLimit() { + return memoryLimit; + } + + public void setMemoryLimit(String memoryLimit) { + this.memoryLimit = memoryLimit; + } + + @Editable(order=50050, group="More Settings", description="Optionally specify options to run container") public String getRunOptions() { return runOptions; } @@ -189,11 +191,6 @@ public class ServerDockerExecutor extends JobExecutor implements Testable allocate(CacheAllocationRequest request) { - return getJobManager().allocateCaches(jobContext, request); - } - - @Override - protected void delete(File cacheDir) { - deleteDir(cacheDir, newDocker(), Bootstrap.isInDocker()); - } - - }; - cache.init(false); - - login(jobLogger); - - createNetwork(newDocker(), network, jobLogger); - try { - OsInfo osInfo = OneDev.getInstance(OsInfo.class); - - for (Service jobService: jobContext.getServices()) { - jobLogger.log("Starting service (name: " + jobService.getName() + ", image: " + jobService.getImage() + ")..."); - startService(newDocker(), network, jobService.toMap(), osInfo, jobLogger); - } - - File hostWorkspace = new File(hostBuildHome, "workspace"); - FileUtils.createDir(hostWorkspace); - - AtomicReference hostAuthInfoHome = new AtomicReference<>(null); - try { - cache.installSymbolinks(hostWorkspace); - - jobLogger.log("Copying job dependencies..."); - getJobManager().copyDependencies(jobContext, hostWorkspace); - - String containerBuildHome; - String containerWorkspace; - if (SystemUtils.IS_OS_WINDOWS) { - containerBuildHome = "C:\\onedev-build"; - containerWorkspace = "C:\\onedev-build\\workspace"; - } else { - containerBuildHome = "/onedev-build"; - containerWorkspace = "/onedev-build/workspace"; - } - - getJobManager().reportJobWorkspace(jobContext, containerWorkspace); - CompositeFacade entryFacade = new CompositeFacade(jobContext.getActions()); - boolean successful = entryFacade.execute(new LeafHandler() { - - private int runStepContainer(String image, @Nullable String entrypoint, - List arguments, Map environments, - @Nullable String workingDir, Map volumeMounts, - List position, boolean useTTY) { - // Uninstall symbol links as docker can not process it well - cache.uninstallSymbolinks(hostWorkspace); - containerName = network + "-step-" + stringifyStepPosition(position); - try { - Commandline docker = newDocker(); - docker.addArgs("run", "--name=" + containerName, "--network=" + network); - if 
(getRunOptions() != null) - docker.addArgs(StringUtils.parseQuoteTokens(getRunOptions())); - - docker.addArgs("-v", getHostPath(hostBuildHome.getAbsolutePath()) + ":" + containerBuildHome); - - for (Map.Entry entry: volumeMounts.entrySet()) { - if (entry.getKey().contains("..")) - throw new ExplicitException("Volume mount source path should not contain '..'"); - String hostPath = getHostPath(new File(hostWorkspace, entry.getKey()).getAbsolutePath()); - docker.addArgs("-v", hostPath + ":" + entry.getValue()); - } - - if (entrypoint != null) { - docker.addArgs("-w", containerWorkspace); - } else if (workingDir != null) { - if (workingDir.contains("..")) - throw new ExplicitException("Container working dir should not contain '..'"); - docker.addArgs("-w", workingDir); - } - - for (Map.Entry entry: cache.getAllocations().entrySet()) { - String hostCachePath = entry.getKey().getDirectory(hostCacheHome).getAbsolutePath(); - String containerCachePath = PathUtils.resolve(containerWorkspace, entry.getValue()); - docker.addArgs("-v", getHostPath(hostCachePath) + ":" + containerCachePath); - } - - if (isMountDockerSock()) { - if (getDockerSockPath() != null) { - if (SystemUtils.IS_OS_WINDOWS) - docker.addArgs("-v", getDockerSockPath() + "://./pipe/docker_engine"); - else - docker.addArgs("-v",getDockerSockPath() + ":/var/run/docker.sock"); - } else { - if (SystemUtils.IS_OS_WINDOWS) - docker.addArgs("-v", "//./pipe/docker_engine://./pipe/docker_engine"); - else - docker.addArgs("-v", "/var/run/docker.sock:/var/run/docker.sock"); - } - } - - if (hostAuthInfoHome.get() != null) { - String hostPath = getHostPath(hostAuthInfoHome.get().getAbsolutePath()); - if (SystemUtils.IS_OS_WINDOWS) { - docker.addArgs("-v", hostPath + ":C:\\Users\\ContainerAdministrator\\auth-info"); - docker.addArgs("-v", hostPath + ":C:\\Users\\ContainerUser\\auth-info"); - } else { - docker.addArgs("-v", hostPath + ":/root/auth-info"); - } - } - - for (Map.Entry entry: environments.entrySet()) - docker.addArgs("-e", entry.getKey() + "=" + entry.getValue()); - - docker.addArgs("-e", "ONEDEV_WORKSPACE=" + containerWorkspace); - - if (useTTY) - docker.addArgs("-t"); - - if (entrypoint != null) - docker.addArgs("--entrypoint=" + entrypoint); - - if (isUseProcessIsolation(newDocker(), image, osInfo, jobLogger)) - docker.addArgs("--isolation=process"); - - docker.addArgs(image); - docker.addArgs(arguments.toArray(new String[arguments.size()])); - - ExecutionResult result = docker.execute(ExecutorUtils.newInfoLogger(jobLogger), - ExecutorUtils.newWarningLogger(jobLogger), null, newDockerKiller(newDocker(), - containerName, jobLogger)); - return result.getReturnCode(); - } finally { - containerName = null; - cache.installSymbolinks(hostWorkspace); - } - } - - @Override - public boolean execute(LeafFacade facade, List position) { - runningStep = facade; - try { - String stepNames = entryFacade.getNamesAsString(position); - jobLogger.notice("Running step \"" + stepNames + "\"..."); - - if (facade instanceof CommandFacade) { - CommandFacade commandFacade = (CommandFacade) facade; - - OsExecution execution = commandFacade.getExecution(osInfo); - if (execution.getImage() == null) { - throw new ExplicitException("This step can only be executed by server shell " - + "executor or remote shell executor"); - } - - Commandline entrypoint = DockerExecutorUtils.getEntrypoint( - hostBuildHome, commandFacade, osInfo, hostAuthInfoHome.get() != null); - - int exitCode = runStepContainer(execution.getImage(), entrypoint.executable(), - 
entrypoint.arguments(), new HashMap<>(), null, new HashMap<>(), - position, commandFacade.isUseTTY()); - - if (exitCode != 0) { - jobLogger.error("Step \"" + stepNames + "\" is failed: Command exited with code " + exitCode); - return false; - } - } else if (facade instanceof BuildImageFacade || facade instanceof BuildImageFacade) { - DockerExecutorUtils.buildImage(newDocker(), (BuildImageFacade) facade, - hostBuildHome, jobLogger); - } else if (facade instanceof RunContainerFacade) { - RunContainerFacade rubContainerFacade = (RunContainerFacade) facade; - OsContainer container = rubContainerFacade.getContainer(osInfo); - List arguments = new ArrayList<>(); - if (container.getArgs() != null) - arguments.addAll(Arrays.asList(StringUtils.parseQuoteTokens(container.getArgs()))); - int exitCode = runStepContainer(container.getImage(), null, arguments, container.getEnvMap(), - container.getWorkingDir(), container.getVolumeMounts(), position, rubContainerFacade.isUseTTY()); - if (exitCode != 0) { - jobLogger.error("Step \"" + stepNames + "\" is failed: Container exited with code " + exitCode); - return false; - } - } else if (facade instanceof CheckoutFacade) { - try { - CheckoutFacade checkoutFacade = (CheckoutFacade) facade; - jobLogger.log("Checking out code..."); - if (hostAuthInfoHome.get() == null) - hostAuthInfoHome.set(FileUtils.createTempDir()); - Commandline git = new Commandline(AppLoader.getInstance(GitLocation.class).getExecutable()); - - checkoutFacade.setupWorkingDir(git, hostWorkspace); - git.environments().put("HOME", hostAuthInfoHome.get().getAbsolutePath()); - - CloneInfo cloneInfo = checkoutFacade.getCloneInfo(); - - cloneInfo.writeAuthData(hostAuthInfoHome.get(), git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); - try { - List trustCertContent = getTrustCertContent(); - if (!trustCertContent.isEmpty()) { - installGitCert(new File(hostAuthInfoHome.get(), "trust-cert.pem"), trustCertContent, - git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); - } - - int cloneDepth = checkoutFacade.getCloneDepth(); - - cloneRepository(git, jobContext.getProjectGitDir(), cloneInfo.getCloneUrl(), - jobContext.getRefName(), jobContext.getCommitId().name(), - checkoutFacade.isWithLfs(), checkoutFacade.isWithSubmodules(), - cloneDepth, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); - } finally { - git.clearArgs(); - git.addArgs("config", "--global", "--unset", "core.sshCommand"); - ExecutionResult result = git.execute(ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); - if (result.getReturnCode() != 5 && result.getReturnCode() != 0) - result.checkReturnCode(); - } - } catch (Exception e) { - jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e)); - return false; - } - } else { - ServerSideFacade serverSideFacade = (ServerSideFacade) facade; - try { - serverSideFacade.execute(hostBuildHome, new ServerSideFacade.Runner() { - - @Override - public Map run(File inputDir, Map placeholderValues) { - return getJobManager().runServerStep(jobContext, position, inputDir, - placeholderValues, jobLogger); - } - - }); - } catch (Exception e) { - jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e)); - return false; - } - } - jobLogger.success("Step \"" + stepNames + "\" is successful"); - return true; - } finally { - runningStep = null; - } - } - - @Override - public void skip(LeafFacade facade, List position) { - 
jobLogger.notice("Step \"" + entryFacade.getNamesAsString(position) + "\" is skipped"); - } - - }, new ArrayList<>()); - - if (!successful) - throw new FailedException(); - } finally { - cache.uninstallSymbolinks(hostWorkspace); - // Fix https://code.onedev.io/onedev/server/~issues/597 - if (SystemUtils.IS_OS_WINDOWS) - FileUtils.deleteDir(hostWorkspace); - if (hostAuthInfoHome.get() != null) - FileUtils.deleteDir(hostAuthInfoHome.get()); - } - } finally { - deleteNetwork(newDocker(), network, jobLogger); - } - } finally { - synchronized (hostBuildHome) { - deleteDir(hostBuildHome, newDocker(), Bootstrap.isInDocker()); - } - } + private ResourceAllocator getResourceAllocator() { + return OneDev.getInstance(ResourceAllocator.class); } + private int getConcurrencyNumber() { + if (getConcurrency() != null) + return Integer.parseInt(getConcurrency()); + else + return 0; + } + @Override - public void resume(JobContext jobContext) { - if (hostBuildHome != null) synchronized (hostBuildHome) { - if (hostBuildHome.exists()) - FileUtils.touchFile(new File(hostBuildHome, "continue")); - } + public void execute(JobContext jobContext) { + ClusterRunnable runnable = () -> { + getJobManager().runJobLocal(jobContext, new JobRunnable() { + + private static final long serialVersionUID = 1L; + + @Override + public void run(TaskLogger jobLogger) { + notifyJobRunning(jobContext.getBuildId(), null); + + if (OneDev.getK8sService() != null) { + throw new ExplicitException("" + + "OneDev running inside kubernetes cluster does not support server docker executor. " + + "Please use kubernetes executor instead"); + } + + hostBuildHome = FileUtils.createTempDir("onedev-build"); + try { + String network = getName() + "-" + jobContext.getProjectId() + "-" + + jobContext.getBuildNumber() + "-" + jobContext.getRetried(); + + Member member = getClusterManager().getHazelcastInstance().getCluster().getLocalMember(); + jobLogger.log(String.format("Executing job (executor: %s, server: %s, network: %s)...", getName(), + member.getAddress().getHost() + ":" + member.getAddress().getPort(), network)); + + File hostCacheHome = getCacheHome(jobContext.getJobExecutor()); + + jobLogger.log("Setting up job cache..."); + JobCache cache = new JobCache(hostCacheHome) { + + @Override + protected Map allocate(CacheAllocationRequest request) { + return getJobManager().allocateCaches(jobContext, request); + } + + @Override + protected void delete(File cacheDir) { + deleteDir(cacheDir, newDocker(), Bootstrap.isInDocker()); + } + + }; + cache.init(false); + + login(jobLogger); + + createNetwork(newDocker(), network, jobLogger); + try { + OsInfo osInfo = OneDev.getInstance(OsInfo.class); + + for (Service jobService : jobContext.getServices()) { + jobLogger.log("Starting service (name: " + jobService.getName() + ", image: " + jobService.getImage() + ")..."); + startService(newDocker(), network, jobService.toMap(), osInfo, getCpuLimit(), getMemoryLimit(), jobLogger); + } + + File hostWorkspace = new File(hostBuildHome, "workspace"); + FileUtils.createDir(hostWorkspace); + + AtomicReference hostAuthInfoHome = new AtomicReference<>(null); + try { + cache.installSymbolinks(hostWorkspace); + + jobLogger.log("Copying job dependencies..."); + getJobManager().copyDependencies(jobContext, hostWorkspace); + + String containerBuildHome; + String containerWorkspace; + if (SystemUtils.IS_OS_WINDOWS) { + containerBuildHome = "C:\\onedev-build"; + containerWorkspace = "C:\\onedev-build\\workspace"; + } else { + containerBuildHome = "/onedev-build"; + 
containerWorkspace = "/onedev-build/workspace"; + } + + getJobManager().reportJobWorkspace(jobContext, containerWorkspace); + CompositeFacade entryFacade = new CompositeFacade(jobContext.getActions()); + boolean successful = entryFacade.execute(new LeafHandler() { + + private int runStepContainer(String image, @Nullable String entrypoint, + List arguments, Map environments, + @Nullable String workingDir, Map volumeMounts, + List position, boolean useTTY) { + // Uninstall symbol links as docker can not process it well + cache.uninstallSymbolinks(hostWorkspace); + containerName = network + "-step-" + stringifyStepPosition(position); + try { + Commandline docker = newDocker(); + docker.addArgs("run", "--name=" + containerName, "--network=" + network); + if (getCpuLimit() != null) + docker.addArgs("--cpus", getCpuLimit()); + if (getMemoryLimit() != null) + docker.addArgs("--memory", getMemoryLimit()); + if (getRunOptions() != null) + docker.addArgs(StringUtils.parseQuoteTokens(getRunOptions())); + + docker.addArgs("-v", getHostPath(hostBuildHome.getAbsolutePath()) + ":" + containerBuildHome); + + for (Map.Entry entry : volumeMounts.entrySet()) { + if (entry.getKey().contains("..")) + throw new ExplicitException("Volume mount source path should not contain '..'"); + String hostPath = getHostPath(new File(hostWorkspace, entry.getKey()).getAbsolutePath()); + docker.addArgs("-v", hostPath + ":" + entry.getValue()); + } + + if (entrypoint != null) { + docker.addArgs("-w", containerWorkspace); + } else if (workingDir != null) { + if (workingDir.contains("..")) + throw new ExplicitException("Container working dir should not contain '..'"); + docker.addArgs("-w", workingDir); + } + + for (Map.Entry entry : cache.getAllocations().entrySet()) { + String hostCachePath = entry.getKey().getDirectory(hostCacheHome).getAbsolutePath(); + String containerCachePath = PathUtils.resolve(containerWorkspace, entry.getValue()); + docker.addArgs("-v", getHostPath(hostCachePath) + ":" + containerCachePath); + } + + if (isMountDockerSock()) { + if (getDockerSockPath() != null) { + if (SystemUtils.IS_OS_WINDOWS) + docker.addArgs("-v", getDockerSockPath() + "://./pipe/docker_engine"); + else + docker.addArgs("-v", getDockerSockPath() + ":/var/run/docker.sock"); + } else { + if (SystemUtils.IS_OS_WINDOWS) + docker.addArgs("-v", "//./pipe/docker_engine://./pipe/docker_engine"); + else + docker.addArgs("-v", "/var/run/docker.sock:/var/run/docker.sock"); + } + } + + if (hostAuthInfoHome.get() != null) { + String hostPath = getHostPath(hostAuthInfoHome.get().getAbsolutePath()); + if (SystemUtils.IS_OS_WINDOWS) { + docker.addArgs("-v", hostPath + ":C:\\Users\\ContainerAdministrator\\auth-info"); + docker.addArgs("-v", hostPath + ":C:\\Users\\ContainerUser\\auth-info"); + } else { + docker.addArgs("-v", hostPath + ":/root/auth-info"); + } + } + + for (Map.Entry entry : environments.entrySet()) + docker.addArgs("-e", entry.getKey() + "=" + entry.getValue()); + + docker.addArgs("-e", "ONEDEV_WORKSPACE=" + containerWorkspace); + + if (useTTY) + docker.addArgs("-t"); + + if (entrypoint != null) + docker.addArgs("--entrypoint=" + entrypoint); + + if (isUseProcessIsolation(newDocker(), image, osInfo, jobLogger)) + docker.addArgs("--isolation=process"); + + docker.addArgs(image); + docker.addArgs(arguments.toArray(new String[arguments.size()])); + + ExecutionResult result = docker.execute(ExecutorUtils.newInfoLogger(jobLogger), + ExecutorUtils.newWarningLogger(jobLogger), null, newDockerKiller(newDocker(), + containerName, 
jobLogger)); + return result.getReturnCode(); + } finally { + containerName = null; + cache.installSymbolinks(hostWorkspace); + } + } + + @Override + public boolean execute(LeafFacade facade, List position) { + runningStep = facade; + try { + String stepNames = entryFacade.getNamesAsString(position); + jobLogger.notice("Running step \"" + stepNames + "\"..."); + + if (facade instanceof CommandFacade) { + CommandFacade commandFacade = (CommandFacade) facade; + + OsExecution execution = commandFacade.getExecution(osInfo); + if (execution.getImage() == null) { + throw new ExplicitException("This step can only be executed by server shell " + + "executor or remote shell executor"); + } + + Commandline entrypoint = DockerExecutorUtils.getEntrypoint( + hostBuildHome, commandFacade, osInfo, hostAuthInfoHome.get() != null); + + int exitCode = runStepContainer(execution.getImage(), entrypoint.executable(), + entrypoint.arguments(), new HashMap<>(), null, new HashMap<>(), + position, commandFacade.isUseTTY()); + + if (exitCode != 0) { + jobLogger.error("Step \"" + stepNames + "\" is failed: Command exited with code " + exitCode); + return false; + } + } else if (facade instanceof BuildImageFacade) { + DockerExecutorUtils.buildImage(newDocker(), (BuildImageFacade) facade, + hostBuildHome, jobLogger); + } else if (facade instanceof RunContainerFacade) { + RunContainerFacade runContainerFacade = (RunContainerFacade) facade; + OsContainer container = runContainerFacade.getContainer(osInfo); + List arguments = new ArrayList<>(); + if (container.getArgs() != null) + arguments.addAll(Arrays.asList(StringUtils.parseQuoteTokens(container.getArgs()))); + int exitCode = runStepContainer(container.getImage(), null, arguments, container.getEnvMap(), + container.getWorkingDir(), container.getVolumeMounts(), position, runContainerFacade.isUseTTY()); + if (exitCode != 0) { + jobLogger.error("Step \"" + stepNames + "\" is failed: Container exited with code " + exitCode); + return false; + } + } else if (facade instanceof CheckoutFacade) { + try { + CheckoutFacade checkoutFacade = (CheckoutFacade) facade; + jobLogger.log("Checking out code..."); + if (hostAuthInfoHome.get() == null) + hostAuthInfoHome.set(FileUtils.createTempDir()); + Commandline git = new Commandline(AppLoader.getInstance(GitLocation.class).getExecutable()); + + checkoutFacade.setupWorkingDir(git, hostWorkspace); + git.environments().put("HOME", hostAuthInfoHome.get().getAbsolutePath()); + + CloneInfo cloneInfo = checkoutFacade.getCloneInfo(); + + cloneInfo.writeAuthData(hostAuthInfoHome.get(), git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); + try { + List trustCertContent = getTrustCertContent(); + if (!trustCertContent.isEmpty()) { + installGitCert(new File(hostAuthInfoHome.get(), "trust-cert.pem"), trustCertContent, + git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); + } + + int cloneDepth = checkoutFacade.getCloneDepth(); + + cloneRepository(git, jobContext.getProjectGitDir(), cloneInfo.getCloneUrl(), + jobContext.getRefName(), jobContext.getCommitId().name(), + checkoutFacade.isWithLfs(), checkoutFacade.isWithSubmodules(), + cloneDepth, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); + } finally { + git.clearArgs(); + git.addArgs("config", "--global", "--unset", "core.sshCommand"); + ExecutionResult result = git.execute(ExecutorUtils.newInfoLogger(jobLogger), 
ExecutorUtils.newWarningLogger(jobLogger)); + if (result.getReturnCode() != 5 && result.getReturnCode() != 0) + result.checkReturnCode(); + } + } catch (Exception e) { + jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e)); + return false; + } + } else { + ServerSideFacade serverSideFacade = (ServerSideFacade) facade; + try { + serverSideFacade.execute(hostBuildHome, new ServerSideFacade.Runner() { + + @Override + public Map run(File inputDir, Map placeholderValues) { + return getJobManager().runServerStep(jobContext, position, inputDir, + placeholderValues, jobLogger); + } + + }); + } catch (Exception e) { + jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e)); + return false; + } + } + jobLogger.success("Step \"" + stepNames + "\" is successful"); + return true; + } finally { + runningStep = null; + } + } + + @Override + public void skip(LeafFacade facade, List position) { + jobLogger.notice("Step \"" + entryFacade.getNamesAsString(position) + "\" is skipped"); + } + + }, new ArrayList<>()); + + if (!successful) + throw new FailedException(); + } finally { + cache.uninstallSymbolinks(hostWorkspace); + // Fix https://code.onedev.io/onedev/server/~issues/597 + if (SystemUtils.IS_OS_WINDOWS) + FileUtils.deleteDir(hostWorkspace); + if (hostAuthInfoHome.get() != null) + FileUtils.deleteDir(hostAuthInfoHome.get()); + } + } finally { + deleteNetwork(newDocker(), network, jobLogger); + } + } finally { + synchronized (hostBuildHome) { + deleteDir(hostBuildHome, newDocker(), Bootstrap.isInDocker()); + } + } + } + + @Override + public void resume(JobContext jobContext) { + if (hostBuildHome != null) synchronized (hostBuildHome) { + if (hostBuildHome.exists()) + FileUtils.touchFile(new File(hostBuildHome, "continue")); + } + } + + @Override + public Shell openShell(JobContext jobContext, Terminal terminal) { + String containerNameCopy = containerName; + if (containerNameCopy != null) { + Commandline docker = newDocker(); + docker.addArgs("exec", "-it", containerNameCopy); + if (runningStep instanceof CommandFacade) { + CommandFacade commandStep = (CommandFacade) runningStep; + docker.addArgs(commandStep.getShell(SystemUtils.IS_OS_WINDOWS, null)); + } else if (SystemUtils.IS_OS_WINDOWS) { + docker.addArgs("cmd"); + } else { + docker.addArgs("sh"); + } + return new CommandlineShell(terminal, docker); + } else if (hostBuildHome != null) { + Commandline shell; + if (SystemUtils.IS_OS_WINDOWS) + shell = new Commandline("cmd"); + else + shell = new Commandline("sh"); + shell.workingDir(new File(hostBuildHome, "workspace")); + return new CommandlineShell(terminal, shell); + } else { + throw new ExplicitException("Shell not ready"); + } + } + + }); + }; + getResourceAllocator().runServerJob(getName(), getConcurrencyNumber(), + jobContext.getServices().size() + 1, runnable); } private void login(TaskLogger jobLogger) { @@ -584,6 +639,10 @@ public class ServerDockerExecutor extends JobExecutor implements Testable io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.executor.servershell.ServerShellModule diff --git a/server-plugin/server-plugin-executor-servershell/src/main/java/io/onedev/server/plugin/executor/servershell/ServerShellExecutor.java b/server-plugin/server-plugin-executor-servershell/src/main/java/io/onedev/server/plugin/executor/servershell/ServerShellExecutor.java index 298eb88cf1..77dee837fe 100644 --- 
a/server-plugin/server-plugin-executor-servershell/src/main/java/io/onedev/server/plugin/executor/servershell/ServerShellExecutor.java +++ b/server-plugin/server-plugin-executor-servershell/src/main/java/io/onedev/server/plugin/executor/servershell/ServerShellExecutor.java @@ -1,24 +1,6 @@ package io.onedev.server.plugin.executor.servershell; -import static io.onedev.agent.ShellExecutorUtils.testCommands; -import static io.onedev.k8shelper.KubernetesHelper.cloneRepository; -import static io.onedev.k8shelper.KubernetesHelper.installGitCert; -import static io.onedev.k8shelper.KubernetesHelper.replacePlaceholders; - -import java.io.File; -import java.io.IOException; -import java.io.Serializable; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.validation.constraints.Size; - -import org.apache.commons.lang.SystemUtils; - import com.hazelcast.cluster.Member; - import io.onedev.agent.ExecutorUtils; import io.onedev.agent.job.FailedException; import io.onedev.commons.bootstrap.Bootstrap; @@ -28,37 +10,39 @@ import io.onedev.commons.utils.FileUtils; import io.onedev.commons.utils.TaskLogger; import io.onedev.commons.utils.command.Commandline; import io.onedev.commons.utils.command.ExecutionResult; -import io.onedev.k8shelper.BuildImageFacade; -import io.onedev.k8shelper.CacheAllocationRequest; -import io.onedev.k8shelper.CacheInstance; -import io.onedev.k8shelper.CheckoutFacade; -import io.onedev.k8shelper.CloneInfo; -import io.onedev.k8shelper.CommandFacade; -import io.onedev.k8shelper.CompositeFacade; -import io.onedev.k8shelper.JobCache; -import io.onedev.k8shelper.LeafFacade; -import io.onedev.k8shelper.LeafHandler; -import io.onedev.k8shelper.OsExecution; -import io.onedev.k8shelper.OsInfo; -import io.onedev.k8shelper.RunContainerFacade; -import io.onedev.k8shelper.ServerSideFacade; +import io.onedev.k8shelper.*; import io.onedev.server.OneDev; import io.onedev.server.cluster.ClusterManager; +import io.onedev.server.cluster.ClusterRunnable; import io.onedev.server.git.location.GitLocation; -import io.onedev.server.job.AgentInfo; import io.onedev.server.job.JobContext; import io.onedev.server.job.JobManager; +import io.onedev.server.job.JobRunnable; +import io.onedev.server.job.ResourceAllocator; import io.onedev.server.model.support.administration.jobexecutor.JobExecutor; import io.onedev.server.plugin.executor.servershell.ServerShellExecutor.TestData; -import io.onedev.server.search.entity.agent.AgentQuery; import io.onedev.server.terminal.CommandlineShell; import io.onedev.server.terminal.Shell; import io.onedev.server.terminal.Terminal; import io.onedev.server.util.validation.annotation.Code; import io.onedev.server.web.editable.annotation.Editable; import io.onedev.server.web.editable.annotation.Horizontal; +import io.onedev.server.web.editable.annotation.Numeric; import io.onedev.server.web.editable.annotation.OmitName; import io.onedev.server.web.util.Testable; +import org.apache.commons.lang.SystemUtils; + +import javax.validation.constraints.Size; +import java.io.File; +import java.io.IOException; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static io.onedev.agent.ShellExecutorUtils.testCommands; +import static io.onedev.k8shelper.KubernetesHelper.*; @Editable(order=ServerShellExecutor.ORDER, name="Server Shell Executor", description="" + "This executor runs build jobs with OneDev server's shell facility.
" @@ -72,11 +56,24 @@ public class ServerShellExecutor extends JobExecutor implements Testable { + getJobManager().runJobLocal(jobContext, new JobRunnable() { + + private static final long serialVersionUID = 1L; @Override - protected Map allocate(CacheAllocationRequest request) { - return getJobManager().allocateCaches(jobContext, request); - } + public void run(TaskLogger jobLogger) { + notifyJobRunning(jobContext.getBuildId(), null); + + if (OneDev.getK8sService() != null) { + throw new ExplicitException("" + + "OneDev running inside kubernetes cluster does not support server shell executor. " + + "Please use kubernetes executor instead"); + } else if (Bootstrap.isInDocker()) { + throw new ExplicitException("Server shell executor is only supported when OneDev is installed " + + "directly on bare metal/virtual machine"); + } - @Override - protected void delete(File cacheDir) { - FileUtils.cleanDir(cacheDir); - } - - }; - cache.init(true); - FileUtils.createDir(workspaceDir); - - cache.installSymbolinks(workspaceDir); - - jobLogger.log("Copying job dependencies..."); - getJobManager().copyDependencies(jobContext, workspaceDir); - - File userDir = new File(buildDir, "user"); - FileUtils.createDir(userDir); - - getJobManager().reportJobWorkspace(jobContext, workspaceDir.getAbsolutePath()); - - CompositeFacade entryFacade = new CompositeFacade(jobContext.getActions()); - - OsInfo osInfo = OneDev.getInstance(OsInfo.class); - boolean successful = entryFacade.execute(new LeafHandler() { - - @Override - public boolean execute(LeafFacade facade, List position) { - runningStep = facade; + buildDir = FileUtils.createTempDir("onedev-build"); + File workspaceDir = new File(buildDir, "workspace"); try { - String stepNames = entryFacade.getNamesAsString(position); - jobLogger.notice("Running step \"" + stepNames + "\"..."); - - if (facade instanceof CommandFacade) { - CommandFacade commandFacade = (CommandFacade) facade; - OsExecution execution = commandFacade.getExecution(osInfo); - if (execution.getImage() != null) { - throw new ExplicitException("This step can only be executed by server docker executor, " - + "remote docker executor, or kubernetes executor"); - } - - commandFacade.generatePauseCommand(buildDir); - - File jobScriptFile = new File(buildDir, "job-commands" + commandFacade.getScriptExtension()); - try { - FileUtils.writeLines( - jobScriptFile, - new ArrayList<>(replacePlaceholders(execution.getCommands(), buildDir)), - commandFacade.getEndOfLine()); - } catch (IOException e) { - throw new RuntimeException(e); - } - - Commandline interpreter = commandFacade.getScriptInterpreter(); - Map environments = new HashMap<>(); - environments.put("GIT_HOME", userDir.getAbsolutePath()); - environments.put("ONEDEV_WORKSPACE", workspaceDir.getAbsolutePath()); - interpreter.workingDir(workspaceDir).environments(environments); - interpreter.addArgs(jobScriptFile.getAbsolutePath()); - - ExecutionResult result = interpreter.execute(ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); - if (result.getReturnCode() != 0) { - jobLogger.error("Step \"" + stepNames + "\" is failed: Command exited with code " + result.getReturnCode()); - return false; - } - } else if (facade instanceof RunContainerFacade || facade instanceof BuildImageFacade) { - throw new ExplicitException("This step can only be executed by server docker executor, " - + "remote docker executor, or kubernetes executor"); - } else if (facade instanceof CheckoutFacade) { - try { - CheckoutFacade checkoutFacade = 
(CheckoutFacade) facade; - jobLogger.log("Checking out code..."); - Commandline git = new Commandline(AppLoader.getInstance(GitLocation.class).getExecutable()); - - checkoutFacade.setupWorkingDir(git, workspaceDir); - - Map environments = new HashMap<>(); - environments.put("HOME", userDir.getAbsolutePath()); - git.environments(environments); + Member server = getClusterManager().getHazelcastInstance().getCluster().getLocalMember(); + jobLogger.log(String.format("Executing job (executor: %s, server: %s)...", getName(), + server.getAddress().getHost() + ":" + server.getAddress().getPort())); - CloneInfo cloneInfo = checkoutFacade.getCloneInfo(); - - cloneInfo.writeAuthData(userDir, git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); - - List trustCertContent = getTrustCertContent(); - if (!trustCertContent.isEmpty()) { - installGitCert(new File(userDir, "trust-cert.pem"), trustCertContent, - git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); - } - int cloneDepth = checkoutFacade.getCloneDepth(); - - cloneRepository(git, jobContext.getProjectGitDir(), cloneInfo.getCloneUrl(), jobContext.getRefName(), - jobContext.getCommitId().name(), checkoutFacade.isWithLfs(), checkoutFacade.isWithSubmodules(), - cloneDepth, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); - } catch (Exception e) { - jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e)); - return false; - } - } else { - ServerSideFacade serverSideFacade = (ServerSideFacade) facade; - try { - serverSideFacade.execute(buildDir, new ServerSideFacade.Runner() { - - @Override - public Map run(File inputDir, Map placeholderValues) { - return getJobManager().runServerStep(jobContext, position, inputDir, - placeholderValues, jobLogger); - } - - }); - } catch (Exception e) { - jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e)); - return false; - } + if (!jobContext.getServices().isEmpty()) { + throw new ExplicitException("This job requires services, which can only be supported " + + "by docker aware executors"); } - jobLogger.success("Step \"" + stepNames + "\" is successful"); - return true; + + File cacheHomeDir = getCacheHome(jobContext.getJobExecutor()); + + jobLogger.log("Setting up job cache..."); + JobCache cache = new JobCache(cacheHomeDir) { + + @Override + protected Map allocate(CacheAllocationRequest request) { + return getJobManager().allocateCaches(jobContext, request); + } + + @Override + protected void delete(File cacheDir) { + FileUtils.cleanDir(cacheDir); + } + + }; + cache.init(true); + FileUtils.createDir(workspaceDir); + + cache.installSymbolinks(workspaceDir); + + jobLogger.log("Copying job dependencies..."); + getJobManager().copyDependencies(jobContext, workspaceDir); + + File userDir = new File(buildDir, "user"); + FileUtils.createDir(userDir); + + getJobManager().reportJobWorkspace(jobContext, workspaceDir.getAbsolutePath()); + + CompositeFacade entryFacade = new CompositeFacade(jobContext.getActions()); + + OsInfo osInfo = OneDev.getInstance(OsInfo.class); + boolean successful = entryFacade.execute(new LeafHandler() { + + @Override + public boolean execute(LeafFacade facade, List position) { + runningStep = facade; + try { + String stepNames = entryFacade.getNamesAsString(position); + jobLogger.notice("Running step \"" + stepNames + "\"..."); + + if (facade instanceof 
CommandFacade) { + CommandFacade commandFacade = (CommandFacade) facade; + OsExecution execution = commandFacade.getExecution(osInfo); + if (execution.getImage() != null) { + throw new ExplicitException("This step can only be executed by server docker executor, " + + "remote docker executor, or kubernetes executor"); + } + + commandFacade.generatePauseCommand(buildDir); + + File jobScriptFile = new File(buildDir, "job-commands" + commandFacade.getScriptExtension()); + try { + FileUtils.writeLines( + jobScriptFile, + new ArrayList<>(replacePlaceholders(execution.getCommands(), buildDir)), + commandFacade.getEndOfLine()); + } catch (IOException e) { + throw new RuntimeException(e); + } + + Commandline interpreter = commandFacade.getScriptInterpreter(); + Map environments = new HashMap<>(); + environments.put("GIT_HOME", userDir.getAbsolutePath()); + environments.put("ONEDEV_WORKSPACE", workspaceDir.getAbsolutePath()); + interpreter.workingDir(workspaceDir).environments(environments); + interpreter.addArgs(jobScriptFile.getAbsolutePath()); + + ExecutionResult result = interpreter.execute(ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); + if (result.getReturnCode() != 0) { + jobLogger.error("Step \"" + stepNames + "\" is failed: Command exited with code " + result.getReturnCode()); + return false; + } + } else if (facade instanceof RunContainerFacade || facade instanceof BuildImageFacade) { + throw new ExplicitException("This step can only be executed by server docker executor, " + + "remote docker executor, or kubernetes executor"); + } else if (facade instanceof CheckoutFacade) { + try { + CheckoutFacade checkoutFacade = (CheckoutFacade) facade; + jobLogger.log("Checking out code..."); + Commandline git = new Commandline(AppLoader.getInstance(GitLocation.class).getExecutable()); + + checkoutFacade.setupWorkingDir(git, workspaceDir); + + Map environments = new HashMap<>(); + environments.put("HOME", userDir.getAbsolutePath()); + git.environments(environments); + + CloneInfo cloneInfo = checkoutFacade.getCloneInfo(); + + cloneInfo.writeAuthData(userDir, git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); + + List trustCertContent = getTrustCertContent(); + if (!trustCertContent.isEmpty()) { + installGitCert(new File(userDir, "trust-cert.pem"), trustCertContent, + git, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); + } + + int cloneDepth = checkoutFacade.getCloneDepth(); + + cloneRepository(git, jobContext.getProjectGitDir(), cloneInfo.getCloneUrl(), jobContext.getRefName(), + jobContext.getCommitId().name(), checkoutFacade.isWithLfs(), checkoutFacade.isWithSubmodules(), + cloneDepth, ExecutorUtils.newInfoLogger(jobLogger), ExecutorUtils.newWarningLogger(jobLogger)); + } catch (Exception e) { + jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e)); + return false; + } + } else { + ServerSideFacade serverSideFacade = (ServerSideFacade) facade; + try { + serverSideFacade.execute(buildDir, new ServerSideFacade.Runner() { + + @Override + public Map run(File inputDir, Map placeholderValues) { + return getJobManager().runServerStep(jobContext, position, inputDir, + placeholderValues, jobLogger); + } + + }); + } catch (Exception e) { + jobLogger.error("Step \"" + stepNames + "\" is failed: " + getErrorMessage(e)); + return false; + } + } + jobLogger.success("Step \"" + stepNames + "\" is successful"); + return true; + } finally { + runningStep = null; + } + } + + 
@Override + public void skip(LeafFacade facade, List position) { + jobLogger.notice("Step \"" + entryFacade.getNamesAsString(position) + "\" is skipped"); + } + + }, new ArrayList<>()); + + if (!successful) + throw new FailedException(); } finally { - runningStep = null; + // Fix https://code.onedev.io/onedev/server/~issues/597 + if (SystemUtils.IS_OS_WINDOWS && workspaceDir.exists()) + FileUtils.deleteDir(workspaceDir); + synchronized (buildDir) { + FileUtils.deleteDir(buildDir); + } } } @Override - public void skip(LeafFacade facade, List position) { - jobLogger.notice("Step \"" + entryFacade.getNamesAsString(position) + "\" is skipped"); + public void resume(JobContext jobContext) { + if (buildDir != null) synchronized (buildDir) { + if (buildDir.exists()) + FileUtils.touchFile(new File(buildDir, "continue")); + } + } + + @Override + public Shell openShell(JobContext jobContext, Terminal terminal) { + if (buildDir != null) { + Commandline shell; + if (runningStep instanceof CommandFacade) { + CommandFacade commandStep = (CommandFacade) runningStep; + shell = new Commandline(commandStep.getShell(SystemUtils.IS_OS_WINDOWS, null)[0]); + } else if (SystemUtils.IS_OS_WINDOWS) { + shell = new Commandline("cmd"); + } else { + shell = new Commandline("sh"); + } + shell.workingDir(new File(buildDir, "workspace")); + return new CommandlineShell(terminal, shell); + } else { + throw new ExplicitException("Shell not ready"); + } } - }, new ArrayList<>()); - - if (!successful) - throw new FailedException(); - } finally { - // Fix https://code.onedev.io/onedev/server/~issues/597 - if (SystemUtils.IS_OS_WINDOWS && workspaceDir.exists()) - FileUtils.deleteDir(workspaceDir); - synchronized (buildDir) { - FileUtils.deleteDir(buildDir); - } - } - } - - @Override - public void resume(JobContext jobContext) { - if (buildDir != null) synchronized (buildDir) { - if (buildDir.exists()) - FileUtils.touchFile(new File(buildDir, "continue")); - } + }); + }; + getResourceAllocator().runServerJob(getName(), getConcurrencyNumber(), 1, runnable); } @Override @@ -309,23 +345,4 @@ public class ServerShellExecutor extends JobExecutor implements Testable io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.imports.bitbucketcloud.BitbucketModule diff --git a/server-plugin/server-plugin-import-gitea/pom.xml b/server-plugin/server-plugin-import-gitea/pom.xml index 2c2f4485cb..23fe264403 100644 --- a/server-plugin/server-plugin-import-gitea/pom.xml +++ b/server-plugin/server-plugin-import-gitea/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.imports.gitea.GiteaModule diff --git a/server-plugin/server-plugin-import-github/pom.xml b/server-plugin/server-plugin-import-github/pom.xml index 6fbc8a1875..6392986982 100644 --- a/server-plugin/server-plugin-import-github/pom.xml +++ b/server-plugin/server-plugin-import-github/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.imports.github.GitHubModule diff --git a/server-plugin/server-plugin-import-gitlab/pom.xml b/server-plugin/server-plugin-import-gitlab/pom.xml index b1eb9b9485..383033f2eb 100644 --- a/server-plugin/server-plugin-import-gitlab/pom.xml +++ b/server-plugin/server-plugin-import-gitlab/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.imports.gitlab.GitLabModule diff --git a/server-plugin/server-plugin-import-jiracloud/pom.xml b/server-plugin/server-plugin-import-jiracloud/pom.xml index 6b102fd7b7..e9c06daa1e 100644 --- 
a/server-plugin/server-plugin-import-jiracloud/pom.xml +++ b/server-plugin/server-plugin-import-jiracloud/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.imports.jiracloud.JiraModule diff --git a/server-plugin/server-plugin-import-url/pom.xml b/server-plugin/server-plugin-import-url/pom.xml index 689e14c5c7..81987d375b 100644 --- a/server-plugin/server-plugin-import-url/pom.xml +++ b/server-plugin/server-plugin-import-url/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.imports.url.UrlModule diff --git a/server-plugin/server-plugin-import-youtrack/pom.xml b/server-plugin/server-plugin-import-youtrack/pom.xml index 0ce54c1f58..93e00306c9 100644 --- a/server-plugin/server-plugin-import-youtrack/pom.xml +++ b/server-plugin/server-plugin-import-youtrack/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.imports.youtrack.YouTrackModule diff --git a/server-plugin/server-plugin-notification-discord/pom.xml b/server-plugin/server-plugin-notification-discord/pom.xml index 65a084b399..6c3150a990 100644 --- a/server-plugin/server-plugin-notification-discord/pom.xml +++ b/server-plugin/server-plugin-notification-discord/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.notification.discord.DiscordModule diff --git a/server-plugin/server-plugin-notification-slack/pom.xml b/server-plugin/server-plugin-notification-slack/pom.xml index c39a100581..251a9f86d3 100644 --- a/server-plugin/server-plugin-notification-slack/pom.xml +++ b/server-plugin/server-plugin-notification-slack/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.notification.slack.SlackModule diff --git a/server-plugin/server-plugin-report-checkstyle/pom.xml b/server-plugin/server-plugin-report-checkstyle/pom.xml index 7c17f20f33..31afa9324f 100644 --- a/server-plugin/server-plugin-report-checkstyle/pom.xml +++ b/server-plugin/server-plugin-report-checkstyle/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-report-clover/pom.xml b/server-plugin/server-plugin-report-clover/pom.xml index 5df601da0f..f3694fffab 100644 --- a/server-plugin/server-plugin-report-clover/pom.xml +++ b/server-plugin/server-plugin-report-clover/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-report-coverage/pom.xml b/server-plugin/server-plugin-report-coverage/pom.xml index 03566b924d..27e1b5e5a3 100644 --- a/server-plugin/server-plugin-report-coverage/pom.xml +++ b/server-plugin/server-plugin-report-coverage/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.report.coverage.CoverageModule diff --git a/server-plugin/server-plugin-report-cpd/pom.xml b/server-plugin/server-plugin-report-cpd/pom.xml index b286c33cab..499910501f 100644 --- a/server-plugin/server-plugin-report-cpd/pom.xml +++ b/server-plugin/server-plugin-report-cpd/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-report-jacoco/pom.xml b/server-plugin/server-plugin-report-jacoco/pom.xml index f1e01fcc56..aa249f4ff5 100644 --- a/server-plugin/server-plugin-report-jacoco/pom.xml +++ b/server-plugin/server-plugin-report-jacoco/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-report-jest/pom.xml b/server-plugin/server-plugin-report-jest/pom.xml index 
7b12ab788d..c503f82863 100644 --- a/server-plugin/server-plugin-report-jest/pom.xml +++ b/server-plugin/server-plugin-report-jest/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-report-junit/pom.xml b/server-plugin/server-plugin-report-junit/pom.xml index c2eaffbe94..fbdac80732 100644 --- a/server-plugin/server-plugin-report-junit/pom.xml +++ b/server-plugin/server-plugin-report-junit/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-report-markdown/pom.xml b/server-plugin/server-plugin-report-markdown/pom.xml index 71f30a4bef..7f00be8838 100644 --- a/server-plugin/server-plugin-report-markdown/pom.xml +++ b/server-plugin/server-plugin-report-markdown/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.report.markdown.MarkdownModule diff --git a/server-plugin/server-plugin-report-pmd/pom.xml b/server-plugin/server-plugin-report-pmd/pom.xml index db00fb9765..eadfa49e8d 100644 --- a/server-plugin/server-plugin-report-pmd/pom.xml +++ b/server-plugin/server-plugin-report-pmd/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-report-problem/pom.xml b/server-plugin/server-plugin-report-problem/pom.xml index ff215d5bcc..0d06f209f3 100644 --- a/server-plugin/server-plugin-report-problem/pom.xml +++ b/server-plugin/server-plugin-report-problem/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.report.problem.ProblemModule diff --git a/server-plugin/server-plugin-report-spotbugs/pom.xml b/server-plugin/server-plugin-report-spotbugs/pom.xml index e1eeeddd6b..736e684341 100644 --- a/server-plugin/server-plugin-report-spotbugs/pom.xml +++ b/server-plugin/server-plugin-report-spotbugs/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 diff --git a/server-plugin/server-plugin-report-unittest/pom.xml b/server-plugin/server-plugin-report-unittest/pom.xml index c6b251f55e..c9e893b405 100644 --- a/server-plugin/server-plugin-report-unittest/pom.xml +++ b/server-plugin/server-plugin-report-unittest/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.report.unittest.UnitTestModule diff --git a/server-plugin/server-plugin-sso-discord/pom.xml b/server-plugin/server-plugin-sso-discord/pom.xml index 2ebf93a832..5ff51eff84 100644 --- a/server-plugin/server-plugin-sso-discord/pom.xml +++ b/server-plugin/server-plugin-sso-discord/pom.xml @@ -5,7 +5,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.sso.discord.DiscordModule diff --git a/server-plugin/server-plugin-sso-openid/pom.xml b/server-plugin/server-plugin-sso-openid/pom.xml index 584bd17d30..d0d4c60e01 100644 --- a/server-plugin/server-plugin-sso-openid/pom.xml +++ b/server-plugin/server-plugin-sso-openid/pom.xml @@ -6,7 +6,7 @@ io.onedev server-plugin - 7.8.17 + 7.9.0 io.onedev.server.plugin.sso.openid.OpenIdModule diff --git a/server-product/pom.xml b/server-product/pom.xml index 3e8eb685e7..bf58c7c61d 100644 --- a/server-product/pom.xml +++ b/server-product/pom.xml @@ -7,7 +7,7 @@ io.onedev server - 7.8.17 + 7.9.0 diff --git a/server-product/src/main/java/io/onedev/server/product/DefaultServerConfig.java b/server-product/src/main/java/io/onedev/server/product/DefaultServerConfig.java index 5430978517..9413d386df 100644 --- a/server-product/src/main/java/io/onedev/server/product/DefaultServerConfig.java +++ 
b/server-product/src/main/java/io/onedev/server/product/DefaultServerConfig.java @@ -1,27 +1,21 @@ package io.onedev.server.product; -import java.io.File; -import java.io.IOException; -import java.net.InetSocketAddress; -import java.net.Socket; -import java.util.List; - -import javax.inject.Inject; -import javax.inject.Singleton; - +import com.google.common.base.Splitter; +import io.onedev.commons.bootstrap.Bootstrap; +import io.onedev.server.ServerConfig; +import io.onedev.server.persistence.HibernateConfig; import org.apache.commons.lang3.StringUtils; import org.glassfish.jersey.internal.guava.Preconditions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.common.base.Splitter; - -import io.onedev.commons.bootstrap.Bootstrap; -import io.onedev.commons.utils.ExplicitException; -import io.onedev.server.ServerConfig; -import io.onedev.server.persistence.HibernateConfig; -import oshi.SystemInfo; -import oshi.hardware.HardwareAbstractionLayer; +import javax.inject.Inject; +import javax.inject.Singleton; +import java.io.File; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.Socket; +import java.util.List; @Singleton public class DefaultServerConfig implements ServerConfig { @@ -38,10 +32,6 @@ public class DefaultServerConfig implements ServerConfig { private static final String PROP_CLUSTER_PORT = "cluster_port"; - private static final String PROP_SERVER_CPU = "server_cpu"; - - private static final String PROP_SERVER_MEMORY = "server_memory"; - private int httpPort; private int sshPort; @@ -138,54 +128,6 @@ public class DefaultServerConfig implements ServerConfig { clusterPort = 5701; else clusterPort = Integer.parseInt(clusterPortStr.trim()); - - HardwareAbstractionLayer hardware = null; - try { - hardware = new SystemInfo().getHardware(); - } catch (Exception e) { - logger.debug("Error calling oshi", e); - } - - String cpuString = System.getenv(PROP_SERVER_CPU); - if (StringUtils.isBlank(cpuString)) - cpuString = props.getProperty(PROP_SERVER_CPU); - if (StringUtils.isBlank(cpuString)) { - if (hardware != null) { - serverCpu = hardware.getProcessor().getLogicalProcessorCount()*1000; - } else { - serverCpu = 4000; - logger.warn("Unable to call oshi to get default cpu quota (cpu cores x 1000). Assuming as 4000. " - + "Configure it manually via environment variable or server property '" + PROP_SERVER_CPU - + "' if you do not want to use this value"); - } - } else { - try { - serverCpu = Integer.parseInt(cpuString); - } catch (NumberFormatException e) { - throw new ExplicitException("Property '" + PROP_SERVER_CPU + "' should be a number"); - } - } - - String memoryString = System.getenv(PROP_SERVER_MEMORY); - if (StringUtils.isBlank(memoryString)) - memoryString = props.getProperty(PROP_SERVER_MEMORY); - if (StringUtils.isBlank(memoryString)) { - if (hardware != null) { - serverMemory = (int) (hardware.getMemory().getTotal()/1024/1024); - } else { - serverMemory = 8000; - logger.warn("Unable to call oshi to get default memory quota (mega bytes of physical memory). " - + "Assuming as 8000. 
Configure it manually via environment variable or server property " - + "'" + PROP_SERVER_MEMORY + "' if you do not want to use this value"); - } - } else { - try { - serverMemory = Integer.parseInt(memoryString); - } catch (NumberFormatException e) { - throw new ExplicitException("Property '" + PROP_SERVER_MEMORY + "' should be a number"); - } - } - } @Override @@ -223,14 +165,4 @@ public class DefaultServerConfig implements ServerConfig { return clusterPort; } - @Override - public int getServerCpu() { - return serverCpu; - } - - @Override - public int getServerMemory() { - return serverMemory; - } - }
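With the oshi-based server_cpu/server_memory detection removed, DefaultServerConfig is left with the lookup order it already uses for settings such as cluster_port: environment variable first, then server property, then a built-in default. A condensed sketch of that pattern; the helper name is hypothetical, and ExplicitException is swapped for IllegalArgumentException to keep it self-contained:

import java.util.Properties;

public final class ConfigUtils {

    // Hypothetical helper condensing DefaultServerConfig's lookup order:
    // environment variable, then server property, then a built-in default.
    public static int getIntProperty(Properties props, String name, int defaultValue) {
        String value = System.getenv(name);
        if (value == null || value.trim().isEmpty())
            value = props.getProperty(name);
        if (value == null || value.trim().isEmpty())
            return defaultValue;
        try {
            return Integer.parseInt(value.trim());
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Property '" + name + "' should be a number");
        }
    }
}

For instance, getIntProperty(props, "cluster_port", 5701) would reproduce the cluster port handling retained above.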
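The cpu/memory limit settings introduced in ServerDockerExecutor above feed directly into docker run: runStepContainer appends --cpus and --memory (both standard docker flags) ahead of any user-supplied run options, and startService passes the same two limits for service containers. A minimal standalone sketch of that argument assembly, assuming docker is on PATH; the class name and example values are illustrative, not defaults:

import java.util.ArrayList;
import java.util.List;

// Sketch: assemble a docker run command with optional resource limits, mirroring
// how runStepContainer adds --cpus/--memory before other options.
public class DockerRunArgs {

    public static List<String> buildArgs(String image, String cpuLimit, String memoryLimit) {
        List<String> args = new ArrayList<>(List.of("docker", "run", "--rm"));
        if (cpuLimit != null)
            args.add("--cpus=" + cpuLimit);      // e.g. "1.5" caps the container at 1.5 CPUs
        if (memoryLimit != null)
            args.add("--memory=" + memoryLimit); // e.g. "512m" sets a hard memory cap
        args.add(image);
        return args;
    }

    public static void main(String[] args) throws Exception {
        List<String> command = buildArgs("alpine:latest", "1.5", "512m");
        new ProcessBuilder(command).inheritIO().start().waitFor();
    }
}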