A basic implementation of the Kubernetes executor

Robin Shen 2019-06-26 09:04:55 +08:00
parent 854c920f9d
commit cf9d8c86e7
63 changed files with 762 additions and 919 deletions

View File

@ -195,7 +195,7 @@
</repository> </repository>
</repositories> </repositories>
<properties> <properties>
<commons.version>1.1.2</commons.version> <commons.version>1.1.3</commons.version>
<antlr.version>4.7.2</antlr.version> <antlr.version>4.7.2</antlr.version>
</properties> </properties>
</project> </project>

View File

@ -84,9 +84,8 @@ import io.onedev.server.ci.DefaultCISpecProvider;
import io.onedev.server.ci.job.DefaultJobManager; import io.onedev.server.ci.job.DefaultJobManager;
import io.onedev.server.ci.job.DependencyPopulator; import io.onedev.server.ci.job.DependencyPopulator;
import io.onedev.server.ci.job.JobManager; import io.onedev.server.ci.job.JobManager;
import io.onedev.server.ci.job.log.DefaultLogManager; import io.onedev.server.ci.job.log.DefaultJobLogManager;
import io.onedev.server.ci.job.log.LogManager; import io.onedev.server.ci.job.log.JobLogManager;
import io.onedev.server.ci.job.log.LogNormalizer;
import io.onedev.server.ci.job.log.instruction.LogInstruction; import io.onedev.server.ci.job.log.instruction.LogInstruction;
import io.onedev.server.entitymanager.BuildDependenceManager; import io.onedev.server.entitymanager.BuildDependenceManager;
import io.onedev.server.entitymanager.BuildManager; import io.onedev.server.entitymanager.BuildManager;
@ -318,7 +317,7 @@ public class CoreModule extends AbstractPluginModule {
bind(BuildManager.class).to(DefaultBuildManager.class); bind(BuildManager.class).to(DefaultBuildManager.class);
bind(BuildDependenceManager.class).to(DefaultBuildDependenceManager.class); bind(BuildDependenceManager.class).to(DefaultBuildDependenceManager.class);
bind(JobManager.class).to(DefaultJobManager.class); bind(JobManager.class).to(DefaultJobManager.class);
bind(LogManager.class).to(DefaultLogManager.class); bind(JobLogManager.class).to(DefaultJobLogManager.class);
bind(PullRequestBuildManager.class).to(DefaultPullRequestBuildManager.class); bind(PullRequestBuildManager.class).to(DefaultPullRequestBuildManager.class);
bind(MailManager.class).to(DefaultMailManager.class); bind(MailManager.class).to(DefaultMailManager.class);
bind(IssueManager.class).to(DefaultIssueManager.class); bind(IssueManager.class).to(DefaultIssueManager.class);
@ -389,7 +388,6 @@ public class CoreModule extends AbstractPluginModule {
contributeFromPackage(Authenticator.class, Authenticator.class); contributeFromPackage(Authenticator.class, Authenticator.class);
contributeFromPackage(DefaultCISpecProvider.class, DefaultCISpecProvider.class); contributeFromPackage(DefaultCISpecProvider.class, DefaultCISpecProvider.class);
contributeFromPackage(LogNormalizer.class, LogNormalizer.class);
contribute(CodePullAuthorizationSource.class, DefaultJobManager.class); contribute(CodePullAuthorizationSource.class, DefaultJobManager.class);

View File

@ -448,14 +448,14 @@ public class DefaultCommitInfoManager extends AbstractEnvironmentManager impleme
revList.revisions(revisions).order(Order.TOPO); revList.revisions(revisions).order(Order.TOPO);
List<ObjectId> historyIds = new ArrayList<>(); List<ObjectId> historyIds = new ArrayList<>();
for (String commitHash: revList.call(null)) for (String commitHash: revList.call())
historyIds.add(ObjectId.fromString(commitHash)); historyIds.add(ObjectId.fromString(commitHash));
revList = new RevListCommand(project.getGitDir()); revList = new RevListCommand(project.getGitDir());
revList.order(null).firstParent(true); revList.order(null).firstParent(true);
Set<ObjectId> firstParentIds = new HashSet<>(); Set<ObjectId> firstParentIds = new HashSet<>();
for (String commitHash: revList.call(null)) for (String commitHash: revList.call())
firstParentIds.add(ObjectId.fromString(commitHash)); firstParentIds.add(ObjectId.fromString(commitHash));
/* /*
@ -565,7 +565,7 @@ public class DefaultCommitInfoManager extends AbstractEnvironmentManager impleme
@Override @Override
public void run() { public void run() {
try { try {
log.call(null); log.call();
} catch (Exception e) { } catch (Exception e) {
logException.set(e); logException.set(e);
} finally { } finally {

View File

@ -15,6 +15,7 @@ import java.util.UUID;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException; import java.util.concurrent.CancellationException;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.Lock;
@ -47,7 +48,8 @@ import io.onedev.server.OneException;
import io.onedev.server.ci.CISpec; import io.onedev.server.ci.CISpec;
import io.onedev.server.ci.InvalidCISpecException; import io.onedev.server.ci.InvalidCISpecException;
import io.onedev.server.ci.JobDependency; import io.onedev.server.ci.JobDependency;
import io.onedev.server.ci.job.log.LogManager; import io.onedev.server.ci.job.log.JobLogManager;
import io.onedev.server.ci.job.log.JobLogger;
import io.onedev.server.ci.job.param.JobParam; import io.onedev.server.ci.job.param.JobParam;
import io.onedev.server.ci.job.trigger.JobTrigger; import io.onedev.server.ci.job.trigger.JobTrigger;
import io.onedev.server.entitymanager.BuildManager; import io.onedev.server.entitymanager.BuildManager;
@ -104,7 +106,7 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
private final SessionManager sessionManager; private final SessionManager sessionManager;
private final LogManager logManager; private final JobLogManager logManager;
private final UserManager userManager; private final UserManager userManager;
@ -127,7 +129,7 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
@Inject @Inject
public DefaultJobManager(BuildManager buildManager, UserManager userManager, public DefaultJobManager(BuildManager buildManager, UserManager userManager,
ListenerRegistry listenerRegistry, SettingManager settingManager, ListenerRegistry listenerRegistry, SettingManager settingManager,
TransactionManager transactionManager, LogManager logManager, ExecutorService executorService, TransactionManager transactionManager, JobLogManager logManager, ExecutorService executorService,
SessionManager sessionManager, Set<DependencyPopulator> dependencyPopulators, SessionManager sessionManager, Set<DependencyPopulator> dependencyPopulators,
TaskScheduler taskScheduler, BuildParamManager buildParamManager) { TaskScheduler taskScheduler, BuildParamManager buildParamManager) {
this.settingManager = settingManager; this.settingManager = settingManager;
@ -248,21 +250,22 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
private void execute(Build build) { private void execute(Build build) {
try { try {
String jobId = UUID.randomUUID().toString(); String jobToken = UUID.randomUUID().toString();
Collection<String> jobSecretsToMask = Sets.newHashSet(jobId); Collection<String> jobSecretsToMask = Sets.newHashSet(jobToken);
Job job = build.getJob(); Job job = build.getJob();
ObjectId commitId = ObjectId.fromString(build.getCommitHash()); ObjectId commitId = ObjectId.fromString(build.getCommitHash());
JobExecutor executor = getJobExecutor(build.getProject(), commitId, job.getName(), job.getEnvironment()); JobExecutor executor = getJobExecutor(build.getProject(), commitId, job.getName(), job.getEnvironment());
if (executor != null) { if (executor != null) {
Logger logger = logManager.getLogger(build, job.getLogLevel(), jobSecretsToMask); JobLogger logger = logManager.getLogger(build, jobSecretsToMask);
Long buildId = build.getId(); Long buildId = build.getId();
String projectName = build.getProject().getName(); String projectName = build.getProject().getName();
File projectGitDir = build.getProject().getGitDir();
JobExecution execution = new JobExecution(executorService.submit(new Runnable() { JobExecution execution = new JobExecution(executorService.submit(new Runnable() {
@Override @Override
public void run() { public void run() {
logger.info("Creating server workspace..."); logger.log("Creating server workspace...");
File serverWorkspace = FileUtils.createTempDir("server-workspace"); File serverWorkspace = FileUtils.createTempDir("server-workspace");
try { try {
Map<String, String> envVars = new HashMap<>(); Map<String, String> envVars = new HashMap<>();
@ -274,7 +277,7 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
@Override @Override
public void run() { public void run() {
Build build = buildManager.load(buildId); Build build = buildManager.load(buildId);
logger.info("Populating dependencies..."); logger.log("Populating job dependencies...");
for (BuildDependence dependence: build.getDependencies()) { for (BuildDependence dependence: build.getDependencies()) {
for (DependencyPopulator populator: dependencyPopulators) for (DependencyPopulator populator: dependencyPopulators)
populator.populate(dependence.getDependency(), serverWorkspace, logger); populator.populate(dependence.getDependency(), serverWorkspace, logger);
@ -313,12 +316,13 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
}); });
logger.info("Executing job with executor '" + executor.getName() + "'..."); logger.log("Executing job with executor '" + executor.getName() + "'...");
List<String> commands = Splitter.on("\n").trimResults(CharMatcher.is('\r')).splitToList(job.getCommands()); List<String> commands = Splitter.on("\n").trimResults(CharMatcher.is('\r')).splitToList(job.getCommands());
JobContext jobContext = new JobContext(projectName, job.getEnvironment(), serverWorkspace, envVars, commands, JobContext jobContext = new JobContext(projectName, projectGitDir, job.getEnvironment(),
job.isCloneSource(), commitId, job.getCaches(), new PatternSet(includeFiles, excludeFiles), logger) { serverWorkspace, envVars, commands, job.isCloneSource(), commitId, job.getCaches(),
new PatternSet(includeFiles, excludeFiles), logger) {
@Override @Override
public void notifyJobRunning() { public void notifyJobRunning() {
@ -338,18 +342,18 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
}; };
jobContexts.put(jobId, jobContext); jobContexts.put(jobToken, jobContext);
try { try {
executor.execute(jobId, jobContext); executor.execute(jobToken, jobContext);
} finally { } finally {
jobContexts.remove(jobId); jobContexts.remove(jobToken);
} }
sessionManager.run(new Runnable() { sessionManager.run(new Runnable() {
@Override @Override
public void run() { public void run() {
logger.info("Collecting job outcomes..."); logger.log("Processing job outcomes...");
Build build = buildManager.load(buildId); Build build = buildManager.load(buildId);
for (JobOutcome outcome: job.getOutcomes()) for (JobOutcome outcome: job.getOutcomes())
outcome.process(build, serverWorkspace, logger); outcome.process(build, serverWorkspace, logger);
@ -357,8 +361,11 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
}); });
} catch (Exception e) { } catch (Exception e) {
if (ExceptionUtils.find(e, InterruptedException.class) == null) if (ExceptionUtils.find(e, InterruptedException.class) == null) {
logger.error("Error running build", e); DefaultJobManager.logger.debug("Error running build", e);
if (e.getMessage() != null)
logger.log(e.getMessage());
}
String errorMessage = e.getMessage(); String errorMessage = e.getMessage();
if (errorMessage != null) { if (errorMessage != null) {
for (String secret: jobSecretsToMask) for (String secret: jobSecretsToMask)
@ -368,10 +375,10 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
throw e; throw e;
} }
} finally { } finally {
logger.info("Deleting server workspace..."); logger.log("Deleting server workspace...");
executor.cleanDir(serverWorkspace); executor.cleanDir(serverWorkspace);
FileUtils.deleteDir(serverWorkspace); FileUtils.deleteDir(serverWorkspace);
logger.info("Server workspace deleted"); logger.log("Job finished");
} }
} }
@ -390,8 +397,8 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
} }
@Override @Override
public JobContext getJobContext(String jobId) { public JobContext getJobContext(String jobToken) {
return jobContexts.get(jobId); return jobContexts.get(jobToken);
} }
private void markBuildError(Build build, String errorMessage) { private void markBuildError(Build build, String errorMessage) {
@ -586,8 +593,12 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
build.setCanceller(userManager.load(cancellerId)); build.setCanceller(userManager.load(cancellerId));
} }
build.setStatus(Build.Status.CANCELLED); build.setStatus(Build.Status.CANCELLED);
} catch (Exception e) { } catch (ExecutionException e) {
if (e.getCause() != null)
build.setStatus(Build.Status.FAILED, e.getCause().getMessage());
else
build.setStatus(Build.Status.FAILED, e.getMessage()); build.setStatus(Build.Status.FAILED, e.getMessage());
} catch (InterruptedException e) {
} finally { } finally {
build.setFinishDate(new Date()); build.setFinishDate(new Date());
listenerRegistry.post(new BuildFinished(build)); listenerRegistry.post(new BuildFinished(build));
@ -628,9 +639,9 @@ public class DefaultJobManager implements JobManager, Runnable, SchedulableTask,
@Override @Override
public boolean canPullCode(HttpServletRequest request, Project project) { public boolean canPullCode(HttpServletRequest request, Project project) {
String jobId = request.getHeader(JOB_ID_HTTP_HEADER); String jobToken = request.getHeader(JOB_TOKEN_HTTP_HEADER);
if (jobId != null) { if (jobToken != null) {
JobContext context = getJobContext(jobId); JobContext context = getJobContext(jobToken);
if (context != null) if (context != null)
return context.getProjectName().equals(project.getName()); return context.getProjectName().equals(project.getName());
} }

View File

@ -2,12 +2,11 @@ package io.onedev.server.ci.job;
import java.io.File; import java.io.File;
import org.slf4j.Logger; import io.onedev.server.ci.job.log.JobLogger;
import io.onedev.server.model.Build; import io.onedev.server.model.Build;
public interface DependencyPopulator { public interface DependencyPopulator {
void populate(Build dependency, File workspace, Logger logger); void populate(Build dependency, File workspace, JobLogger logger);
} }
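Dependency populators now receive the new JobLogger instead of an SLF4J Logger. A minimal sketch of an implementation against that signature might look like the following; only the interface shape comes from this diff, the class name and the artifact-directory logic are hypothetical.

import java.io.File;

import io.onedev.server.ci.job.DependencyPopulator;
import io.onedev.server.ci.job.log.JobLogger;
import io.onedev.server.model.Build;

// Hypothetical populator illustrating the changed signature
public class ArtifactDependencyPopulator implements DependencyPopulator {

	@Override
	public void populate(Build dependency, File workspace, JobLogger logger) {
		// Put whatever the dependency build produced under the job workspace
		File targetDir = new File(workspace, "dependencies/" + dependency.getNumber());
		targetDir.mkdirs();
		logger.log("Populated dependency build #" + dependency.getNumber() + " into " + targetDir);
	}

}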

View File

@ -21,7 +21,6 @@ import org.hibernate.validator.constraints.NotEmpty;
import io.onedev.server.ci.JobDependency; import io.onedev.server.ci.JobDependency;
import io.onedev.server.ci.job.cache.JobCache; import io.onedev.server.ci.job.cache.JobCache;
import io.onedev.server.ci.job.log.LogLevel;
import io.onedev.server.ci.job.param.JobParam; import io.onedev.server.ci.job.param.JobParam;
import io.onedev.server.ci.job.trigger.JobTrigger; import io.onedev.server.ci.job.trigger.JobTrigger;
import io.onedev.server.event.ProjectEvent; import io.onedev.server.event.ProjectEvent;
@ -61,8 +60,6 @@ public class Job implements Serializable, Validatable {
private long timeout = 3600; private long timeout = 3600;
private LogLevel logLevel = LogLevel.INFO;
private transient Map<String, InputSpec> paramSpecMap; private transient Map<String, InputSpec> paramSpecMap;
@Editable(order=100, description="Specify name of the job") @Editable(order=100, description="Specify name of the job")
@ -168,15 +165,6 @@ public class Job implements Serializable, Validatable {
this.timeout = timeout; this.timeout = timeout;
} }
@Editable(order=10300, group="More Settings")
public LogLevel getLogLevel() {
return logLevel;
}
public void setLogLevel(LogLevel logLevel) {
this.logLevel = logLevel;
}
public JobTrigger getMatchedTrigger(ProjectEvent event) { public JobTrigger getMatchedTrigger(ProjectEvent event) {
for (JobTrigger trigger: getTriggers()) { for (JobTrigger trigger: getTriggers()) {
if (trigger.matches(event, this)) if (trigger.matches(event, this))

View File

@ -14,7 +14,7 @@ import io.onedev.server.model.support.JobContext;
public interface JobManager { public interface JobManager {
public static final String JOB_ID_HTTP_HEADER = "X-ONEDEV-JOB-ID"; public static final String JOB_TOKEN_HTTP_HEADER = "X-ONEDEV-JOB-TOKEN";
Build submit(Project project, ObjectId commitId, String jobName, Build submit(Project project, ObjectId commitId, String jobName,
Map<String, List<String>> paramMap, @Nullable User submitter); Map<String, List<String>> paramMap, @Nullable User submitter);
@ -23,6 +23,6 @@ public interface JobManager {
void cancel(Build build, @Nullable User canceller); void cancel(Build build, @Nullable User canceller);
JobContext getJobContext(String jobId); JobContext getJobContext(String jobToken);
} }

View File

@ -4,9 +4,9 @@ import java.io.File;
import java.io.Serializable; import java.io.Serializable;
import org.hibernate.validator.constraints.NotEmpty; import org.hibernate.validator.constraints.NotEmpty;
import org.slf4j.Logger;
import io.onedev.server.OneDev; import io.onedev.server.OneDev;
import io.onedev.server.ci.job.log.JobLogger;
import io.onedev.server.model.Build; import io.onedev.server.model.Build;
import io.onedev.server.storage.StorageManager; import io.onedev.server.storage.StorageManager;
import io.onedev.server.util.patternset.PatternSet; import io.onedev.server.util.patternset.PatternSet;
@ -39,7 +39,7 @@ public abstract class JobOutcome implements Serializable {
return patternSet; return patternSet;
} }
public abstract void process(Build build, File workspace, Logger logger); public abstract void process(Build build, File workspace, JobLogger logger);
public static String getLockKey(Build build, String outcomeDir) { public static String getLockKey(Build build, String outcomeDir) {
return "job-outcome:" + build.getId() + ":" + outcomeDir; return "job-outcome:" + build.getId() + ":" + outcomeDir;

View File

@ -5,12 +5,15 @@ import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.onedev.commons.utils.ExceptionUtils; import io.onedev.commons.utils.ExceptionUtils;
import io.onedev.commons.utils.FileUtils; import io.onedev.commons.utils.FileUtils;
public class CacheRunner { public class CacheRunner {
private static final Logger logger = LoggerFactory.getLogger(CacheRunner.class);
private final File cacheHome; private final File cacheHome;
private final Collection<JobCache> caches; private final Collection<JobCache> caches;
@ -20,7 +23,7 @@ public class CacheRunner {
this.caches = caches; this.caches = caches;
} }
public <T> T call(CacheCallable<T> callable, Logger logger) { public <T> T call(CacheCallable<T> callable) {
Collection<CacheAllocation> allocations = new ArrayList<>(); Collection<CacheAllocation> allocations = new ArrayList<>();
try { try {
if (!cacheHome.exists()) if (!cacheHome.exists())

View File

@ -16,7 +16,6 @@ import java.util.Date;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.Lock;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@ -53,9 +52,9 @@ import io.onedev.server.util.inputspec.SecretInput;
import io.onedev.server.web.websocket.WebSocketManager; import io.onedev.server.web.websocket.WebSocketManager;
@Singleton @Singleton
public class DefaultLogManager implements LogManager { public class DefaultJobLogManager implements JobLogManager {
private static final Logger logger = LoggerFactory.getLogger(DefaultLogManager.class); private static final Logger logger = LoggerFactory.getLogger(DefaultJobLogManager.class);
private static final int MIN_CACHE_ENTRIES = 5000; private static final int MIN_CACHE_ENTRIES = 5000;
@ -73,17 +72,14 @@ public class DefaultLogManager implements LogManager {
private final BuildManager buildManager; private final BuildManager buildManager;
private final Set<LogNormalizer> logNormalizers;
private final Map<Long, LogSnippet> recentSnippets = new ConcurrentHashMap<>(); private final Map<Long, LogSnippet> recentSnippets = new ConcurrentHashMap<>();
@Inject @Inject
public DefaultLogManager(StorageManager storageManager, WebSocketManager webSocketManager, public DefaultJobLogManager(StorageManager storageManager, WebSocketManager webSocketManager,
BuildManager buildManager, Set<LogNormalizer> logNormalizers) { BuildManager buildManager) {
this.storageManager = storageManager; this.storageManager = storageManager;
this.webSocketManager = webSocketManager; this.webSocketManager = webSocketManager;
this.buildManager = buildManager; this.buildManager = buildManager;
this.logNormalizers = logNormalizers;
} }
private File getLogFile(Long projectId, Long buildNumber) { private File getLogFile(Long projectId, Long buildNumber) {
@ -92,31 +88,18 @@ public class DefaultLogManager implements LogManager {
} }
@Override @Override
public Logger getLogger(Build build, LogLevel loggerLevel, Collection<String> jobSecretsToMask) { public JobLogger getLogger(Build build, Collection<String> jobSecretsToMask) {
Long projectId = build.getProject().getId(); Long projectId = build.getProject().getId();
Long buildId = build.getId(); Long buildId = build.getId();
Long buildNumber = build.getNumber(); Long buildNumber = build.getNumber();
Collection<String> secretValuesToMask = build.getSecretValuesToMask(); Collection<String> secretValuesToMask = build.getSecretValuesToMask();
secretValuesToMask.addAll(jobSecretsToMask); secretValuesToMask.addAll(jobSecretsToMask);
return new JobLogger(loggerLevel) { return new JobLogger() {
private static final long serialVersionUID = 1L;
private void log(LogLevel logLevel, String message) {
for (LogNormalizer logNormalizer: logNormalizers) {
LogNormalizer.Normalized normalized = logNormalizer.normalize(message);
if (normalized != null) {
if (normalized.getLevel() != null)
logLevel = normalized.getLevel();
message = normalized.getMessage();
break;
}
}
private void doLog(String message) {
for (String maskSecret: secretValuesToMask) for (String maskSecret: secretValuesToMask)
message = StringUtils.replace(message, maskSecret, SecretInput.MASK); message = StringUtils.replace(message, maskSecret, SecretInput.MASK);
if (logLevel.ordinal() <= loggerLevel.ordinal()) {
Lock lock = LockUtils.getReadWriteLock(getLockKey(buildId)).writeLock(); Lock lock = LockUtils.getReadWriteLock(getLockKey(buildId)).writeLock();
lock.lock(); lock.lock();
try { try {
@ -129,12 +112,12 @@ public class DefaultLogManager implements LogManager {
} }
} }
if (snippet != null) { if (snippet != null) {
snippet.entries.add(new LogEntry(new Date(), logLevel, message)); snippet.entries.add(new JobLogEntry(new Date(), message));
if (snippet.entries.size() > MAX_CACHE_ENTRIES) { if (snippet.entries.size() > MAX_CACHE_ENTRIES) {
File logFile = getLogFile(projectId, buildNumber); File logFile = getLogFile(projectId, buildNumber);
try (ObjectOutputStream oos = newOutputStream(logFile)) { try (ObjectOutputStream oos = newOutputStream(logFile)) {
while (snippet.entries.size() > MIN_CACHE_ENTRIES) { while (snippet.entries.size() > MIN_CACHE_ENTRIES) {
LogEntry entry = snippet.entries.remove(0); JobLogEntry entry = snippet.entries.remove(0);
oos.writeObject(entry); oos.writeObject(entry);
snippet.offset++; snippet.offset++;
} }
@ -149,10 +132,9 @@ public class DefaultLogManager implements LogManager {
lock.unlock(); lock.unlock();
} }
} }
}
@Override @Override
public void log(LogLevel logLevel, String message, Throwable throwable) { public void log(String message, Throwable throwable) {
try { try {
if (throwable != null) { if (throwable != null) {
for (String line: Splitter.on(EOL_PATTERN).split(Throwables.getStackTraceAsString(throwable))) for (String line: Splitter.on(EOL_PATTERN).split(Throwables.getStackTraceAsString(throwable)))
@ -160,7 +142,7 @@ public class DefaultLogManager implements LogManager {
} }
if (message.startsWith(LogInstruction.PREFIX)) { if (message.startsWith(LogInstruction.PREFIX)) {
log(logLevel, message); doLog(message);
InstructionContext instructionContext = LogInstruction.parse(message); InstructionContext instructionContext = LogInstruction.parse(message);
String name = instructionContext.Identifier().getText(); String name = instructionContext.Identifier().getText();
@ -186,13 +168,13 @@ public class DefaultLogManager implements LogManager {
paramValues.add(LogInstruction.unescape(LogInstruction.removeQuotes(terminalNode.getText()))); paramValues.add(LogInstruction.unescape(LogInstruction.removeQuotes(terminalNode.getText())));
params.put(paramName, paramValues); params.put(paramName, paramValues);
} }
log(LogLevel.DEBUG, "Executing log instruction '" + name + "'..."); doLog("Executing log instruction '" + name + "'...");
doInSession(instruction, buildId, params); doInSession(instruction, buildId, params);
} else { } else {
log(LogLevel.ERROR, "Unsupported log instruction: " + name); doLog("Unsupported log instruction: " + name);
} }
} else { } else {
log(logLevel, message); doLog(message);
} }
} catch (Exception e) { } catch (Exception e) {
logger.error("Error logging", e); logger.error("Error logging", e);
@ -211,8 +193,8 @@ public class DefaultLogManager implements LogManager {
return "build-log: " + buildId; return "build-log: " + buildId;
} }
private List<LogEntry> readLogEntries(File logFile, int from, int count) { private List<JobLogEntry> readLogEntries(File logFile, int from, int count) {
List<LogEntry> entries = new ArrayList<>(); List<JobLogEntry> entries = new ArrayList<>();
if (logFile.exists()) { if (logFile.exists()) {
try (ObjectInputStream ois = new ObjectInputStream(new BufferedInputStream(new FileInputStream(logFile)))) { try (ObjectInputStream ois = new ObjectInputStream(new BufferedInputStream(new FileInputStream(logFile)))) {
int numOfReadEntries = 0; int numOfReadEntries = 0;
@ -221,7 +203,7 @@ public class DefaultLogManager implements LogManager {
numOfReadEntries++; numOfReadEntries++;
} }
while (count == 0 || numOfReadEntries - from < count) { while (count == 0 || numOfReadEntries - from < count) {
entries.add((LogEntry) ois.readObject()); entries.add((JobLogEntry) ois.readObject());
numOfReadEntries++; numOfReadEntries++;
} }
} catch (EOFException e) { } catch (EOFException e) {
@ -237,7 +219,7 @@ public class DefaultLogManager implements LogManager {
if (logFile.exists()) { if (logFile.exists()) {
try (ObjectInputStream ois = new ObjectInputStream(new BufferedInputStream(new FileInputStream(logFile)))) { try (ObjectInputStream ois = new ObjectInputStream(new BufferedInputStream(new FileInputStream(logFile)))) {
while (true) { while (true) {
snippet.entries.add((LogEntry) ois.readObject()); snippet.entries.add((JobLogEntry) ois.readObject());
if (snippet.entries.size() > count) { if (snippet.entries.size() > count) {
snippet.entries.remove(0); snippet.entries.remove(0);
snippet.offset ++; snippet.offset ++;
@ -251,7 +233,7 @@ public class DefaultLogManager implements LogManager {
return snippet; return snippet;
} }
private List<LogEntry> readLogEntries(List<LogEntry> cachedEntries, int from, int count) { private List<JobLogEntry> readLogEntries(List<JobLogEntry> cachedEntries, int from, int count) {
if (from < cachedEntries.size()) { if (from < cachedEntries.size()) {
int to = from + count; int to = from + count;
if (to == from || to > cachedEntries.size()) if (to == from || to > cachedEntries.size())
@ -264,7 +246,7 @@ public class DefaultLogManager implements LogManager {
@Sessional @Sessional
@Override @Override
public List<LogEntry> readLogEntries(Build build, int from, int count) { public List<JobLogEntry> readLogEntries(Build build, int from, int count) {
Lock lock = LockUtils.getReadWriteLock(getLockKey(build.getId())).readLock(); Lock lock = LockUtils.getReadWriteLock(getLockKey(build.getId())).readLock();
lock.lock(); lock.lock();
try { try {
@ -274,7 +256,7 @@ public class DefaultLogManager implements LogManager {
if (from >= snippet.offset) { if (from >= snippet.offset) {
return readLogEntries(snippet.entries, from - snippet.offset, count); return readLogEntries(snippet.entries, from - snippet.offset, count);
} else { } else {
List<LogEntry> entries = new ArrayList<>(); List<JobLogEntry> entries = new ArrayList<>();
entries.addAll(readLogEntries(logFile, from, count)); entries.addAll(readLogEntries(logFile, from, count));
if (count == 0) if (count == 0)
entries.addAll(snippet.entries); entries.addAll(snippet.entries);
@ -347,7 +329,7 @@ public class DefaultLogManager implements LogManager {
if (snippet != null) { if (snippet != null) {
File logFile = getLogFile(build.getProject().getId(), build.getNumber()); File logFile = getLogFile(build.getProject().getId(), build.getNumber());
try (ObjectOutputStream oos = newOutputStream(logFile)) { try (ObjectOutputStream oos = newOutputStream(logFile)) {
for (LogEntry entry: snippet.entries) for (JobLogEntry entry: snippet.entries)
oos.writeObject(entry); oos.writeObject(entry);
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
@ -387,7 +369,7 @@ public class DefaultLogManager implements LogManager {
LogSnippet snippet = recentSnippets.get(build.getId()); LogSnippet snippet = recentSnippets.get(build.getId());
if (snippet != null) { if (snippet != null) {
StringBuilder builder = new StringBuilder(); StringBuilder builder = new StringBuilder();
for (LogEntry entry: snippet.entries) for (JobLogEntry entry: snippet.entries)
builder.append(renderAsText(entry) + "\n"); builder.append(renderAsText(entry) + "\n");
recentBuffer = builder.toString().getBytes(Charsets.UTF_8); recentBuffer = builder.toString().getBytes(Charsets.UTF_8);
} }
@ -397,9 +379,8 @@ public class DefaultLogManager implements LogManager {
} }
} }
private String renderAsText(LogEntry entry) { private String renderAsText(JobLogEntry entry) {
String prefix = DATE_FORMATTER.print(new DateTime(entry.getDate())) + " " String prefix = DATE_FORMATTER.print(new DateTime(entry.getDate())) + " ";
+ StringUtils.leftPad(entry.getLevel().name(), 5) + " ";
StringBuilder builder = new StringBuilder(); StringBuilder builder = new StringBuilder();
for (String line: Splitter.on(EOL_PATTERN).split(entry.getMessage())) { for (String line: Splitter.on(EOL_PATTERN).split(entry.getMessage())) {
if (builder.length() == 0) { if (builder.length() == 0) {
@ -419,7 +400,7 @@ public class DefaultLogManager implements LogManager {
if (pos == buffer.length) { if (pos == buffer.length) {
if (ois != null) { if (ois != null) {
try { try {
buffer = (renderAsText((LogEntry) ois.readObject()) + "\n").getBytes(Charsets.UTF_8); buffer = (renderAsText((JobLogEntry) ois.readObject()) + "\n").getBytes(Charsets.UTF_8);
} catch (EOFException e) { } catch (EOFException e) {
IOUtils.closeQuietly(ois); IOUtils.closeQuietly(ois);
ois = null; ois = null;
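The logger returned by getLogger masks both the build's secret values and the per-job token before an entry is cached or written to disk. A self-contained sketch of that masking step, using commons-lang3's StringUtils (the actual StringUtils import and the SecretInput.MASK value are assumptions; the replacement loop mirrors doLog above).

import java.util.Arrays;
import java.util.Collection;

import org.apache.commons.lang3.StringUtils;

public class SecretMaskingDemo {

	// Stand-in for io.onedev.server.util.inputspec.SecretInput.MASK
	private static final String MASK = "*****";

	public static String mask(String message, Collection<String> secretValuesToMask) {
		// Same replacement loop as in DefaultJobLogManager's doLog()
		for (String maskSecret: secretValuesToMask)
			message = StringUtils.replace(message, maskSecret, MASK);
		return message;
	}

	public static void main(String[] args) {
		String jobToken = "sample-job-token"; // hypothetical token value
		System.out.println(mask("Cloning with token sample-job-token", Arrays.asList(jobToken)));
	}

}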

View File

@ -3,19 +3,16 @@ package io.onedev.server.ci.job.log;
import java.io.Serializable; import java.io.Serializable;
import java.util.Date; import java.util.Date;
public class LogEntry implements Serializable { public class JobLogEntry implements Serializable {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private final Date date; private final Date date;
private final LogLevel level;
private final String message; private final String message;
public LogEntry(Date date, LogLevel level, String message) { public JobLogEntry(Date date, String message) {
this.date = date; this.date = date;
this.level = level;
this.message = message; this.message = message;
} }
@ -23,10 +20,6 @@ public class LogEntry implements Serializable {
return date; return date;
} }
public LogLevel getLevel() {
return level;
}
public String getMessage() { public String getMessage() {
return message; return message;
} }

View File

@ -4,13 +4,11 @@ import java.io.InputStream;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import org.slf4j.Logger;
import io.onedev.server.model.Build; import io.onedev.server.model.Build;
public interface LogManager { public interface JobLogManager {
Logger getLogger(Build build, LogLevel logLevel, Collection<String> jobSecrets); JobLogger getLogger(Build build, Collection<String> jobSecrets);
/** /**
* Read specified number of log entries from specified build, starting from specified index * Read specified number of log entries from specified build, starting from specified index
@ -26,7 +24,7 @@ public interface LogManager {
* log entries. Number of entries may be less than required count if there is no * log entries. Number of entries may be less than required count if there is no
* enough log entries * enough log entries
*/ */
List<LogEntry> readLogEntries(Build build, int offset, int count); List<JobLogEntry> readLogEntries(Build build, int offset, int count);
/** /**
* Read specified number of log entries starting from end of the log * Read specified number of log entries starting from end of the log
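Per the javadoc above, readLogEntries pages through the persisted log by offset and count, and fewer entries than requested may come back; in the implementation earlier in this diff a count of zero reads everything remaining. A usage sketch, assuming the usual OneDev.getInstance lookup for obtaining the manager.

import java.util.List;

import io.onedev.server.OneDev;
import io.onedev.server.ci.job.log.JobLogEntry;
import io.onedev.server.ci.job.log.JobLogManager;
import io.onedev.server.model.Build;

public class LogPagingExample {

	public void printFirstPage(Build build) {
		JobLogManager logManager = OneDev.getInstance(JobLogManager.class);
		// Read up to 100 entries starting at offset 0; fewer may be returned
		List<JobLogEntry> page = logManager.readLogEntries(build, 0, 100);
		for (JobLogEntry entry: page)
			System.out.println(entry.getDate() + " " + entry.getMessage());
	}

}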

View File

@ -2,230 +2,12 @@ package io.onedev.server.ci.job.log;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import org.slf4j.Logger; public abstract class JobLogger {
import org.slf4j.helpers.MarkerIgnoringBase;
import org.slf4j.helpers.MessageFormatter;
public abstract class JobLogger extends MarkerIgnoringBase implements Logger { public abstract void log(String message, @Nullable Throwable t);
private static final long serialVersionUID = 1L; public void log(String message) {
log(message, null);
private final LogLevel logLevel;
public JobLogger(LogLevel logLevel) {
this.logLevel = logLevel;
} }
@Override
public String getName() {
return "Job Logger";
}
@Override
public boolean isTraceEnabled() {
return logLevel.compareTo(LogLevel.TRACE) >= 0;
}
@Override
public void trace(String msg) {
if (isTraceEnabled())
log(LogLevel.TRACE, msg, null);
}
@Override
public void trace(String format, Object arg) {
if (isTraceEnabled()) {
String msgStr = MessageFormatter.format(format, arg).getMessage();
log(LogLevel.TRACE, msgStr, null);
}
}
@Override
public void trace(String format, Object arg1, Object arg2) {
if (isTraceEnabled()) {
String msgStr = MessageFormatter.format(format, arg1, arg2).getMessage();
log(LogLevel.TRACE, msgStr, null);
}
}
@Override
public void trace(String msg, Throwable t) {
if (isTraceEnabled())
log(LogLevel.TRACE, msg, t);
}
@Override
public boolean isDebugEnabled() {
return logLevel.compareTo(LogLevel.DEBUG) >= 0;
}
@Override
public void debug(String msg) {
if (isDebugEnabled())
log(LogLevel.DEBUG, msg, null);
}
@Override
public void debug(String format, Object arg) {
if (isDebugEnabled()) {
String msgStr = MessageFormatter.format(format, arg).getMessage();
log(LogLevel.DEBUG, msgStr, null);
}
}
@Override
public void debug(String format, Object arg1, Object arg2) {
if (isDebugEnabled()) {
String msgStr = MessageFormatter.format(format, arg1, arg2).getMessage();
log(LogLevel.DEBUG, msgStr, null);
}
}
@Override
public void debug(String msg, Throwable t) {
if (isDebugEnabled())
log(LogLevel.DEBUG, msg, t);
}
@Override
public boolean isInfoEnabled() {
return logLevel.compareTo(LogLevel.INFO) >= 0;
}
@Override
public void info(String msg) {
if (isInfoEnabled())
log(LogLevel.INFO, msg, null);
}
@Override
public void info(String format, Object arg) {
if (isInfoEnabled()) {
String msgStr = MessageFormatter.format(format, arg).getMessage();
log(LogLevel.INFO, msgStr, null);
}
}
@Override
public void info(String format, Object arg1, Object arg2) {
if (isInfoEnabled()) {
String msgStr = MessageFormatter.format(format, arg1, arg2).getMessage();
log(LogLevel.INFO, msgStr, null);
}
}
@Override
public void info(String msg, Throwable t) {
if (isInfoEnabled())
log(LogLevel.INFO, msg, t);
}
@Override
public boolean isWarnEnabled() {
return logLevel.compareTo(LogLevel.WARN) >= 0;
}
@Override
public void warn(String msg) {
if (isWarnEnabled())
log(LogLevel.WARN, msg, null);
}
@Override
public void warn(String format, Object arg) {
if (isWarnEnabled()) {
String msgStr = MessageFormatter.format(format, arg).getMessage();
log(LogLevel.WARN, msgStr, null);
}
}
@Override
public void warn(String format, Object arg1, Object arg2) {
if (isWarnEnabled()) {
String msgStr = MessageFormatter.format(format, arg1, arg2).getMessage();
log(LogLevel.WARN, msgStr, null);
}
}
@Override
public void warn(String msg, Throwable t) {
if (isWarnEnabled())
log(LogLevel.WARN, msg, t);
}
@Override
public boolean isErrorEnabled() {
return logLevel.compareTo(LogLevel.ERROR) >= 0;
}
@Override
public void error(String msg) {
if (isErrorEnabled())
log(LogLevel.ERROR, msg, null);
}
@Override
public void error(String format, Object arg) {
if (isErrorEnabled()) {
String msgStr = MessageFormatter.format(format, arg).getMessage();
log(LogLevel.ERROR, msgStr, null);
}
}
@Override
public void error(String format, Object arg1, Object arg2) {
if (isErrorEnabled()) {
String msgStr = MessageFormatter.format(format, arg1, arg2).getMessage();
log(LogLevel.ERROR, msgStr, null);
}
}
@Override
public void error(String msg, Throwable t) {
if (isErrorEnabled())
log(LogLevel.ERROR, msg, t);
}
@Override
public void debug(String arg0, Object... arg1) {
if (isDebugEnabled()) {
String msgStr = MessageFormatter.arrayFormat(arg0, arg1).getMessage();
log(LogLevel.DEBUG, msgStr, null);
}
}
@Override
public void error(String arg0, Object... arg1) {
if (isErrorEnabled()) {
String msgStr = MessageFormatter.arrayFormat(arg0, arg1).getMessage();
log(LogLevel.ERROR, msgStr, null);
}
}
@Override
public void info(String arg0, Object... arg1) {
if (isInfoEnabled()) {
String msgStr = MessageFormatter.arrayFormat(arg0, arg1).getMessage();
log(LogLevel.INFO, msgStr, null);
}
}
@Override
public void trace(String arg0, Object... arg1) {
if (isTraceEnabled()) {
String msgStr = MessageFormatter.arrayFormat(arg0, arg1).getMessage();
log(LogLevel.TRACE, msgStr, null);
}
}
@Override
public void warn(String arg0, Object... arg1) {
if (isWarnEnabled()) {
String msgStr = MessageFormatter.arrayFormat(arg0, arg1).getMessage();
log(LogLevel.WARN, msgStr, null);
}
}
protected abstract void log(LogLevel logLevel, String message, @Nullable Throwable throwable);
} }
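After this change JobLogger is no longer an SLF4J Logger: it is a small abstract class with a single log(String, Throwable) primitive plus a log(String) convenience overload, and the log-level machinery above is gone. A minimal console-backed subclass, for illustration only (not part of the commit):

import io.onedev.server.ci.job.log.JobLogger;

public class ConsoleJobLogger extends JobLogger {

	@Override
	public void log(String message, Throwable t) {
		// The new contract: one level-less entry per call, optional throwable
		System.out.println(message);
		if (t != null)
			t.printStackTrace(System.out);
	}

	public static void main(String[] args) {
		JobLogger logger = new ConsoleJobLogger();
		logger.log("Creating server workspace...");              // convenience overload
		logger.log("Job failed", new RuntimeException("boom"));  // with stack trace
	}

}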

View File

@ -1,63 +0,0 @@
package io.onedev.server.ci.job.log;
import javax.annotation.Nullable;
import io.onedev.commons.launcher.loader.ExtensionPoint;
/**
* Sometimes job log message needs to be normalized for better display. For instance Maven command prints something
* like below:
*
* <pre>[INFO] Scanning for projects...</pre>
*
* In such case, we should extract the log level information to override OneDev's default log level, and the
* original message should also be modified to remove the log level information
*
* @author robin
*
*/
@ExtensionPoint
public interface LogNormalizer {
/**
* Normalize provided job log message
* @param message
* message to be normalized
* @return
* normalized result, or <tt>null</tt> if this normalizer does not handle this message
*/
@Nullable
Normalized normalize(String message);
public static class Normalized {
private final LogLevel level;
private final String message;
public Normalized(@Nullable LogLevel level, String message) {
this.level = level;
this.message = message;
}
/**
* @return
* Log level of this message, or <tt>null</tt> if log level information
* is not available in this message
*/
@Nullable
public LogLevel getLevel() {
return level;
}
/**
* @return
* normalized message
*/
public String getMessage() {
return message;
}
}
}
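For context on what is being dropped here: a normalizer pulled a log level out of a tool's own prefix (the Maven "[INFO] ..." example in the javadoc above) and returned the cleaned-up message. A sketch of what an implementation of this now-removed extension point looked like; the class below is hypothetical and exists only to illustrate the deleted interface.

import io.onedev.server.ci.job.log.LogLevel;
import io.onedev.server.ci.job.log.LogNormalizer;

// Hypothetical normalizer for the extension point removed by this commit
public class MavenLogNormalizer implements LogNormalizer {

	@Override
	public LogNormalizer.Normalized normalize(String message) {
		if (message.startsWith("[INFO] "))
			return new LogNormalizer.Normalized(LogLevel.INFO, message.substring("[INFO] ".length()));
		else if (message.startsWith("[ERROR] "))
			return new LogNormalizer.Normalized(LogLevel.ERROR, message.substring("[ERROR] ".length()));
		else
			return null; // not handled by this normalizer
	}

}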

View File

@ -5,7 +5,7 @@ import java.util.List;
public class LogSnippet { public class LogSnippet {
public List<LogEntry> entries = new LinkedList<>(); public List<JobLogEntry> entries = new LinkedList<>();
/** /**
* offset of first log entry in the snippet * offset of first log entry in the snippet

View File

@ -193,7 +193,7 @@ public class DefaultCodeCommentManager extends AbstractEntityManager<CodeComment
command.after(DateUtils.addDays(oldestDate, -1)); command.after(DateUtils.addDays(oldestDate, -1));
command.revisions(Lists.newArrayList(commitId.name())); command.revisions(Lists.newArrayList(commitId.name()));
command.count(MAX_HISTORY_COMMITS_TO_CHECK); command.count(MAX_HISTORY_COMMITS_TO_CHECK);
Set<String> revisions = new HashSet<>(command.call(null)); Set<String> revisions = new HashSet<>(command.call());
RevCommit commit = revWalk.parseCommit(commitId); RevCommit commit = revWalk.parseCommit(commitId);
List<String> newLines = GitUtils.readLines(project.getRepository(), commit, path, List<String> newLines = GitUtils.readLines(project.getRepository(), commit, path,

View File

@ -208,7 +208,7 @@ public class DefaultProjectManager extends AbstractEntityManager<Project> implem
public void fork(Project from, Project to) { public void fork(Project from, Project to) {
save(to); save(to);
FileUtils.cleanDir(to.getGitDir()); FileUtils.cleanDir(to.getGitDir());
new CloneCommand(to.getGitDir()).mirror(true).from(from.getGitDir().getAbsolutePath()).call(null); new CloneCommand(to.getGitDir()).mirror(true).from(from.getGitDir().getAbsolutePath()).call();
commitInfoManager.cloneInfo(from, to); commitInfoManager.cloneInfo(from, to);
avatarManager.copyAvatar(from.getFacade(), to.getFacade()); avatarManager.copyAvatar(from.getFacade(), to.getFacade());
} }

View File

@ -157,7 +157,7 @@ public class GitFilter implements Filter {
try { try {
InputStream is = ServletUtils.getInputStream(request); InputStream is = ServletUtils.getInputStream(request);
OutputStream os = response.getOutputStream(); OutputStream os = response.getOutputStream();
new UploadCommand(gitDir, environments).input(is).output(os).call(null); new UploadCommand(gitDir, environments).input(is).output(os).call();
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -174,7 +174,7 @@ public class GitFilter implements Filter {
try { try {
InputStream is = ServletUtils.getInputStream(request); InputStream is = ServletUtils.getInputStream(request);
OutputStream os = response.getOutputStream(); OutputStream os = response.getOutputStream();
new ReceiveCommand(gitDir, environments).input(is).output(os).call(null); new ReceiveCommand(gitDir, environments).input(is).output(os).call();
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -221,13 +221,13 @@ public class GitFilter implements Filter {
if (service.contains("upload")) { if (service.contains("upload")) {
checkPullPermission(request, project); checkPullPermission(request, project);
writeInitial(response, service); writeInitial(response, service);
new AdvertiseUploadRefsCommand(gitDir).output(response.getOutputStream()).call(null); new AdvertiseUploadRefsCommand(gitDir).output(response.getOutputStream()).call();
} else { } else {
if (!SecurityUtils.canWriteCode(project.getFacade())) { if (!SecurityUtils.canWriteCode(project.getFacade())) {
throw new UnauthorizedException("You do not have permission to push to this project."); throw new UnauthorizedException("You do not have permission to push to this project.");
} }
writeInitial(response, service); writeInitial(response, service);
new AdvertiseReceiveRefsCommand(gitDir).output(response.getOutputStream()).call(null); new AdvertiseReceiveRefsCommand(gitDir).output(response.getOutputStream()).call();
} }
} }

View File

@ -327,7 +327,7 @@ public class GitUtils {
new FetchCommand(toRepository.getDirectory()) new FetchCommand(toRepository.getDirectory())
.from(fromRepository.getDirectory().getAbsolutePath()) .from(fromRepository.getDirectory().getAbsolutePath())
.refspec(fetchRef) .refspec(fetchRef)
.call(null); .call();
} else { } else {
LockUtils.call("repository-fetch:" + fromRepository.getDirectory(), new Callable<Void>() { LockUtils.call("repository-fetch:" + fromRepository.getDirectory(), new Callable<Void>() {
@ -340,7 +340,7 @@ public class GitUtils {
new FetchCommand(toRepository.getDirectory()) new FetchCommand(toRepository.getDirectory())
.from(fromRepository.getDirectory().getAbsolutePath()) .from(fromRepository.getDirectory().getAbsolutePath())
.refspec(refUpdate.getName()) .refspec(refUpdate.getName())
.call(null); .call();
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -356,7 +356,7 @@ public class GitUtils {
if (gitEnvs != null && !gitEnvs.isEmpty()) { if (gitEnvs != null && !gitEnvs.isEmpty()) {
IsAncestorCommand cmd = new IsAncestorCommand(repository.getDirectory(), gitEnvs); IsAncestorCommand cmd = new IsAncestorCommand(repository.getDirectory(), gitEnvs);
cmd.ancestor(base.name()).descendant(tip.name()); cmd.ancestor(base.name()).descendant(tip.name());
return cmd.call(null); return cmd.call();
} else { } else {
try (RevWalk revWalk = new RevWalk(repository)) { try (RevWalk revWalk = new RevWalk(repository)) {
RevCommit baseCommit; RevCommit baseCommit;

View File

@ -27,20 +27,20 @@ public class AdvertiseReceiveRefsCommand extends GitCommand<Void> {
} }
@Override @Override
public Void call(Logger logger) { public Void call() {
Preconditions.checkNotNull(output); Preconditions.checkNotNull(output);
Logger effectiveLogger = logger!=null?logger:AdvertiseReceiveRefsCommand.logger;
Commandline cmd = cmd(); Commandline cmd = cmd();
cmd.addArgs("receive-pack", "--stateless-rpc", "--advertise-refs", "."); cmd.addArgs("receive-pack", "--stateless-rpc", "--advertise-refs", ".");
cmd.execute(output, new LineConsumer() { cmd.execute(output, new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.error(line); logger.error(line);
} }
}, logger).checkReturnCode(); }).checkReturnCode();
return null; return null;
} }

View File

@ -27,20 +27,20 @@ public class AdvertiseUploadRefsCommand extends GitCommand<Void> {
} }
@Override @Override
public Void call(Logger logger) { public Void call() {
Preconditions.checkNotNull(output); Preconditions.checkNotNull(output);
Logger effectiveLogger = logger!=null?logger:AdvertiseUploadRefsCommand.logger;
Commandline cmd = cmd(); Commandline cmd = cmd();
cmd.addArgs("upload-pack", "--stateless-rpc", "--advertise-refs", "."); cmd.addArgs("upload-pack", "--stateless-rpc", "--advertise-refs", ".");
cmd.execute(output, new LineConsumer() { cmd.execute(output, new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.error(line); logger.error(line);
} }
}, logger).checkReturnCode(); }).checkReturnCode();
return null; return null;
} }

View File

@ -78,7 +78,7 @@ public class BlameCommand extends GitCommand<Collection<BlameBlock>> {
} }
@Override @Override
public Collection<BlameBlock> call(Logger logger) { public Collection<BlameBlock> call() {
Preconditions.checkArgument(commitHash!=null && ObjectId.isId(commitHash), "commit hash has to be specified."); Preconditions.checkArgument(commitHash!=null && ObjectId.isId(commitHash), "commit hash has to be specified.");
Preconditions.checkNotNull(file, "file parameter has to be specified."); Preconditions.checkNotNull(file, "file parameter has to be specified.");
@ -166,7 +166,7 @@ public class BlameCommand extends GitCommand<Collection<BlameBlock>> {
} }
} }
}, logger); });
if (!endOfFile.get()) if (!endOfFile.get())
result.checkReturnCode(); result.checkReturnCode();

View File

@ -26,27 +26,26 @@ public class CheckoutCommand extends GitCommand<Void> {
} }
@Override @Override
public Void call(Logger logger) { public Void call() {
Preconditions.checkNotNull(refspec, "refspec param has to be specified."); Preconditions.checkNotNull(refspec, "refspec param has to be specified.");
Commandline cmd = cmd().addArgs("checkout", "--quiet", refspec); Commandline cmd = cmd().addArgs("checkout", "--quiet", refspec);
Logger effectiveLogger = logger!=null?logger:CheckoutCommand.logger;
cmd.execute(new LineConsumer() { cmd.execute(new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.trace(line); logger.trace(line);
} }
}, new LineConsumer() { }, new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.error(line); logger.error(line);
} }
}, logger).checkReturnCode(); }).checkReturnCode();
return null; return null;
} }

View File

@ -24,28 +24,27 @@ public class CleanCommand extends GitCommand<Void> {
} }
@Override @Override
public Void call(Logger logger) { public Void call() {
Commandline cmd = cmd().addArgs("clean"); Commandline cmd = cmd().addArgs("clean");
if (options != null) if (options != null)
cmd.addArgs(options); cmd.addArgs(options);
Logger effectiveLogger = logger!=null?logger:CleanCommand.logger;
cmd.execute(new LineConsumer() { cmd.execute(new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.trace(line); logger.trace(line);
} }
}, new LineConsumer() { }, new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.error(line); logger.error(line);
} }
}, logger).checkReturnCode(); }).checkReturnCode();
return null; return null;
} }

View File

@ -61,7 +61,7 @@ public class CloneCommand extends GitCommand<Void> {
} }
@Override @Override
public Void call(Logger logger) { public Void call() {
Preconditions.checkNotNull(from, "from has to be specified."); Preconditions.checkNotNull(from, "from has to be specified.");
Commandline cmd = cmd().addArgs("clone"); Commandline cmd = cmd().addArgs("clone");
@ -79,12 +79,11 @@ public class CloneCommand extends GitCommand<Void> {
cmd.addArgs(from); cmd.addArgs(from);
cmd.addArgs("."); cmd.addArgs(".");
Logger effectiveLogger = logger!=null?logger:CloneCommand.logger;
cmd.execute(new LineConsumer() { cmd.execute(new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.trace(line); logger.trace(line);
} }
}, new LineConsumer(){ }, new LineConsumer(){
@ -92,14 +91,14 @@ public class CloneCommand extends GitCommand<Void> {
@Override @Override
public void consume(String line) { public void consume(String line) {
if (line.startsWith("Cloning into ") || line.equals("done.")) if (line.startsWith("Cloning into ") || line.equals("done."))
effectiveLogger.trace(line); logger.trace(line);
else if (line.contains("You appear to have cloned an empty repository")) else if (line.contains("You appear to have cloned an empty repository"))
effectiveLogger.warn(line); logger.warn(line);
else else
effectiveLogger.error(line); logger.error(line);
} }
}, logger).checkReturnCode(); }).checkReturnCode();
return null; return null;
} }

View File

@ -40,7 +40,7 @@ public class FetchCommand extends GitCommand<Void> {
} }
@Override @Override
public Void call(Logger logger) { public Void call() {
Preconditions.checkNotNull(from, "from param has to be specified."); Preconditions.checkNotNull(from, "from param has to be specified.");
Commandline cmd = cmd().addArgs("fetch"); Commandline cmd = cmd().addArgs("fetch");
@ -52,22 +52,21 @@ public class FetchCommand extends GitCommand<Void> {
for (String each: refspec) for (String each: refspec)
cmd.addArgs(each); cmd.addArgs(each);
Logger effectiveLogger = logger!=null?logger:FetchCommand.logger;
cmd.execute(new LineConsumer() { cmd.execute(new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.trace(line); logger.trace(line);
} }
}, new LineConsumer() { }, new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.error(line); logger.error(line);
} }
}, logger).checkReturnCode(); }).checkReturnCode();
return null; return null;
} }

View File

@ -100,6 +100,6 @@ public abstract class GitCommand<V> {
return AppLoader.getInstance(GitConfig.class).getExecutable(); return AppLoader.getInstance(GitConfig.class).getExecutable();
} }
public abstract V call(@Nullable Logger logger); public abstract V call();
} }
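Every git command now exposes a parameter-less call() and logs through its own class-level SLF4J logger instead of an optional logger passed by the caller. A sketch of a command written against the new signature, following the pattern of the surrounding diffs; the rev-parse command itself, the package locations of Commandline/LineConsumer, and the base-class constructor are assumptions, while cmd(), addArgs(), execute() and checkReturnCode() appear as used in this commit. The class is assumed to live alongside the other commands so GitCommand needs no import.

import java.io.File;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.onedev.commons.utils.command.Commandline;
import io.onedev.commons.utils.command.LineConsumer;

public class RevParseCommand extends GitCommand<String> {

	private static final Logger logger = LoggerFactory.getLogger(RevParseCommand.class);

	private final String revision;

	public RevParseCommand(File gitDir, String revision) {
		super(gitDir); // assumed base constructor
		this.revision = revision;
	}

	@Override
	public String call() {
		StringBuilder output = new StringBuilder();
		Commandline cmd = cmd().addArgs("rev-parse", revision);
		cmd.execute(new LineConsumer() {

			@Override
			public void consume(String line) {
				output.append(line);
			}

		}, new LineConsumer() {

			@Override
			public void consume(String line) {
				// Errors go to the command's own logger now, not a passed-in one
				logger.error(line);
			}

		}).checkReturnCode();
		return output.toString();
	}

}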

View File

@ -33,7 +33,7 @@ public class InitCommand extends GitCommand<Void> {
} }
@Override @Override
public Void call(Logger logger) { public Void call() {
Preconditions.checkNotNull(from, "from param has to be specified."); Preconditions.checkNotNull(from, "from param has to be specified.");
Commandline cmd = cmd().addArgs("fetch"); Commandline cmd = cmd().addArgs("fetch");
@ -43,12 +43,11 @@ public class InitCommand extends GitCommand<Void> {
for (String each: refspec) for (String each: refspec)
cmd.addArgs(each); cmd.addArgs(each);
Logger effectiveLogger = logger!=null?logger:InitCommand.logger;
cmd.execute(new LineConsumer() { cmd.execute(new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.trace(line); logger.trace(line);
} }
}, new LineConsumer() { }, new LineConsumer() {
@ -59,13 +58,13 @@ public class InitCommand extends GitCommand<Void> {
|| line.startsWith(" * branch") || line.startsWith(" * branch")
|| line.startsWith(" * [new ref]") || line.startsWith(" * [new ref]")
|| line.contains("..") && line.contains("->")) { || line.contains("..") && line.contains("->")) {
effectiveLogger.info(line); logger.info(line);
} else { } else {
effectiveLogger.error(line); logger.error(line);
} }
} }
}, logger).checkReturnCode(); }).checkReturnCode();
return null; return null;
} }

View File

@ -35,7 +35,7 @@ public class IsAncestorCommand extends GitCommand<Boolean> {
} }
@Override @Override
public Boolean call(Logger logger) { public Boolean call() {
Preconditions.checkNotNull(ancestor, "ancestor has to be specified."); Preconditions.checkNotNull(ancestor, "ancestor has to be specified.");
Preconditions.checkNotNull(descendant, "descendant has to be specified."); Preconditions.checkNotNull(descendant, "descendant has to be specified.");
@ -43,7 +43,6 @@ public class IsAncestorCommand extends GitCommand<Boolean> {
cmd.addArgs("merge-base", "--is-ancestor", ancestor, descendant); cmd.addArgs("merge-base", "--is-ancestor", ancestor, descendant);
Logger effectiveLogger = logger!=null?logger:IsAncestorCommand.logger;
ExecuteResult result = cmd.execute(new LineConsumer() { ExecuteResult result = cmd.execute(new LineConsumer() {
@Override @Override
@ -54,10 +53,10 @@ public class IsAncestorCommand extends GitCommand<Boolean> {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.error(line); logger.error(line);
} }
}, logger); });
if (result.getReturnCode() == 0) if (result.getReturnCode() == 0)
return true; return true;

View File

@ -45,7 +45,7 @@ public class ListChangedFilesCommand extends GitCommand<Collection<String>> {
} }
@Override @Override
public Collection<String> call(Logger logger) { public Collection<String> call() {
Preconditions.checkNotNull(toRev, "toRev has to be specified."); Preconditions.checkNotNull(toRev, "toRev has to be specified.");
Preconditions.checkNotNull(fromRev, "fromRev has to be specified."); Preconditions.checkNotNull(fromRev, "fromRev has to be specified.");
@ -58,7 +58,6 @@ public class ListChangedFilesCommand extends GitCommand<Collection<String>> {
if (path != null) if (path != null)
cmd.addArgs("--", path); cmd.addArgs("--", path);
Logger effectiveLogger = logger!=null?logger:ListChangedFilesCommand.logger;
cmd.execute(new LineConsumer() { cmd.execute(new LineConsumer() {
@Override @Override
@ -71,10 +70,10 @@ public class ListChangedFilesCommand extends GitCommand<Collection<String>> {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.error(line); logger.error(line);
} }
}, logger).checkReturnCode(); }).checkReturnCode();
return changedFiles; return changedFiles;
} }

View File

@ -44,7 +44,7 @@ public class ListFileChangesCommand extends GitCommand<Collection<FileChange>> {
} }
@Override @Override
public Collection<FileChange> call(Logger logger) { public Collection<FileChange> call() {
Preconditions.checkNotNull(toRev, "toRev has to be specified."); Preconditions.checkNotNull(toRev, "toRev has to be specified.");
Preconditions.checkNotNull(fromRev, "fromRev has to be specified."); Preconditions.checkNotNull(fromRev, "fromRev has to be specified.");
@ -57,7 +57,6 @@ public class ListFileChangesCommand extends GitCommand<Collection<FileChange>> {
if (path != null) if (path != null)
cmd.addArgs("--", path); cmd.addArgs("--", path);
Logger effectiveLogger = logger!=null?logger:ListFileChangesCommand.logger;
cmd.execute(new LineConsumer() { cmd.execute(new LineConsumer() {
@Override @Override
@ -89,10 +88,10 @@ public class ListFileChangesCommand extends GitCommand<Collection<FileChange>> {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.error(line); logger.error(line);
} }
}, logger).checkReturnCode(); }).checkReturnCode();
return fileChanges; return fileChanges;
} }


@ -51,7 +51,7 @@ public abstract class LogCommand extends GitCommand<Void> {
} }
@Override @Override
public Void call(Logger logger) { public Void call() {
Preconditions.checkArgument(!revisions.isEmpty(), "Log revisions have to be specified"); Preconditions.checkArgument(!revisions.isEmpty(), "Log revisions have to be specified");
Commandline cmd = cmd(); Commandline cmd = cmd();
@ -84,8 +84,6 @@ public abstract class LogCommand extends GitCommand<Void> {
for (String revision: revisions) for (String revision: revisions)
cmd.addArgs(revision); cmd.addArgs(revision);
Logger effectiveLogger = logger!=null?logger:LogCommand.logger;
AtomicReference<GitCommit.Builder> commitBuilderRef = new AtomicReference<>(null); AtomicReference<GitCommit.Builder> commitBuilderRef = new AtomicReference<>(null);
cmd.execute(new LineConsumer() { cmd.execute(new LineConsumer() {
@ -159,13 +157,13 @@ public abstract class LogCommand extends GitCommand<Void> {
public void consume(String line) { public void consume(String line) {
if (line.contains("inexact rename detection was skipped") if (line.contains("inexact rename detection was skipped")
|| line.contains("you may want to set your diff.renameLimit variable")) { || line.contains("you may want to set your diff.renameLimit variable")) {
effectiveLogger.trace(line); logger.trace(line);
} else { } else {
effectiveLogger.error(line); logger.error(line);
} }
} }
}, logger).checkReturnCode(); }).checkReturnCode();
if (commitBuilderRef.get() != null) if (commitBuilderRef.get() != null)
consume(commitBuilderRef.get().build()); consume(commitBuilderRef.get().build());


@ -36,22 +36,21 @@ public class ReceiveCommand extends GitCommand<Void> {
} }
@Override @Override
public Void call(Logger logger) { public Void call() {
Preconditions.checkNotNull(input); Preconditions.checkNotNull(input);
Preconditions.checkNotNull(output); Preconditions.checkNotNull(output);
Commandline cmd = cmd(); Commandline cmd = cmd();
cmd.addArgs("receive-pack", "--stateless-rpc", "."); cmd.addArgs("receive-pack", "--stateless-rpc", ".");
Logger effectiveLogger = logger!=null?logger:ReceiveCommand.logger;
cmd.execute(output, new LineConsumer() { cmd.execute(output, new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.error(line); logger.error(line);
} }
}, input, logger).checkReturnCode(); }, input).checkReturnCode();
return null; return null;
} }


@ -165,7 +165,7 @@ public class RevListCommand extends GitCommand<List<String>> {
} }
@Override @Override
public List<String> call(Logger logger) { public List<String> call() {
Commandline cmd = cmd(); Commandline cmd = cmd();
cmd.addArgs("rev-list"); cmd.addArgs("rev-list");
@ -218,8 +218,6 @@ public class RevListCommand extends GitCommand<List<String>> {
for (String path: paths) for (String path: paths)
cmd.addArgs(path); cmd.addArgs(path);
Logger effectiveLogger = logger!=null?logger:RevListCommand.logger;
List<String> commitHashes = new ArrayList<>(); List<String> commitHashes = new ArrayList<>();
cmd.execute(new LineConsumer() { cmd.execute(new LineConsumer() {
@ -232,10 +230,10 @@ public class RevListCommand extends GitCommand<List<String>> {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.error(line); logger.error(line);
} }
}, logger).checkReturnCode(); }).checkReturnCode();
return commitHashes; return commitHashes;
} }


@ -36,22 +36,21 @@ public class UploadCommand extends GitCommand<Void> {
} }
@Override @Override
public Void call(Logger logger) { public Void call() {
Preconditions.checkNotNull(input); Preconditions.checkNotNull(input);
Preconditions.checkNotNull(output); Preconditions.checkNotNull(output);
Commandline cmd = cmd(); Commandline cmd = cmd();
cmd.addArgs("upload-pack", "--stateless-rpc", "."); cmd.addArgs("upload-pack", "--stateless-rpc", ".");
Logger effectiveLogger = logger!=null?logger:UploadCommand.logger;
cmd.execute(output, new LineConsumer() { cmd.execute(output, new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
effectiveLogger.error(line); logger.error(line);
} }
}, input, logger).checkReturnCode(); }, input).checkReturnCode();
return null; return null;
} }


@ -1303,7 +1303,7 @@ public class Project extends AbstractEntity implements Validatable {
List<User> authors = new ArrayList<>(); List<User> authors = new ArrayList<>();
UserManager userManager = OneDev.getInstance(UserManager.class); UserManager userManager = OneDev.getInstance(UserManager.class);
for (BlameBlock block: cmd.call(null)) { for (BlameBlock block: cmd.call()) {
User author = userManager.find(block.getCommit().getAuthor()); User author = userManager.find(block.getCommit().getAuthor());
if (author != null && !authors.contains(author)) if (author != null && !authors.contains(author))
authors.add(author); authors.add(author);
@ -1434,7 +1434,7 @@ public class Project extends AbstractEntity implements Validatable {
if (gitEnvs != null && !gitEnvs.isEmpty()) { if (gitEnvs != null && !gitEnvs.isEmpty()) {
ListChangedFilesCommand cmd = new ListChangedFilesCommand(getGitDir(), gitEnvs); ListChangedFilesCommand cmd = new ListChangedFilesCommand(getGitDir(), gitEnvs);
cmd.fromRev(oldObjectId.name()).toRev(newObjectId.name()); cmd.fromRev(oldObjectId.name()).toRev(newObjectId.name());
return cmd.call(null); return cmd.call();
} else { } else {
return GitUtils.getChangedFiles(getRepository(), oldObjectId, newObjectId); return GitUtils.getChangedFiles(getRepository(), oldObjectId, newObjectId);
} }


@ -108,7 +108,7 @@ public class PullRequestUpdate extends AbstractEntity {
ListFileChangesCommand cmd = new ListFileChangesCommand(getRequest().getTargetProject().getGitDir()); ListFileChangesCommand cmd = new ListFileChangesCommand(getRequest().getTargetProject().getGitDir());
cmd.fromRev(getBaseCommitHash()); cmd.fromRev(getBaseCommitHash());
cmd.toRev(getHeadCommitHash()); cmd.toRev(getHeadCommitHash());
fileChanges = cmd.call(null); fileChanges = cmd.call();
} }
return fileChanges; return fileChanges;
} }


@ -9,10 +9,10 @@ import java.util.Map;
import org.eclipse.jgit.api.Git; import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.GitAPIException; import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectId;
import org.slf4j.Logger;
import io.onedev.commons.utils.ExceptionUtils; import io.onedev.commons.utils.ExceptionUtils;
import io.onedev.server.ci.job.cache.JobCache; import io.onedev.server.ci.job.cache.JobCache;
import io.onedev.server.ci.job.log.JobLogger;
import io.onedev.server.git.command.CheckoutCommand; import io.onedev.server.git.command.CheckoutCommand;
import io.onedev.server.git.command.FetchCommand; import io.onedev.server.git.command.FetchCommand;
import io.onedev.server.util.patternset.PatternSet; import io.onedev.server.util.patternset.PatternSet;
@ -21,6 +21,8 @@ public abstract class JobContext {
private final String projectName; private final String projectName;
private final File gitDir;
private final String environment; private final String environment;
private final File serverWorkspace; private final File serverWorkspace;
@ -37,13 +39,14 @@ public abstract class JobContext {
private final PatternSet collectFiles; private final PatternSet collectFiles;
private final Logger logger; private final JobLogger logger;
public JobContext(String projectName, String environment, File workspace, public JobContext(String projectName, File gitDir, String environment, File workspace,
Map<String, String> envVars, List<String> commands, boolean cloneSource, Map<String, String> envVars, List<String> commands, boolean cloneSource,
ObjectId commitId, Collection<JobCache> caches, PatternSet collectFiles, ObjectId commitId, Collection<JobCache> caches, PatternSet collectFiles,
Logger logger) { JobLogger logger) {
this.projectName = projectName; this.projectName = projectName;
this.gitDir = gitDir;
this.environment = environment; this.environment = environment;
this.serverWorkspace = workspace; this.serverWorkspace = workspace;
this.envVars = envVars; this.envVars = envVars;
@ -91,13 +94,13 @@ public abstract class JobContext {
return collectFiles; return collectFiles;
} }
public Logger getLogger() { public JobLogger getLogger() {
return logger; return logger;
} }
private void fetchAndCheckout(File gitDir) { private void fetchAndCheckout(File checkoutDir) {
new FetchCommand(gitDir).depth(1).from(gitDir.getAbsolutePath()).refspec(commitId.name()).call(logger); new FetchCommand(checkoutDir).depth(1).from(gitDir.getAbsolutePath()).refspec(commitId.name()).call();
new CheckoutCommand(gitDir).refspec(commitId.name()).call(logger); new CheckoutCommand(checkoutDir).refspec(commitId.name()).call();
} }
public void checkoutSource(File dir) { public void checkoutSource(File dir) {
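
JobContext now exposes a JobLogger instead of an org.slf4j.Logger, and the gitDir passed to the constructor serves as the fetch source for the shallow checkout above. The JobLogger interface itself is not part of this diff; judging from the calls made on it elsewhere in the commit (log(String) and log(String, Throwable)), it presumably looks roughly like the following sketch -- the real interface in io.onedev.server.ci.job.log may differ:

// Hypothetical sketch of io.onedev.server.ci.job.log.JobLogger; only the two methods
// actually invoked in this diff are assumed.
public interface JobLogger {

    void log(String message, Throwable t);

    default void log(String message) {
        log(message, null);
    }
}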


@ -120,7 +120,7 @@ public abstract class JobExecutor implements Serializable {
this.cacheTTL = cacheTTL; this.cacheTTL = cacheTTL;
} }
public abstract void execute(String jobId, JobContext context); public abstract void execute(String jobToken, JobContext context);
public final boolean isApplicable(Project project, ObjectId commitId, String jobName, String environment) { public final boolean isApplicable(Project project, ObjectId commitId, String jobName, String environment) {
Matcher matcher = new ChildAwareMatcher(); Matcher matcher = new ChildAwareMatcher();


@ -15,8 +15,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import io.onedev.server.OneDev; import io.onedev.server.OneDev;
import io.onedev.server.ci.job.log.LogEntry; import io.onedev.server.ci.job.log.JobLogEntry;
import io.onedev.server.ci.job.log.LogManager; import io.onedev.server.ci.job.log.JobLogManager;
import io.onedev.server.ci.job.log.LogSnippet; import io.onedev.server.ci.job.log.LogSnippet;
import io.onedev.server.model.Build; import io.onedev.server.model.Build;
import io.onedev.server.web.behavior.WebSocketObserver; import io.onedev.server.web.behavior.WebSocketObserver;
@ -39,7 +39,7 @@ public class BuildLogPanel extends GenericPanel<Build> {
add(new WebSocketObserver() { add(new WebSocketObserver() {
private void appendRecentLogEntries(IPartialPageRequestHandler handler) { private void appendRecentLogEntries(IPartialPageRequestHandler handler) {
List<LogEntry> logEntries = getLogManager().readLogEntries(getBuild(), nextOffset, 0); List<JobLogEntry> logEntries = getLogManager().readLogEntries(getBuild(), nextOffset, 0);
if (!logEntries.isEmpty()) { if (!logEntries.isEmpty()) {
nextOffset += logEntries.size(); nextOffset += logEntries.size();
@ -70,8 +70,8 @@ public class BuildLogPanel extends GenericPanel<Build> {
setOutputMarkupId(true); setOutputMarkupId(true);
} }
private LogManager getLogManager() { private JobLogManager getLogManager() {
return OneDev.getInstance(LogManager.class); return OneDev.getInstance(JobLogManager.class);
} }
private String asJSON(Object obj) { private String asJSON(Object obj) {


@ -4,26 +4,11 @@
color: white; color: white;
position: relative; position: relative;
} }
.build-log>.log-entry.ERROR { .build-log>.log-entry>.date {
color: red;
}
.build-log>.log-entry.WARN {
color: yellow;
}
.build-log>.log-entry.DEBUG {
color: lightgray;
}
.build-log>.log-entry.TRACE {
color: darkgray;
}
.build-log>.log-entry>* {
padding-right: 8px; padding-right: 8px;
} }
.build-log>.log-entry>.message {
padding-right: 0;
}
.build-log>.too-many-entries, .build-log>.no-entries { .build-log>.too-many-entries, .build-log>.no-entries {
color: red; color: yellow;
font-size: 16px; font-size: 16px;
font-weight: bold; font-weight: bold;
} }


@ -8,9 +8,8 @@ onedev.server.buildLog = {
onedev.server.buildLog.appendLogEntries(containerId, logEntries, maxNumOfLogEntries); onedev.server.buildLog.appendLogEntries(containerId, logEntries, maxNumOfLogEntries);
}, },
renderLogEntry: function(logEntry) { renderLogEntry: function(logEntry) {
var $logEntry = $("<div class='log-entry " + logEntry.level + "'></div>"); var $logEntry = $("<div class='log-entry'></div>");
$logEntry.append("<span class='date'>" + moment(logEntry.date).format("HH:mm:ss") + "</span>"); $logEntry.append("<span class='date'>" + moment(logEntry.date).format("HH:mm:ss") + "</span>");
$logEntry.append("<span class='log-level'>" + logEntry.level + "</span>");
var $message = $("<span class='message'></span>"); var $message = $("<span class='message'></span>");
$message.text(logEntry.message); $message.text(logEntry.message);
$logEntry.append($message); $logEntry.append($message);
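
With log levels gone, the entries rendered above carry only a timestamp and a message. A hypothetical sketch of the JobLogEntry this script consumes (the real class in io.onedev.server.ci.job.log may differ; only the date and message properties read by renderLogEntry are assumed):

import java.io.Serializable;
import java.util.Date;

// Sketch only: the two properties used by the JavaScript above.
public class JobLogEntrySketch implements Serializable {

    private static final long serialVersionUID = 1L;

    private final Date date;

    private final String message;

    public JobLogEntrySketch(Date date, String message) {
        this.date = date;
        this.message = message;
    }

    public Date getDate() {
        return date;
    }

    public String getMessage() {
        return message;
    }
}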


@ -160,7 +160,7 @@ public class CommitListPanel extends Panel {
if (command.revisions().isEmpty() && getCompareWith() != null) if (command.revisions().isEmpty() && getCompareWith() != null)
command.revisions(Lists.newArrayList(getCompareWith())); command.revisions(Lists.newArrayList(getCompareWith()));
commitHashes = command.call(null); commitHashes = command.call();
} catch (Exception e) { } catch (Exception e) {
if (e.getMessage() != null) if (e.getMessage() != null)
error(e.getMessage()); error(e.getMessage());


@ -155,7 +155,7 @@ public class TextDiffPanel extends Panel implements SourceAware {
String oldPath = change.getOldBlobIdent().path; String oldPath = change.getOldBlobIdent().path;
if (oldPath != null) { if (oldPath != null) {
cmd.commitHash(getOldCommit().name()).file(oldPath); cmd.commitHash(getOldCommit().name()).file(oldPath);
for (BlameBlock blame: cmd.call(null)) { for (BlameBlock blame: cmd.call()) {
for (LinearRange range: blame.getRanges()) { for (LinearRange range: blame.getRanges()) {
for (int i=range.getFrom(); i<=range.getTo(); i++) for (int i=range.getFrom(); i<=range.getTo(); i++)
blameInfo.oldBlame.put(i, blame.getCommit()); blameInfo.oldBlame.put(i, blame.getCommit());
@ -165,7 +165,7 @@ public class TextDiffPanel extends Panel implements SourceAware {
String newPath = change.getNewBlobIdent().path; String newPath = change.getNewBlobIdent().path;
if (newPath != null) { if (newPath != null) {
cmd.commitHash(getNewCommit().name()).file(newPath); cmd.commitHash(getNewCommit().name()).file(newPath);
for (BlameBlock blame: cmd.call(null)) { for (BlameBlock blame: cmd.call()) {
for (LinearRange range: blame.getRanges()) { for (LinearRange range: blame.getRanges()) {
for (int i=range.getFrom(); i<=range.getTo(); i++) for (int i=range.getFrom(); i<=range.getTo(); i++)
blameInfo.newBlame.put(i, blame.getCommit()); blameInfo.newBlame.put(i, blame.getCommit());


@ -17,7 +17,7 @@ import org.apache.wicket.request.resource.AbstractResource;
import com.google.common.base.Charsets; import com.google.common.base.Charsets;
import io.onedev.server.OneDev; import io.onedev.server.OneDev;
import io.onedev.server.ci.job.log.LogManager; import io.onedev.server.ci.job.log.JobLogManager;
import io.onedev.server.entitymanager.BuildManager; import io.onedev.server.entitymanager.BuildManager;
import io.onedev.server.entitymanager.ProjectManager; import io.onedev.server.entitymanager.ProjectManager;
import io.onedev.server.model.Build; import io.onedev.server.model.Build;
@ -75,7 +75,7 @@ public class BuildLogDownloadResource extends AbstractResource {
@Override @Override
public void writeData(Attributes attributes) throws IOException { public void writeData(Attributes attributes) throws IOException {
try (InputStream is = OneDev.getInstance(LogManager.class).openLogStream(build)) { try (InputStream is = OneDev.getInstance(JobLogManager.class).openLogStream(build)) {
IOUtils.copy(is, attributes.getResponse().getOutputStream()); IOUtils.copy(is, attributes.getResponse().getOutputStream());
} }
} }


@ -279,6 +279,9 @@ onedev.server = {
focusOn: function(componentId) { focusOn: function(componentId) {
if (componentId) if (componentId)
onedev.server.focus.doFocus($("#" + componentId)); onedev.server.focus.doFocus($("#" + componentId));
else if (document.activeElement != document.body)
document.activeElement.blur();
onedev.server.focus.$components = null; onedev.server.focus.$components = null;
}, },


@ -839,7 +839,7 @@ public class SourceViewPanel extends BlobViewPanel implements Positionable, Sear
BlameCommand cmd = new BlameCommand(context.getProject().getGitDir()); BlameCommand cmd = new BlameCommand(context.getProject().getGitDir());
cmd.commitHash(commitHash).file(context.getBlobIdent().path); cmd.commitHash(commitHash).file(context.getBlobIdent().path);
for (BlameBlock blame: cmd.call(null)) { for (BlameBlock blame: cmd.call()) {
BlameInfo blameInfo = new BlameInfo(); BlameInfo blameInfo = new BlameInfo();
blameInfo.commitDate = DateUtils.formatDate(blame.getCommit().getCommitter().getWhen()); blameInfo.commitDate = DateUtils.formatDate(blame.getCommit().getCommitter().getWhen());
blameInfo.authorName = HtmlEscape.escapeHtml5(blame.getCommit().getAuthor().getName()); blameInfo.authorName = HtmlEscape.escapeHtml5(blame.getCommit().getAuthor().getName());


@ -232,6 +232,7 @@ public abstract class BuildDetailPage extends ProjectPage implements InputContex
}; };
} }
target.focusComponent(null);
} }
@Override @Override


@ -1,2 +1,3 @@
<wicket:extend> <wicket:extend>
<a wicket:id="test">test</a>
</wicket:extend> </wicket:extend>


@ -3,6 +3,7 @@ package io.onedev.server.web.page.test;
import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.IHeaderResponse;
import org.apache.wicket.markup.head.JavaScriptHeaderItem; import org.apache.wicket.markup.head.JavaScriptHeaderItem;
import org.apache.wicket.markup.head.OnDomReadyHeaderItem; import org.apache.wicket.markup.head.OnDomReadyHeaderItem;
import org.apache.wicket.markup.html.link.Link;
import org.apache.wicket.request.mapper.parameter.PageParameters; import org.apache.wicket.request.mapper.parameter.PageParameters;
import io.onedev.server.web.page.base.BasePage; import io.onedev.server.web.page.base.BasePage;
@ -17,6 +18,14 @@ public class TestPage extends BasePage {
@Override @Override
protected void onInitialize() { protected void onInitialize() {
super.onInitialize(); super.onInitialize();
add(new Link<Void>("test") {
@Override
public void onClick() {
}
});
} }
@Override @Override


@ -31,7 +31,7 @@ public class BlameCommandTest extends AbstractGitTest {
Collection<BlameBlock> blames = new BlameCommand(git.getRepository().getDirectory()) Collection<BlameBlock> blames = new BlameCommand(git.getRepository().getDirectory())
.commitHash(commitHash) .commitHash(commitHash)
.file("file") .file("file")
.call(null); .call();
assertEquals(1, blames.size()); assertEquals(1, blames.size());
assertEquals(commitHash + ": 0-8", blames.iterator().next().toString()); assertEquals(commitHash + ": 0-8", blames.iterator().next().toString());
@ -53,7 +53,7 @@ public class BlameCommandTest extends AbstractGitTest {
.commitHash(commitHash) .commitHash(commitHash)
.file("file") .file("file")
.range(new LinearRange(5, 8)) .range(new LinearRange(5, 8))
.call(null); .call();
assertEquals(2, blames.size()); assertEquals(2, blames.size());
assertEquals(commitHash + ": 8-8", getBlock(blames, commitHash).toString()); assertEquals(commitHash + ": 8-8", getBlock(blames, commitHash).toString());
@ -69,7 +69,7 @@ public class BlameCommandTest extends AbstractGitTest {
blames = new BlameCommand(git.getRepository().getDirectory()) blames = new BlameCommand(git.getRepository().getDirectory())
.commitHash(commitHash) .commitHash(commitHash)
.file("file") .file("file")
.call(null); .call();
commitHash = git.getRepository().resolve("master~1").name(); commitHash = git.getRepository().resolve("master~1").name();


@ -78,7 +78,7 @@ public class LogCommandTest extends AbstractGitTest {
commits.add(commit); commits.add(commit);
} }
}.revisions(Lists.newArrayList("master")).call(null); }.revisions(Lists.newArrayList("master")).call();
assertEquals(2, commits.size()); assertEquals(2, commits.size());


@ -4,17 +4,17 @@ import java.io.File;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import io.onedev.commons.utils.LockUtils; import io.onedev.commons.utils.LockUtils;
import io.onedev.server.ci.job.DependencyPopulator; import io.onedev.server.ci.job.DependencyPopulator;
import io.onedev.server.ci.job.JobOutcome; import io.onedev.server.ci.job.JobOutcome;
import io.onedev.server.ci.job.log.JobLogger;
import io.onedev.server.model.Build; import io.onedev.server.model.Build;
public class ArtifactsPopulator implements DependencyPopulator { public class ArtifactsPopulator implements DependencyPopulator {
@Override @Override
public void populate(Build dependency, File workspace, Logger logger) { public void populate(Build dependency, File workspace, JobLogger logger) {
File outcomeDir = JobOutcome.getOutcomeDir(dependency, JobArtifacts.DIR); File outcomeDir = JobOutcome.getOutcomeDir(dependency, JobArtifacts.DIR);
LockUtils.read(JobOutcome.getLockKey(dependency, JobArtifacts.DIR), new Callable<Void>() { LockUtils.read(JobOutcome.getLockKey(dependency, JobArtifacts.DIR), new Callable<Void>() {


@ -4,11 +4,10 @@ import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import org.slf4j.Logger;
import io.onedev.commons.utils.FileUtils; import io.onedev.commons.utils.FileUtils;
import io.onedev.commons.utils.LockUtils; import io.onedev.commons.utils.LockUtils;
import io.onedev.server.ci.job.JobOutcome; import io.onedev.server.ci.job.JobOutcome;
import io.onedev.server.ci.job.log.JobLogger;
import io.onedev.server.model.Build; import io.onedev.server.model.Build;
import io.onedev.server.web.editable.annotation.Editable; import io.onedev.server.web.editable.annotation.Editable;
@ -20,7 +19,7 @@ public class JobArtifacts extends JobOutcome {
public static final String DIR = "artifacts"; public static final String DIR = "artifacts";
@Override @Override
public void process(Build build, File workspace, Logger logger) { public void process(Build build, File workspace, JobLogger logger) {
File outcomeDir = getOutcomeDir(build, DIR); File outcomeDir = getOutcomeDir(build, DIR);
FileUtils.createDir(outcomeDir); FileUtils.createDir(outcomeDir);


@ -8,11 +8,11 @@ import java.util.concurrent.Callable;
import org.apache.commons.lang3.SerializationUtils; import org.apache.commons.lang3.SerializationUtils;
import org.hibernate.validator.constraints.NotEmpty; import org.hibernate.validator.constraints.NotEmpty;
import org.slf4j.Logger;
import io.onedev.commons.utils.FileUtils; import io.onedev.commons.utils.FileUtils;
import io.onedev.commons.utils.LockUtils; import io.onedev.commons.utils.LockUtils;
import io.onedev.server.ci.job.JobOutcome; import io.onedev.server.ci.job.JobOutcome;
import io.onedev.server.ci.job.log.JobLogger;
import io.onedev.server.model.Build; import io.onedev.server.model.Build;
import io.onedev.server.web.editable.annotation.Editable; import io.onedev.server.web.editable.annotation.Editable;
@ -50,7 +50,7 @@ public class JobHtmlReport extends JobOutcome {
} }
@Override @Override
public void process(Build build, File workspace, Logger logger) { public void process(Build build, File workspace, JobLogger logger) {
File outcomeDir = getOutcomeDir(build, DIR); File outcomeDir = getOutcomeDir(build, DIR);
FileUtils.createDir(outcomeDir); FileUtils.createDir(outcomeDir);
@ -78,7 +78,7 @@ public class JobHtmlReport extends JobOutcome {
} }
} }
} else { } else {
logger.warn("Html report start page not found: " + startPage.getAbsolutePath()); logger.log("ERROR: Html report start page not found: " + startPage.getAbsolutePath());
} }
return null; return null;
} }


@ -3,22 +3,25 @@ package io.onedev.server.plugin.kubernetes;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Base64;
import java.util.Collection;
import java.util.LinkedHashMap; import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.StringTokenizer;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
import org.apache.commons.codec.Charsets; import org.apache.commons.codec.Charsets;
import org.hibernate.validator.constraints.NotEmpty; import org.hibernate.validator.constraints.NotEmpty;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml; import org.yaml.snakeyaml.Yaml;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.base.Splitter; import com.google.common.base.Splitter;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
@ -26,11 +29,13 @@ import com.google.common.collect.Lists;
import io.onedev.commons.utils.ExceptionUtils; import io.onedev.commons.utils.ExceptionUtils;
import io.onedev.commons.utils.FileUtils; import io.onedev.commons.utils.FileUtils;
import io.onedev.commons.utils.Maps; import io.onedev.commons.utils.Maps;
import io.onedev.commons.utils.StringUtils;
import io.onedev.commons.utils.command.Commandline; import io.onedev.commons.utils.command.Commandline;
import io.onedev.commons.utils.command.ExecuteResult;
import io.onedev.commons.utils.command.LineConsumer; import io.onedev.commons.utils.command.LineConsumer;
import io.onedev.k8shelper.KubernetesHelper;
import io.onedev.server.OneDev;
import io.onedev.server.OneException; import io.onedev.server.OneException;
import io.onedev.server.ci.job.log.JobLogger;
import io.onedev.server.entitymanager.SettingManager;
import io.onedev.server.model.support.JobContext; import io.onedev.server.model.support.JobContext;
import io.onedev.server.model.support.JobExecutor; import io.onedev.server.model.support.JobExecutor;
import io.onedev.server.plugin.kubernetes.KubernetesExecutor.TestData; import io.onedev.server.plugin.kubernetes.KubernetesExecutor.TestData;
@ -125,7 +130,20 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
} }
@Override @Override
public void execute(String jobId, JobContext context) { public void execute(String jobToken, JobContext jobContext) {
execute(jobContext.getEnvironment(), jobToken, jobContext.getLogger(), jobContext);
}
@Override
public void test(TestData testData) {
execute(testData.getDockerImage(), KubernetesResource.TEST_JOB_TOKEN, new JobLogger() {
@Override
public void log(String message, Throwable t) {
logger.info(message, t);
}
}, null);
} }
@Override @Override
@ -147,28 +165,30 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
return cmdline; return cmdline;
} }
private String createResource(Map<Object, Object> resourceData, Logger logger) { private String createResource(Map<Object, Object> resourceDef, JobLogger logger) {
Commandline kubectl = newKubeCtl(); Commandline kubectl = newKubeCtl();
File file = null; File file = null;
try { try {
AtomicReference<String> resourceNameRef = new AtomicReference<String>(null); AtomicReference<String> resourceNameRef = new AtomicReference<String>(null);
file = File.createTempFile("k8s", ".yaml"); file = File.createTempFile("k8s", ".yaml");
FileUtils.writeFile(file, new Yaml().dump(resourceData), Charsets.UTF_8.name());
kubectl.addArgs("create", "-f", file.getAbsolutePath()); String resourceYaml = new Yaml().dump(resourceDef);
KubernetesExecutor.logger.trace("Kubernetes: creating resource with yaml:\n" + resourceYaml);
FileUtils.writeFile(file, resourceYaml, Charsets.UTF_8.name());
kubectl.addArgs("create", "-f", file.getAbsolutePath(), "-o", "jsonpath={.metadata.name}");
kubectl.execute(new LineConsumer() { kubectl.execute(new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
logger.info(line); resourceNameRef.set(line);
line = StringUtils.substringAfter(line, "/");
resourceNameRef.set(StringUtils.substringBefore(line, " "));
} }
}, new LineConsumer() { }, new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
logger.error(line); logger.log("Kubernetes: " + line);
} }
}).checkReturnCode(); }).checkReturnCode();
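
Instead of parsing kubectl's human-readable "pod/xyz created" message, createResource() now asks kubectl for the generated name directly via -o jsonpath={.metadata.name}, so the single line on stdout is the resource name. A standalone sketch of the same idea with plain JDK process handling (the manifest file name is illustrative):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class CreateResourceSketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        // kubectl prints only the generated metadata.name, e.g. "job-x7k2p"
        Process process = new ProcessBuilder("kubectl", "create", "-f", "pod.yaml",
                "-o", "jsonpath={.metadata.name}").start();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(process.getInputStream()))) {
            String resourceName = reader.readLine(); // analogous to resourceNameRef.set(line)
            System.out.println("created resource: " + resourceName);
        }
        process.waitFor();
    }
}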
@ -182,37 +202,36 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
} }
} }
private void deleteResource(String resourceType, String resourceName, Logger logger) { private void deleteResource(String resourceType, String resourceName, JobLogger logger) {
Commandline cmd = newKubeCtl(); Commandline cmd = newKubeCtl();
cmd.addArgs("delete", resourceType, resourceName, "--namespace=" + getNamespace()); cmd.addArgs("delete", resourceType, resourceName, "--namespace=" + getNamespace());
cmd.execute(new LineConsumer() { cmd.execute(new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
logger.info(line); KubernetesExecutor.logger.debug(line);
} }
}, new LineConsumer() { }, new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
logger.error(line); logger.log("Kubernetes: " + line);
} }
}).checkReturnCode(); }).checkReturnCode();
} }
private void createNamespaceIfNotExist(Logger logger) { private void createNamespaceIfNotExist(JobLogger logger) {
Commandline cmd = newKubeCtl(); Commandline cmd = newKubeCtl();
cmd.addArgs("get", "namespaces"); String query = String.format("{.items[?(@.metadata.name=='%s')]}", getNamespace());
cmd.addArgs("get", "namespaces", "-o", "jsonpath=" + query);
AtomicBoolean hasNamespace = new AtomicBoolean(false); AtomicBoolean hasNamespace = new AtomicBoolean(false);
cmd.execute(new LineConsumer() { cmd.execute(new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
logger.debug(line);
if (line.startsWith(getNamespace() + " "))
hasNamespace.set(true); hasNamespace.set(true);
} }
@ -220,7 +239,7 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
@Override @Override
public void consume(String line) { public void consume(String line) {
logger.error(line); logger.log("Kubernetes: " + line);
} }
}).checkReturnCode(); }).checkReturnCode();
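
The namespace check likewise switches from scanning the full `kubectl get namespaces` listing to a jsonpath filter: the query only matches an item whose metadata.name equals the executor's namespace, so any output at all means the namespace already exists. A rough standalone equivalent (the namespace name is a placeholder):

import java.io.IOException;

public class NamespaceCheckSketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        String namespace = "onedev-ci"; // illustrative
        String query = String.format("{.items[?(@.metadata.name=='%s')]}", namespace);
        Process process = new ProcessBuilder(
                "kubectl", "get", "namespaces", "-o", "jsonpath=" + query)
                .inheritIO().start();
        // Empty output => namespace missing; any output => it exists (hasNamespace above).
        process.waitFor();
    }
}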
@ -232,28 +251,20 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
@Override @Override
public void consume(String line) { public void consume(String line) {
logger.debug(line); KubernetesExecutor.logger.debug(line);
} }
}, new LineConsumer() { }, new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
logger.error(line); logger.log("Kubernetes: " + line);
} }
}).checkReturnCode(); }).checkReturnCode();
} }
} }
private String getResourceNamePrefix() {
try {
return "onedev-ci-" + InetAddress.getLocalHost().getHostName() + "-" + getName() + "-";
} catch (UnknownHostException e) {
throw new RuntimeException(e);
}
}
private List<Object> getImagePullSecretsData() { private List<Object> getImagePullSecretsData() {
List<Object> data = new ArrayList<>(); List<Object> data = new ArrayList<>();
if (getImagePullSecrets() != null) { if (getImagePullSecrets() != null) {
@ -270,53 +281,121 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
return data; return data;
} }
private String getOS(Logger logger) { private String getOSName(JobLogger logger) {
logger.info("Checking OS..."); logger.log("Checking working node OS...");
Commandline kubectl = newKubeCtl(); Commandline kubectl = newKubeCtl();
kubectl.addArgs("get", "nodes", "-o=jsonpath={..nodeInfo.operatingSystem}"); kubectl.addArgs("get", "nodes", "-o", "jsonpath={..nodeInfo.operatingSystem}");
for (NodeSelectorEntry entry: getNodeSelector()) for (NodeSelectorEntry entry: getNodeSelector())
kubectl.addArgs("-l", entry.getLabelName() + "=" + entry.getLabelValue()); kubectl.addArgs("-l", entry.getLabelName() + "=" + entry.getLabelValue());
AtomicReference<String> osRef = new AtomicReference<>(null); AtomicReference<String> osNameRef = new AtomicReference<>(null);
kubectl.execute(new LineConsumer() { kubectl.execute(new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
osRef.set(line); osNameRef.set(line);
} }
}, new LineConsumer() { }, new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
logger.error(line); logger.log("Kubernetes: " + line);
} }
}).checkReturnCode(); }).checkReturnCode();
return Preconditions.checkNotNull(osRef.get(), "No applicable working nodes for this executor"); String osName = osNameRef.get();
if (osName != null) {
logger.log(String.format("OS of working node is '%s'", osName));
return osName;
} else {
throw new OneException("No applicable working nodes found for executor '" + getName() + "'");
}
} }
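
getOSName() narrows `kubectl get nodes` with the executor's node-selector labels and asks jsonpath for just the nodeInfo.operatingSystem field, so the consumed line is the OS name (for example "linux"), while empty output triggers the OneException above. An equivalent standalone probe (the label selector shown is a placeholder):

import java.io.IOException;

public class NodeOsProbeSketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        Process process = new ProcessBuilder(
                "kubectl", "get", "nodes",
                "-o", "jsonpath={..nodeInfo.operatingSystem}",
                "-l", "kubernetes.io/os=linux") // placeholder label selector
                .inheritIO().start();
        // Prints e.g. "linux"; empty output means no node matched the selector.
        process.waitFor();
    }
}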
@Override private String getServerUrl() {
public void test(TestData testData) { return OneDev.getInstance(SettingManager.class).getSystemSetting().getServerUrl();
}
private List<Map<Object, Object>> getSecretEnvs(String secretName, Collection<String> secretKeys) {
List<Map<Object, Object>> secretEnvs = new ArrayList<>();
for (String secretKey: secretKeys) {
Map<Object, Object> secretEnv = new LinkedHashMap<>();
secretEnv.put("name", secretKey);
secretEnv.put("valueFrom", Maps.newLinkedHashMap("secretKeyRef", Maps.newLinkedHashMap(
"name", secretName,
"key", secretKey)));
secretEnvs.add(secretEnv);
}
return secretEnvs;
}
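
getSecretEnvs() maps each secret key to a container environment variable backed by a secretKeyRef, so the value itself never appears in the pod manifest. The nested maps built above correspond to the following per-entry structure, sketched here with plain JDK collections; the key and secret names are placeholders:

import java.util.LinkedHashMap;
import java.util.Map;

public class SecretEnvSketch {

    public static void main(String[] args) {
        // Kubernetes env entry:
        //   - name: ONEDEV_JOB_TOKEN        (placeholder for KubernetesHelper.ENV_JOB_TOKEN)
        //     valueFrom:
        //       secretKeyRef:
        //         name: secret-x7k2p        (generated secret resource name)
        //         key: ONEDEV_JOB_TOKEN
        Map<String, Object> secretKeyRef = new LinkedHashMap<>();
        secretKeyRef.put("name", "secret-x7k2p");
        secretKeyRef.put("key", "ONEDEV_JOB_TOKEN");

        Map<String, Object> env = new LinkedHashMap<>();
        env.put("name", "ONEDEV_JOB_TOKEN");
        env.put("valueFrom", Map.of("secretKeyRef", secretKeyRef));

        System.out.println(env);
    }
}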
private void execute(String dockerImage, String jobToken, JobLogger logger, @Nullable JobContext jobContext) {
createNamespaceIfNotExist(logger); createNamespaceIfNotExist(logger);
String os = getOS(logger); Map<String, String> secrets = Maps.newLinkedHashMap(KubernetesHelper.ENV_JOB_TOKEN, jobToken);
String secretName = createSecret(secrets, logger);
try {
String osName = getOSName(logger);
Map<String, Object> podSpec = new LinkedHashMap<>(); Map<String, Object> podSpec = new LinkedHashMap<>();
Map<Object, Object> containerSpec = Maps.newHashMap(
"name", "test",
"image", testData.getDockerImage());
if (os.equalsIgnoreCase("linux")) { Map<Object, Object> mainContainerSpec = Maps.newHashMap(
containerSpec.put("command", Lists.newArrayList("sh")); "name", "main",
containerSpec.put("args", Lists.newArrayList("-c", "echo hello from container")); "image", dockerImage);
Map<String, String> emptyDirMount = new LinkedHashMap<>();
String classPath;
if (osName.equalsIgnoreCase("linux")) {
mainContainerSpec.put("command", Lists.newArrayList("sh"));
mainContainerSpec.put("args", Lists.newArrayList(".onedev/job-commands-wrapper.sh"));
emptyDirMount.put("mountPath", "/onedev-workspace");
classPath = "/k8s-helper/*";
} else { } else {
containerSpec.put("command", Lists.newArrayList("cmd")); mainContainerSpec.put("command", Lists.newArrayList("cmd"));
containerSpec.put("args", Lists.newArrayList("/c", "echo hello from container")); mainContainerSpec.put("args", Lists.newArrayList("/c", ".onedev\\job-commands-wrapper.bat"));
emptyDirMount.put("mountPath", "C:\\onedev-workspace");
classPath = "C:\\k8s-helper\\*";
} }
mainContainerSpec.put("workingDir", emptyDirMount.get("mountPath"));
emptyDirMount.put("name", "workspace");
mainContainerSpec.put("volumeMounts", Lists.<Object>newArrayList(emptyDirMount));
Map<Object, Object> resources = Maps.newLinkedHashMap("requests", Maps.newLinkedHashMap("cpu", "1"));
mainContainerSpec.put("resources", resources);
podSpec.put("containers", Lists.<Object>newArrayList(containerSpec)); List<Map<Object, Object>> envs = new ArrayList<>();
Map<Object, Object> serverUrlEnv = Maps.newLinkedHashMap(
"name", KubernetesHelper.ENV_SERVER_URL,
"value", getServerUrl());
envs.add(serverUrlEnv);
envs.addAll(getSecretEnvs(secretName, secrets.keySet()));
List<String> sidecarArgs = Lists.newArrayList("-classpath", classPath, "io.onedev.k8shelper.SideCar");
List<String> initArgs = Lists.newArrayList("-classpath", classPath, "io.onedev.k8shelper.Init");
if (jobContext == null) {
sidecarArgs.add("test");
initArgs.add("test");
}
Map<Object, Object> sidecarContainerSpec = Maps.newHashMap(
"name", "sidecar",
"image", "1dev/k8s-helper",
"command", Lists.newArrayList("java"),
"args", sidecarArgs,
"env", envs,
"volumeMounts", Lists.<Object>newArrayList(emptyDirMount));
Map<Object, Object> initContainerSpec = Maps.newHashMap(
"name", "init",
"image", "1dev/k8s-helper",
"command", Lists.newArrayList("java"),
"args", initArgs,
"env", envs,
"resources", resources,
"volumeMounts", Lists.<Object>newArrayList(emptyDirMount));
podSpec.put("containers", Lists.<Object>newArrayList(mainContainerSpec, sidecarContainerSpec));
podSpec.put("initContainers", Lists.<Object>newArrayList(initContainerSpec));
Map<String, String> nodeSelectorData = getNodeSelectorData(); Map<String, String> nodeSelectorData = getNodeSelectorData();
if (!nodeSelectorData.isEmpty()) if (!nodeSelectorData.isEmpty())
@ -327,113 +406,301 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
if (getServiceAccount() != null) if (getServiceAccount() != null)
podSpec.put("serviceAccountName", getServiceAccount()); podSpec.put("serviceAccountName", getServiceAccount());
podSpec.put("restartPolicy", "Never"); podSpec.put("restartPolicy", "Never");
Map<Object, Object> podData = Maps.newLinkedHashMap( podSpec.put("volumes", Lists.<Object>newArrayList(Maps.newLinkedHashMap(
"name", "workspace",
"emptyDir", Maps.newLinkedHashMap())));
Map<Object, Object> podDef = Maps.newLinkedHashMap(
"apiVersion", "v1", "apiVersion", "v1",
"kind", "Pod", "kind", "Pod",
"metadata", Maps.newLinkedHashMap( "metadata", Maps.newLinkedHashMap(
"generateName", getResourceNamePrefix() + "test-", "generateName", "job-",
"namespace", getNamespace()), "namespace", getNamespace()),
"spec", podSpec); "spec", podSpec);
String podName = createResource(podData, logger); String podName = createResource(podDef, logger);
try { try {
waitForPod(podName, logger); logger.log("Preparing job environment...");
watchPod(podName, new StatusChecker() {
@Override
public StopWatch check(JsonNode statusNode) {
JsonNode initContainerStatusesNode = statusNode.get("initContainerStatuses");
if (initContainerStatusesNode != null) {
for (JsonNode initContainerStatusNode: initContainerStatusesNode) {
JsonNode stateNode = initContainerStatusNode.get("state");
if (initContainerStatusNode.get("name").asText().equals("init")
&& (stateNode.get("running") != null || stateNode.get("terminated") != null)) {
return new StopWatch(null);
}
}
}
return null;
}
}, logger);
if (jobContext != null)
jobContext.notifyJobRunning();
waitForContainerStop(podName, "init", logger);
watchPod(podName, new StatusChecker() {
@Override
public StopWatch check(JsonNode statusNode) {
JsonNode initContainerStatusesNode = statusNode.get("initContainerStatuses");
String errorMessage = getContainerError(initContainerStatusesNode, "init");
if (errorMessage != null)
return new StopWatch(new OneException("Error executing init logic: " + errorMessage));
JsonNode containerStatusesNode = statusNode.get("containerStatuses");
if (isContainerStarted(containerStatusesNode, "main"))
return new StopWatch(null);
else
return null;
}
}, logger);
waitForContainerStop(podName, "main", logger);
watchPod(podName, new StatusChecker() {
@Override
public StopWatch check(JsonNode statusNode) {
JsonNode containerStatusesNode = statusNode.get("containerStatuses");
String errorMessage = getContainerError(containerStatusesNode, "main");
if (errorMessage != null)
return new StopWatch(new OneException(errorMessage));
if (isContainerStarted(containerStatusesNode, "sidecar"))
return new StopWatch(null);
else
return null;
}
}, logger);
waitForContainerStop(podName, "sidecar", logger);
watchPod(podName, new StatusChecker() {
@Override
public StopWatch check(JsonNode statusNode) {
JsonNode containerStatusesNode = statusNode.get("containerStatuses");
String errorMessage = getContainerError(containerStatusesNode, "sidecar");
if (errorMessage != null)
return new StopWatch(new OneException("Error executing sidecar logic: " + errorMessage));
else if (isContainerStopped(containerStatusesNode, "sidecar"))
return new StopWatch(null);
else
return null;
}
}, logger);
} finally { } finally {
deleteResource("pod", podName, logger); deleteResource("pod", podName, logger);
} }
} finally {
deleteResource("secret", secretName, logger);
}
} }
private void waitForPod(String podName, Logger logger) { @Nullable
Thread thread = Thread.currentThread(); private String getContainerError(@Nullable JsonNode containerStatusesNode, String containerName) {
if (containerStatusesNode != null) {
for (JsonNode containerStatusNode: containerStatusesNode) {
JsonNode stateNode = containerStatusNode.get("state");
if (containerStatusNode.get("name").asText().equals(containerName)) {
JsonNode terminatedNode = stateNode.get("terminated");
if (terminatedNode != null) {
String reason = terminatedNode.get("reason").asText();
if (!reason.equals("Completed")) {
JsonNode messageNode = terminatedNode.get("message");
if (messageNode != null) {
return messageNode.asText();
} else {
JsonNode exitCodeNode = terminatedNode.get("exitCode");
if (exitCodeNode != null && exitCodeNode.asInt() != 0)
return "exit code: " + exitCodeNode.asText();
else
return reason;
}
}
}
break;
}
}
}
return null;
}
AtomicBoolean podStartedRef = new AtomicBoolean(false); private boolean isContainerStarted(@Nullable JsonNode containerStatusesNode, String containerName) {
AtomicReference<String> podErrorRef = new AtomicReference<String>(null); if (containerStatusesNode != null) {
for (JsonNode containerStatusNode: containerStatusesNode) {
if (containerStatusNode.get("name").asText().equals(containerName)) {
JsonNode stateNode = containerStatusNode.get("state");
if (stateNode.get("running") != null || stateNode.get("terminated") != null)
return true;
break;
}
}
}
return false;
}
private boolean isContainerStopped(@Nullable JsonNode containerStatusesNode, String containerName) {
if (containerStatusesNode != null) {
for (JsonNode containerStatusNode: containerStatusesNode) {
if (containerStatusNode.get("name").asText().equals(containerName)) {
JsonNode stateNode = containerStatusNode.get("state");
if (stateNode.get("terminated") != null)
return true;
break;
}
}
}
return false;
}
private String createSecret(Map<String, String> secrets, JobLogger logger) {
Map<String, String> encodedSecrets = new LinkedHashMap<>();
for (Map.Entry<String, String> entry: secrets.entrySet())
encodedSecrets.put(entry.getKey(), Base64.getEncoder().encodeToString(entry.getValue().getBytes(Charsets.UTF_8)));
Map<Object, Object> secretDef = Maps.newLinkedHashMap(
"apiVersion", "v1",
"kind", "Secret",
"metadata", Maps.newLinkedHashMap(
"generateName", "secret-",
"namespace", getNamespace()),
"data", encodedSecrets);
return createResource(secretDef, logger);
}
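
createSecret() base64-encodes every value because the data field of a Kubernetes Secret manifest expects encoded content, and kubectl generates the resource name from the secret- prefix. A minimal sketch of the encoding step and the resulting manifest shape (values are placeholders):

import java.nio.charset.StandardCharsets;
import java.util.Base64;

public class SecretEncodingSketch {

    public static void main(String[] args) {
        String token = "placeholder-job-token";
        String encoded = Base64.getEncoder()
                .encodeToString(token.getBytes(StandardCharsets.UTF_8));
        // Resulting manifest, as dumped to the temporary yaml file:
        //   apiVersion: v1
        //   kind: Secret
        //   metadata: {generateName: secret-, namespace: <executor namespace>}
        //   data: {ONEDEV_JOB_TOKEN: <encoded>}
        System.out.println(encoded);
    }
}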
private void watchPod(String podName, StatusChecker statusChecker, JobLogger logger) {
Commandline kubectl = newKubeCtl(); Commandline kubectl = newKubeCtl();
kubectl.addArgs("get", "event", "-n", getNamespace(), "--no-headers",
"--field-selector", "involvedObject.name=" + podName, "--watch"); ObjectMapper mapper = new ObjectMapper();
AtomicReference<StopWatch> stopWatchRef = new AtomicReference<>(null);
StringBuilder json = new StringBuilder();
kubectl.addArgs("get", "pod", podName, "-n", getNamespace(), "--watch", "-o", "json");
Thread thread = Thread.currentThread();
try { try {
kubectl.execute(new LineConsumer() { kubectl.execute(new LineConsumer() {
@Override @Override
public void consume(String line) { public void consume(String line) {
StringTokenizer tokenizer = new StringTokenizer(line); if (line.startsWith("{")) {
tokenizer.nextToken(); json.append("{").append("\n");
String type = tokenizer.nextToken(); } else if (line.startsWith("}")) {
tokenizer.nextToken(); json.append("}");
tokenizer.nextToken();
String message = tokenizer.nextToken("\n").trim();
if (type.equals("Normal"))
logger.info(message);
else
logger.error(message);
if (!type.equals("Normal") && !message.contains("Insufficient cpu")) {
podErrorRef.set(message);
thread.interrupt();
} else if (message.startsWith("Started container")) {
podStartedRef.set(true);
thread.interrupt();
}
}
}, new LineConsumer() {
@Override
public void consume(String line) {
logger.error(line);
}
});
throw new OneException("Unexpected end of pod event watching");
} catch (Exception e) {
if (ExceptionUtils.find(e, InterruptedException.class) != null) {
if (podStartedRef.get()) {
kubectl = newKubeCtl();
kubectl.addArgs("logs", podName, "-n", getNamespace(), "--follow");
while (true) {
AtomicReference<Boolean> containerCreatingRef = new AtomicReference<Boolean>(false);
ExecuteResult result = kubectl.execute(new LineConsumer() {
@Override
public void consume(String line) {
logger.info(line);
}
}, new LineConsumer() {
@Override
public void consume(String line) {
if (line.contains("is waiting to start: ContainerCreating"))
containerCreatingRef.set(true);
else
logger.error(line);
}
});
if (containerCreatingRef.get()) {
try { try {
Thread.sleep(1000); process(mapper.readTree(json.toString()));
} catch (InterruptedException e2) { } catch (IOException e) {
throw new RuntimeException(e2); KubernetesExecutor.logger.error("Error reading json", e);
} }
json.setLength(0);
} else { } else {
result.checkReturnCode(); json.append(line).append("\n");
}
}
private void process(JsonNode podNode) {
String errorMessage = null;
JsonNode statusNode = podNode.get("status");
JsonNode conditionsNode = statusNode.get("conditions");
if (conditionsNode != null) {
for (JsonNode conditionNode: conditionsNode) {
if (conditionNode.get("type").asText().equals("PodScheduled")
&& conditionNode.get("status").asText().equals("False")
&& conditionNode.get("reason").asText().equals("Unschedulable")) {
logger.log(conditionNode.get("message").asText());
}
}
}
Collection<JsonNode> containerStatusNodes = new ArrayList<>();
JsonNode initContainerStatusesNode = statusNode.get("initContainerStatuses");
if (initContainerStatusesNode != null) {
for (JsonNode containerStatusNode: initContainerStatusesNode)
containerStatusNodes.add(containerStatusNode);
}
JsonNode containerStatusesNode = statusNode.get("containerStatuses");
if (containerStatusesNode != null) {
for (JsonNode containerStatusNode: containerStatusesNode)
containerStatusNodes.add(containerStatusNode);
}
for (JsonNode containerStatusNode: containerStatusNodes) {
JsonNode stateNode = containerStatusNode.get("state");
JsonNode waitingNode = stateNode.get("waiting");
if (waitingNode != null) {
String reason = waitingNode.get("reason").asText();
if (reason.equals("ErrImagePull") || reason.equals("InvalidImageName")
|| reason.equals("ImageInspectError") || reason.equals("ErrImageNeverPull")
|| reason.equals("RegistryUnavailable")) {
JsonNode messageNode = waitingNode.get("message");
if (messageNode != null)
errorMessage = messageNode.asText();
else
errorMessage = reason;
break; break;
} }
} }
} else if (podErrorRef.get() != null) { }
throw new OneException(podErrorRef.get()); if (errorMessage != null)
stopWatchRef.set(new StopWatch(new OneException(errorMessage)));
else
stopWatchRef.set(statusChecker.check(statusNode));
if (stopWatchRef.get() != null)
thread.interrupt();
}
}, new LineConsumer() {
@Override
public void consume(String line) {
logger.log("Kubernetes: " + line);
}
}).checkReturnCode();
throw new OneException("Unexpected end of pod watching");
} catch (Exception e) {
StopWatch stopWatch = stopWatchRef.get();
if (stopWatch != null) {
if (stopWatch.getException() != null)
throw stopWatch.getException();
} else { } else {
throw e; throw ExceptionUtils.unchecked(e);
}
} else {
throw e;
} }
} }
} }
private void waitForContainerStop(String podName, String containerName, JobLogger logger) {
Commandline kubectl = newKubeCtl();
kubectl.addArgs("logs", podName, "-c", containerName, "-n", getNamespace(), "--follow");
kubectl.execute(new LineConsumer() {
@Override
public void consume(String line) {
logger.log(line);
}
}, new LineConsumer() {
@Override
public void consume(String line) {
logger.log(line);
}
}).checkReturnCode();
}
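
waitForContainerStop() doubles as log streaming and as a wait: kubectl logs --follow only returns once the targeted container terminates, so when checkReturnCode() passes the container is known to have stopped and the following watchPod() call can inspect its terminal state. Roughly, assuming kubectl is on the PATH (pod, container and namespace names are placeholders):

import java.io.IOException;

public class FollowLogsSketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        Process process = new ProcessBuilder(
                "kubectl", "logs", "job-x7k2p", "-c", "main", "-n", "onedev-ci", "--follow")
                .inheritIO().start();
        // Blocks until the "main" container exits, then the pod status can be examined.
        int exitCode = process.waitFor();
        System.out.println("kubectl logs exited with " + exitCode);
    }
}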
@Editable @Editable
public static class NodeSelectorEntry implements Serializable { public static class NodeSelectorEntry implements Serializable {
@ -463,6 +730,27 @@ public class KubernetesExecutor extends JobExecutor implements Testable<TestData
} }
private static interface StatusChecker {
StopWatch check(JsonNode statusNode);
}
private static class StopWatch {
private final RuntimeException exception;
public StopWatch(@Nullable RuntimeException exception) {
this.exception = exception;
}
@Nullable
public RuntimeException getException() {
return exception;
}
}
@Editable(name="Specify a Docker Image to Test Against") @Editable(name="Specify a Docker Image to Test Against")
public static class TestData implements Serializable { public static class TestData implements Serializable {


@ -1,113 +0,0 @@
package io.onedev.server.plugin.kubernetes;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import org.apache.commons.lang.SystemUtils;
import io.onedev.commons.utils.FileUtils;
import io.onedev.commons.utils.command.Commandline;
import io.onedev.commons.utils.command.LineConsumer;
import io.onedev.k8shelper.KubernetesHelper;
import io.onedev.server.OneDev;
import io.onedev.server.entitymanager.SettingManager;
import io.onedev.server.model.support.JobContext;
import io.onedev.server.model.support.JobExecutor;
import io.onedev.server.web.editable.annotation.Editable;
@Editable(order=400)
public class KubernetesHelperTester extends JobExecutor {
private static final long serialVersionUID = 1L;
@Override
public void execute(String jobId, JobContext context) {
context.notifyJobRunning();
SettingManager settingManager = OneDev.getInstance(SettingManager.class);
String serverUrl = settingManager.getSystemSetting().getServerUrl();
File workspace = FileUtils.createTempDir("k8s-workspace");
try {
KubernetesHelper.init(serverUrl, jobId, workspace);
Future<?> sidecar = OneDev.getInstance(ExecutorService.class).submit(new Runnable() {
@Override
public void run() {
KubernetesHelper.sidecar(serverUrl, jobId, workspace);
}
});
try {
Commandline cmd;
if (SystemUtils.IS_OS_WINDOWS) {
File scriptFile = new File(workspace, "onedev-job-commands.bat");
try {
FileUtils.writeLines(scriptFile, context.getCommands(), "\r\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
cmd = new Commandline("cmd");
cmd.addArgs("/c", scriptFile.getAbsolutePath());
} else {
File scriptFile = new File(workspace, "onedev-job-commands.sh");
try {
FileUtils.writeLines(scriptFile, context.getCommands(), "\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
cmd = new Commandline("sh");
cmd.addArgs(scriptFile.getAbsolutePath());
}
cmd.workingDir(workspace);
cmd.environments(context.getEnvVars());
cmd.execute(new LineConsumer() {
@Override
public void consume(String line) {
context.getLogger().info(line);
}
}, new LineConsumer() {
@Override
public void consume(String line) {
context.getLogger().error(line);
}
}).checkReturnCode();
} finally {
try {
new File(workspace, KubernetesHelper.JOB_FINISH_FILE).createNewFile();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
try {
sidecar.get();
} catch (InterruptedException e) {
sidecar.cancel(true);
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
} finally {
FileUtils.deleteDir(workspace);
}
}
@Override
public void checkCaches() {
}
@Override
public void cleanDir(File dir) {
FileUtils.cleanDir(dir);
}
}


@ -6,7 +6,6 @@ import org.glassfish.jersey.server.ResourceConfig;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import io.onedev.commons.launcher.bootstrap.Bootstrap;
import io.onedev.commons.launcher.loader.AbstractPluginModule; import io.onedev.commons.launcher.loader.AbstractPluginModule;
import io.onedev.commons.launcher.loader.ImplementationProvider; import io.onedev.commons.launcher.loader.ImplementationProvider;
import io.onedev.server.model.support.JobExecutor; import io.onedev.server.model.support.JobExecutor;
@ -32,10 +31,7 @@ public class KubernetesModule extends AbstractPluginModule {
@Override @Override
public Collection<Class<?>> getImplementations() { public Collection<Class<?>> getImplementations() {
Collection<Class<?>> implementations = Sets.newHashSet(KubernetesExecutor.class); return Sets.newHashSet(KubernetesExecutor.class);
if (Bootstrap.sandboxMode)
implementations.add(KubernetesHelperTester.class);
return implementations;
} }
}); });


@ -6,6 +6,7 @@ import java.io.OutputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.UUID;
import javax.inject.Inject; import javax.inject.Inject;
import javax.inject.Singleton; import javax.inject.Singleton;
@ -32,6 +33,8 @@ import io.onedev.server.model.support.JobContext;
@Singleton @Singleton
public class KubernetesResource { public class KubernetesResource {
public static final String TEST_JOB_TOKEN = UUID.randomUUID().toString();
private final JobManager jobManager; private final JobManager jobManager;
@Context @Context
@ -84,13 +87,23 @@ public class KubernetesResource {
return Response.ok().build(); return Response.ok().build();
} }
@GET
@Path("/test")
public Response test() {
String jobToken = request.getHeader(JobManager.JOB_TOKEN_HTTP_HEADER);
if (TEST_JOB_TOKEN.equals(jobToken))
return Response.ok().build();
else
return Response.status(400).entity("Invalid or no job token").build();
}
private JobContext getJobContext() { private JobContext getJobContext() {
String jobId = request.getHeader(JobManager.JOB_ID_HTTP_HEADER); String jobToken = request.getHeader(JobManager.JOB_TOKEN_HTTP_HEADER);
if (jobId == null) if (jobToken == null)
throw new OneException("Http header '" + JobManager.JOB_ID_HTTP_HEADER + "' is expected"); throw new OneException("Http header '" + JobManager.JOB_TOKEN_HTTP_HEADER + "' is expected");
JobContext context = jobManager.getJobContext(jobId); JobContext context = jobManager.getJobContext(jobToken);
if (context == null) if (context == null)
throw new OneException("No job context found for specified job id"); throw new OneException("No job context found for specified job token");
return context; return context;
} }
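
The new /test endpoint lets the executor's test action verify connectivity and token handling without scheduling a real job: a request carrying the well-known TEST_JOB_TOKEN gets a 200, anything else a 400. A hypothetical client-side probe; the REST path and the header name behind JobManager.JOB_TOKEN_HTTP_HEADER are OneDev-internal and shown here only as placeholders:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class TestEndpointProbe {

    public static void main(String[] args) throws Exception {
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://onedev.example.com/rest/k8s/test")) // assumed path
                .header("X-ONEDEV-JOB-TOKEN", "the-test-job-token")          // assumed header
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode()); // 200 if the token matches TEST_JOB_TOKEN
    }
}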


@ -83,6 +83,7 @@ public class DefaultMavenCISpecProvider implements DefaultCISpecProvider {
* OneDev using extracted version for current build * OneDev using extracted version for current build
*/ */
job.setCommands("" job.setCommands(""
+ "echo \"Detecting project version (may require some time while downloading maven dependencies)...\"\n"
+ "buildVersion=$(mvn org.apache.maven.plugins:maven-help-plugin:3.1.0:evaluate -Dexpression=project.version -q -DforceStdout)\n" + "buildVersion=$(mvn org.apache.maven.plugins:maven-help-plugin:3.1.0:evaluate -Dexpression=project.version -q -DforceStdout)\n"
+ "echo \"##onedev[SetBuildVersion '$buildVersion']\"\n" + "echo \"##onedev[SetBuildVersion '$buildVersion']\"\n"
+ "echo\n" + "echo\n"


@ -1,28 +0,0 @@
package io.onedev.server.plugin.maven;
import javax.inject.Singleton;
import io.onedev.server.ci.job.log.LogLevel;
import io.onedev.server.ci.job.log.LogNormalizer;
@Singleton
public class MavenLogNormalizer implements LogNormalizer {
@Override
public Normalized normalize(String message) {
if (message.startsWith("[INFO] ")) {
return new Normalized(LogLevel.INFO, message.substring("[INFO] ".length()));
} else if (message.startsWith("[ERROR] ")) {
return new Normalized(LogLevel.ERROR, message.substring("[ERROR] ".length()));
} else if (message.startsWith("[WARNING] ")) {
return new Normalized(LogLevel.WARN, message.substring("[WARNING] ".length()));
} else if (message.startsWith("[DEBUG] ")) {
return new Normalized(LogLevel.DEBUG, message.substring("[DEBUG] ".length()));
} else if (message.startsWith("[TRACE] ")) {
return new Normalized(LogLevel.TRACE, message.substring("[TRACE] ".length()));
} else {
return null;
}
}
}
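Note: for context, this is the behavior dropped along with the LogNormalizer extension point; the deleted class mapped Maven's level prefixes to structured log levels:

    // Example of what the removed normalizer did (kept here only for reference):
    LogNormalizer normalizer = new MavenLogNormalizer();
    normalizer.normalize("[WARNING] Using platform encoding");  // -> Normalized(LogLevel.WARN, "Using platform encoding")
    normalizer.normalize("some unprefixed line");               // -> null, no prefix recognized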

View File

@ -2,7 +2,6 @@ package io.onedev.server.plugin.maven;
import io.onedev.commons.launcher.loader.AbstractPluginModule; import io.onedev.commons.launcher.loader.AbstractPluginModule;
import io.onedev.server.ci.DefaultCISpecProvider; import io.onedev.server.ci.DefaultCISpecProvider;
import io.onedev.server.ci.job.log.LogNormalizer;
/** /**
* NOTE: Do not forget to rename moduleClass property defined in the pom if you've renamed this class. * NOTE: Do not forget to rename moduleClass property defined in the pom if you've renamed this class.
@ -16,7 +15,6 @@ public class MavenModule extends AbstractPluginModule {
// put your guice bindings here // put your guice bindings here
contribute(DefaultCISpecProvider.class, DefaultMavenCISpecProvider.class); contribute(DefaultCISpecProvider.class, DefaultMavenCISpecProvider.class);
contribute(LogNormalizer.class, MavenLogNormalizer.class);
} }
} }

View File

@ -37,6 +37,7 @@ import io.onedev.server.ci.job.cache.CacheAllocation;
import io.onedev.server.ci.job.cache.CacheCallable; import io.onedev.server.ci.job.cache.CacheCallable;
import io.onedev.server.ci.job.cache.CacheRunner; import io.onedev.server.ci.job.cache.CacheRunner;
import io.onedev.server.ci.job.cache.JobCache; import io.onedev.server.ci.job.cache.JobCache;
import io.onedev.server.ci.job.log.JobLogger;
import io.onedev.server.model.support.JobContext; import io.onedev.server.model.support.JobContext;
import io.onedev.server.model.support.JobExecutor; import io.onedev.server.model.support.JobExecutor;
import io.onedev.server.plugin.serverdocker.ServerDockerExecutor.TestData; import io.onedev.server.plugin.serverdocker.ServerDockerExecutor.TestData;
@ -122,8 +123,8 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
} }
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
private String getImageOS(Logger logger, String image) { private String getImageOS(JobLogger logger, String image) {
logger.info("Checking image OS..."); logger.log("Checking image OS...");
Commandline docker = getDocker(); Commandline docker = getDocker();
docker.addArgs("inspect", image); docker.addArgs("inspect", image);
@ -132,11 +133,11 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
@Override @Override
public void consume(String line) { public void consume(String line) {
logger.debug(line); logger.log(line);
output.append(line).append("\n"); output.append(line).append("\n");
} }
}, newErrorLogger(logger), logger).checkReturnCode(); }, newCommandLogger(logger)).checkReturnCode();
Map<String, Object> map; Map<String, Object> map;
try { try {
@ -159,36 +160,36 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
} }
@Override @Override
public void execute(String jobId, JobContext context) { public void execute(String jobToken, JobContext jobContext) {
Logger logger = context.getLogger(); JobLogger logger = jobContext.getLogger();
getCapacityRunner().call(new Callable<Void>() { getCapacityRunner().call(new Callable<Void>() {
@Override @Override
public Void call() { public Void call() {
return new CacheRunner(getCacheHome(), context.getCaches()).call(new CacheCallable<Void>() { return new CacheRunner(getCacheHome(), jobContext.getCaches()).call(new CacheCallable<Void>() {
@Override @Override
public Void call(Collection<CacheAllocation> allocations) { public Void call(Collection<CacheAllocation> allocations) {
context.notifyJobRunning(); jobContext.notifyJobRunning();
login(logger); login(logger);
logger.info("Pulling image...") ; logger.log("Pulling image...") ;
Commandline docker = getDocker(); Commandline docker = getDocker();
docker.addArgs("pull", context.getEnvironment()); docker.addArgs("pull", jobContext.getEnvironment());
docker.execute(newInfoLogger(logger), newErrorLogger(logger), logger).checkReturnCode(); docker.execute(newCommandLogger(logger), newCommandLogger(logger)).checkReturnCode();
docker.clearArgs(); docker.clearArgs();
String jobInstance = UUID.randomUUID().toString(); String jobInstance = UUID.randomUUID().toString();
docker.addArgs("run", "--rm", "--name", jobInstance); docker.addArgs("run", "--rm", "--name", jobInstance);
for (Map.Entry<String, String> entry: context.getEnvVars().entrySet()) for (Map.Entry<String, String> entry: jobContext.getEnvVars().entrySet())
docker.addArgs("--env", entry.getKey() + "=" + entry.getValue()); docker.addArgs("--env", entry.getKey() + "=" + entry.getValue());
if (getRunOptions() != null) if (getRunOptions() != null)
docker.addArgs(StringUtils.parseQuoteTokens(getRunOptions())); docker.addArgs(StringUtils.parseQuoteTokens(getRunOptions()));
String imageOS = getImageOS(logger, context.getEnvironment()); String imageOS = getImageOS(logger, jobContext.getEnvironment());
logger.info("Detected image OS: " + imageOS); logger.log("Detected image OS: " + imageOS);
boolean windows = imageOS.equals("windows"); boolean windows = imageOS.equals("windows");
@ -206,16 +207,16 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
} }
} }
File effectiveWorkspace = workspaceCache != null? workspaceCache: context.getServerWorkspace(); File effectiveWorkspace = workspaceCache != null? workspaceCache: jobContext.getServerWorkspace();
if (context.isCloneSource()) { if (jobContext.isCloneSource()) {
logger.info("Cloning source code..."); logger.log("Cloning source code...");
context.checkoutSource(effectiveWorkspace); jobContext.checkoutSource(effectiveWorkspace);
} }
if (workspaceCache != null) { if (workspaceCache != null) {
try { try {
FileUtils.copyDirectory(context.getServerWorkspace(), workspaceCache); FileUtils.copyDirectory(jobContext.getServerWorkspace(), workspaceCache);
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -231,45 +232,45 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
if (windows) { if (windows) {
File scriptFile = new File(effectiveWorkspace, "onedev-job-commands.bat"); File scriptFile = new File(effectiveWorkspace, "onedev-job-commands.bat");
try { try {
FileUtils.writeLines(scriptFile, context.getCommands(), "\r\n"); FileUtils.writeLines(scriptFile, jobContext.getCommands(), "\r\n");
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
docker.addArgs(context.getEnvironment()); docker.addArgs(jobContext.getEnvironment());
docker.addArgs("cmd", "/c", dockerWorkspacePath + "\\onedev-job-commands.bat"); docker.addArgs("cmd", "/c", dockerWorkspacePath + "\\onedev-job-commands.bat");
} else { } else {
File scriptFile = new File(effectiveWorkspace, "onedev-job-commands.sh"); File scriptFile = new File(effectiveWorkspace, "onedev-job-commands.sh");
try { try {
FileUtils.writeLines(scriptFile, context.getCommands(), "\n"); FileUtils.writeLines(scriptFile, jobContext.getCommands(), "\n");
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
docker.addArgs(context.getEnvironment()); docker.addArgs(jobContext.getEnvironment());
docker.addArgs("sh", dockerWorkspacePath + "/onedev-job-commands.sh"); docker.addArgs("sh", dockerWorkspacePath + "/onedev-job-commands.sh");
} }
logger.info("Running container to execute job..."); logger.log("Running container to execute job...");
try { try {
docker.execute(newInfoLogger(logger), newErrorLogger(logger), null, new ProcessKiller() { docker.execute(newCommandLogger(logger), newCommandLogger(logger), null, new ProcessKiller() {
@Override @Override
public void kill(Process process) { public void kill(Process process) {
logger.info("Stopping container..."); logger.log("Stopping container...");
Commandline cmd = getDocker(); Commandline cmd = getDocker();
cmd.addArgs("stop", jobInstance); cmd.addArgs("stop", jobInstance);
cmd.execute(newInfoLogger(logger), newErrorLogger(logger), logger); cmd.execute(newCommandLogger(logger), newCommandLogger(logger)).checkReturnCode();
} }
}, logger).checkReturnCode(); }).checkReturnCode();
return null; return null;
} finally { } finally {
if (workspaceCache != null) { if (workspaceCache != null) {
int baseLen = workspaceCache.getAbsolutePath().length()+1; int baseLen = workspaceCache.getAbsolutePath().length()+1;
for (File file: context.getCollectFiles().listFiles(workspaceCache)) { for (File file: jobContext.getCollectFiles().listFiles(workspaceCache)) {
try { try {
FileUtils.copyFile(file, new File(context.getServerWorkspace(), file.getAbsolutePath().substring(baseLen))); FileUtils.copyFile(file, new File(jobContext.getServerWorkspace(), file.getAbsolutePath().substring(baseLen)));
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -278,40 +279,29 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
} }
} }
}, logger); });
} }
}); });
} }
private LineConsumer newInfoLogger(Logger logger) { private LineConsumer newCommandLogger(JobLogger logger) {
return new LineConsumer(Charsets.UTF_8.name()) { return new LineConsumer(Charsets.UTF_8.name()) {
@Override @Override
public void consume(String line) { public void consume(String line) {
logger.info(line); logger.log(line);
} }
}; };
} }
private LineConsumer newErrorLogger(Logger logger) { private void login(JobLogger logger) {
return new LineConsumer(Charsets.UTF_8.name()) {
@Override
public void consume(String line) {
logger.error(line);
}
};
}
private void login(Logger logger) {
for (RegistryLogin login: getRegistryLogins()) { for (RegistryLogin login: getRegistryLogins()) {
if (login.getRegistryUrl() != null) if (login.getRegistryUrl() != null)
logger.info("Login to docker registry '{}'...", login.getRegistryUrl()); logger.log(String.format("Login to docker registry '%s'...", login.getRegistryUrl()));
else else
logger.info("Login to official docker registry..."); logger.log("Login to official docker registry...");
Commandline cmd = getDocker(); Commandline cmd = getDocker();
cmd.addArgs("login", "-u", login.getUserName(), "--password-stdin"); cmd.addArgs("login", "-u", login.getUserName(), "--password-stdin");
if (login.getRegistryUrl() != null) if (login.getRegistryUrl() != null)
@ -322,7 +312,7 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
} catch (UnsupportedEncodingException e) { } catch (UnsupportedEncodingException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
cmd.execute(newInfoLogger(logger), newErrorLogger(logger), input, logger).checkReturnCode(); cmd.execute(newCommandLogger(logger), newCommandLogger(logger), input).checkReturnCode();
} }
} }
@ -409,19 +399,27 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
@Override @Override
public void test(TestData testData) { public void test(TestData testData) {
logger.info("Testing local docker executor..."); JobLogger logger = new JobLogger() {
@Override
public void log(String message, Throwable t) {
ServerDockerExecutor.logger.info(message, t);
}
};
logger.log("Testing local docker executor...");
login(logger); login(logger);
logger.info("Pulling image..."); logger.log("Pulling image...");
Commandline cmd = getDocker(); Commandline cmd = getDocker();
cmd.addArgs("pull", testData.getDockerImage()); cmd.addArgs("pull", testData.getDockerImage());
cmd.execute(newInfoLogger(logger), newErrorLogger(logger), logger).checkReturnCode(); cmd.execute(newCommandLogger(logger), newCommandLogger(logger)).checkReturnCode();
boolean windows = getImageOS(logger, testData.getDockerImage()).equals("windows"); boolean windows = getImageOS(logger, testData.getDockerImage()).equals("windows");
logger.info("Running container..."); logger.log("Running container...");
File cacheHome = getCacheHome(); File cacheHome = getCacheHome();
boolean cacheHomeExists = cacheHome.exists(); boolean cacheHomeExists = cacheHome.exists();
File workspaceDir = null; File workspaceDir = null;
@ -455,7 +453,7 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
else else
cmd.addArgs("sh", "-c", "echo hello from container"); cmd.addArgs("sh", "-c", "echo hello from container");
cmd.execute(newInfoLogger(logger), newErrorLogger(logger), logger).checkReturnCode(); cmd.execute(newCommandLogger(logger), newCommandLogger(logger)).checkReturnCode();
} finally { } finally {
if (workspaceDir != null) if (workspaceDir != null)
FileUtils.deleteDir(workspaceDir); FileUtils.deleteDir(workspaceDir);
@ -466,10 +464,10 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
} }
if (!SystemUtils.IS_OS_WINDOWS) { if (!SystemUtils.IS_OS_WINDOWS) {
logger.info("Checking busybox..."); logger.log("Checking busybox...");
cmd = getDocker(); cmd = getDocker();
cmd.addArgs("run", "--rm", "busybox", "sh", "-c", "echo hello from busybox"); cmd.addArgs("run", "--rm", "busybox", "sh", "-c", "echo hello from busybox");
cmd.execute(newInfoLogger(logger), newErrorLogger(logger), logger).checkReturnCode(); cmd.execute(newCommandLogger(logger), newCommandLogger(logger)).checkReturnCode();
} }
} }
@ -482,7 +480,21 @@ public class ServerDockerExecutor extends JobExecutor implements Testable<TestDa
String containerPath = "/onedev_dir_to_clean"; String containerPath = "/onedev_dir_to_clean";
cmd.addArgs("run", "-v", dir.getAbsolutePath() + ":" + containerPath, "--rm", cmd.addArgs("run", "-v", dir.getAbsolutePath() + ":" + containerPath, "--rm",
"busybox", "sh", "-c", "rm -rf " + containerPath + "/*"); "busybox", "sh", "-c", "rm -rf " + containerPath + "/*");
cmd.execute(newInfoLogger(logger), newErrorLogger(logger), logger).checkReturnCode(); cmd.execute(new LineConsumer() {
@Override
public void consume(String line) {
logger.info(line);
}
}, new LineConsumer() {
@Override
public void consume(String line) {
logger.error(line);
}
}).checkReturnCode();
} }
} }
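Note: taken together, the ServerDockerExecutor changes replace the slf4j-level plumbing (newInfoLogger/newErrorLogger plus a trailing Logger argument) with a single JobLogger funnel via newCommandLogger. A standalone sketch of the resulting pattern, assuming JobLogger also offers a one-argument log(String) on top of the log(String, Throwable) overridden above, and using "docker version" purely as a stand-in command:

    // Sketch, not production code: run a command and stream stdout and stderr
    // into the job log, mirroring the execute()/test() changes above.
    JobLogger logger = new JobLogger() {

        @Override
        public void log(String message, Throwable t) {
            System.out.println(message); // a real executor appends to the build's log store
            if (t != null)
                t.printStackTrace();
        }

    };

    Commandline docker = new Commandline("docker"); // assumes docker is on PATH
    docker.addArgs("version");
    docker.execute(newCommandLogger(logger), newCommandLogger(logger)).checkReturnCode();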