org.argeo.slc.core.test.context,
org.argeo.slc.core.test.tree,
org.argeo.slc.execution,
+ org.argeo.slc.jsch,
org.argeo.slc.osgi,
org.argeo.slc.structure,
org.argeo.slc.test,
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>\r
+<beans xmlns="http://www.springframework.org/schema/beans"\r
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:p="http://www.springframework.org/schema/p"\r
+ xmlns:aop="http://www.springframework.org/schema/aop" xmlns:flow="http://www.argeo.org/schema/slc-flow"\r
+ xsi:schemaLocation="\r
+ http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.5.xsd\r
+ http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-2.5.xsd\r
+ http://www.argeo.org/schema/slc-flow http://www.argeo.org/schema/slc-flow-0.12.xsd">\r
+\r
+ <!-- Basic echo, should work on all OSs -->\r
+ <flow:flow name="os/systemCall">\r
+ <bean p:cmd="echo Hello World!" class="org.argeo.slc.core.execution.tasks.SystemCall" />\r
+ </flow:flow>\r
+\r
+ <!-- Must disable requiretty in sudoers file -->\r
+ <flow:flow name="os/sudo">\r
+ <bean p:cmd="sudo id" class="org.argeo.slc.core.execution.tasks.SystemCall">\r
+ <property name="environmentVariables">\r
+ <map>\r
+ <entry key="SUDO_ASKPASS" value="/usr/libexec/openssh/gnome-ssh-askpass" />\r
+ </map>\r
+ </property>\r
+ </bean>\r
+ </flow:flow>\r
+\r
+ <!-- SSH -->\r
+ <flow:flow name="os/ssh">\r
+ <bean p:cmd="ls /etc" class="org.argeo.slc.core.execution.tasks.SystemCall">\r
+ <property name="executor" ref="sshExecutor" />\r
+ </bean>\r
+ </flow:flow>\r
+\r
+ <bean name="sshExecutor" class="org.argeo.slc.jsch.JschExecutor">\r
+ <property name="sshTarget">\r
+ <bean p:host="localhost" p:port="22" p:user="${user.name}"\r
+ p:localPrivateKey="${user.home}/.ssh/argeo_admin_rsa" class="org.argeo.slc.jsch.SshTarget" />\r
+ </property>\r
+ </bean>\r
+\r
+\r
+\r
+</beans>
\ No newline at end of file
org.argeo.slc.demo.ant,\
org.argeo.slc.demo.basic,\
org.argeo.slc.demo.minimal,\
+org.argeo.elgis.rpmfactory,\
eclipse.application=org.argeo.slc.client.rcp.application
org.argeo.security.ui.initialPerspective=org.argeo.slc.client.ui.slcExecutionPerspective
+++ /dev/null
-argeo.osgi.start=\
-org.springframework.osgi.extender,\
-org.argeo.slc.gis.position.gpsbabel,\
-org.argeo.slc.gis.position.ui
-
-log4j.configuration=file:../../log4j.properties
<feature
id="org.argeo.slc.ide"
label="Argeo Java IDE"
- version="0.13.1.D20110413_1126"
+ version="0.13.1.D20110904_1415"
provider-name="Argeo"
plugin="org.argeo.slc.ide.branding"
image="icons/argeo-icon-100104-256.png">
id="org.argeo.slc.ide.ui"
download-size="0"
install-size="0"
- version="0.13.1.D20110413_1126"
+ version="0.13.1.D20110904_1415"
unpack="false"/>
<plugin
id="org.argeo.slc.ide.branding"
download-size="0"
install-size="0"
- version="0.13.1.D20110413_1126"
+ version="0.13.1.D20110904_1415"
unpack="false"/>
</feature>
Node newNode = JcrUtils.mkdirs(session, destPath,
SlcTypes.SLC_PROCESS);
+ Node rootRealizedFlowNode = newNode.addNode(SLC_FLOW);
// copy node
- JcrUtils.copy(processNode, newNode);
+ JcrUtils.copy(processNode.getNode(SLC_FLOW), rootRealizedFlowNode);
newNode.setProperty(SLC_UUID, uuid);
newNode.setProperty(SLC_STATUS, ExecutionProcess.INITIALIZED);
// reset realized flow status
- Node rootRealizedFlowNode = newNode.getNode(SLC_FLOW);
// we just manage one level for the time being
NodeIterator nit = rootRealizedFlowNode.getNodes(SLC_FLOW);
while (nit.hasNext()) {
builderPage = new ProcessBuilderPage(this, processNode);
addPage(builderPage);
firePropertyChange(PROP_DIRTY);
- logPage = new ProcessLogPage(this);
+ logPage = new ProcessLogPage(this, processNode);
addPage(logPage);
} catch (PartInitException e) {
throw new SlcException("Cannot add pages", e);
}
public void addSteps(ExecutionProcess process, List<ExecutionStep> steps) {
- logPage.addSteps(steps);
+ // logPage.addSteps(steps);
}
/** Expects one session per editor. */
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.List;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import javax.jcr.Node;
+import javax.jcr.NodeIterator;
+import javax.jcr.RepositoryException;
+import javax.jcr.Workspace;
+import javax.jcr.observation.Event;
+import javax.jcr.observation.EventListener;
+import javax.jcr.query.Query;
+
+import org.argeo.eclipse.ui.jcr.AsyncUiEventListener;
+import org.argeo.slc.SlcException;
import org.argeo.slc.execution.ExecutionStep;
+import org.argeo.slc.jcr.SlcNames;
+import org.argeo.slc.jcr.SlcTypes;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
*/
private StringBuffer beforeTextInit = new StringBuffer("");
- public ProcessLogPage(FormEditor editor) {
+ private Node processNode;
+	/**
+	 * Optimization: the length of the path up to slc:log, computed once so
+	 * that the relative path of each log entry can be extracted by substring.
+	 */
+ private Integer logPathLength;
+
+ public ProcessLogPage(FormEditor editor, Node processNode) {
super(editor, ID, "Log");
+ this.processNode = processNode;
+
+ EventListener listener = new LogListener(editor.getSite().getPage()
+ .getWorkbenchWindow().getWorkbench().getDisplay());
+
+ try {
+ String logBasePath = processNode.getPath() + '/' + SlcNames.SLC_LOG;
+ logPathLength = logBasePath.length();
+
+ Workspace ws = processNode.getSession().getWorkspace();
+
+ String statement = "SELECT * FROM ["
+ + SlcTypes.SLC_LOG_ENTRY
+ + "] as logEntry"
+ + " WHERE ISDESCENDANTNODE('"
+ + logBasePath
+ + "')"
+ + " ORDER BY logEntry.[slc:timestamp] ASC, NAME(logEntry) ASC";
+ StringBuffer buf = new StringBuffer("");
+ NodeIterator it = ws.getQueryManager()
+ .createQuery(statement, Query.JCR_SQL2).execute()
+ .getNodes();
+ while (it.hasNext())
+ appendLogEntry(buf, it.nextNode());
+ beforeTextInit = new StringBuffer(buf.toString());
+ // text.setText(buf.toString());
+ ws.getObservationManager().addEventListener(listener,
+ Event.NODE_ADDED, logBasePath, true, null, null, false);
+ } catch (RepositoryException e) {
+ throw new SlcException("Cannot register listener", e);
+ }
}
@Override
text = tk.createText(parent, "", SWT.MULTI | SWT.H_SCROLL
| SWT.V_SCROLL);
text.setEditable(false);
-
+
// transfer the existing buffer the first time
if (beforeTextInit.length() > 0) {
text.append(beforeTextInit.toString());
}
-// @Override
-// protected synchronized void createFormContent(IManagedForm mf) {
-// ScrolledForm form = mf.getForm();
-// form.setExpandHorizontal(true);
-// form.setExpandVertical(true);
-// // form.setText("Log");
-// FillLayout mainLayout = new FillLayout();
-// form.getBody().setLayout(mainLayout);
-//
-// FormToolkit tk = getManagedForm().getToolkit();
-// text = tk.createText(form.getBody(), "", SWT.MULTI | SWT.H_SCROLL
-// | SWT.V_SCROLL);
-// text.setEditable(false);
-// // transfer the existing buffer the first time
-// if (beforeTextInit.length() > 0) {
-// text.append(beforeTextInit.toString());
-// // clear buffer
-// beforeTextInit.setLength(0);
-// }
-// }
+ // @Override
+ // protected synchronized void createFormContent(IManagedForm mf) {
+ // ScrolledForm form = mf.getForm();
+ // form.setExpandHorizontal(true);
+ // form.setExpandVertical(true);
+ // // form.setText("Log");
+ // FillLayout mainLayout = new FillLayout();
+ // form.getBody().setLayout(mainLayout);
+ //
+ // FormToolkit tk = getManagedForm().getToolkit();
+ // text = tk.createText(form.getBody(), "", SWT.MULTI | SWT.H_SCROLL
+ // | SWT.V_SCROLL);
+ // text.setEditable(false);
+ // // transfer the existing buffer the first time
+ // if (beforeTextInit.length() > 0) {
+ // text.append(beforeTextInit.toString());
+ // // clear buffer
+ // beforeTextInit.setLength(0);
+ // }
+ // }
+ protected void appendLogEntry(StringBuffer buf, Node logEntry)
+ throws RepositoryException {
+ // +1 in order to remove the first slash
+ String relPath = logEntry.getPath().substring(logPathLength + 1);
+ //System.out.println("relPath=" + relPath);
+ int firstSlashIndex = relPath.indexOf('/');
+ int lastSlashIndex = relPath.lastIndexOf('/');
+ String thread = relPath.substring(0, firstSlashIndex);
+ String location = relPath.substring(firstSlashIndex, lastSlashIndex);
+
+ // String date = dateFormat.format(logEntry
+ // .getProperty(SlcNames.SLC_TIMESTAMP).getDate().getTime());
+ String date = logEntry.getProperty(SlcNames.SLC_TIMESTAMP).getString();
+ buf.append(date).append(' ');
+ String type = logEntry.getPrimaryNodeType().getName().substring(7);
+ buf.append(type).append('\t');
+ // buf.append(thread).append('\t');
+ // buf.append(location).append('\t');
+ buf.append(logEntry.getProperty(SlcNames.SLC_MESSAGE).getString());
+ buf.append('\n');
+
+ }
+
+ /** @deprecated */
public synchronized void addSteps(List<ExecutionStep> steps) {
final StringBuffer buf = new StringBuffer("");
for (ExecutionStep step : steps) {
text.setFocus();
}
+ /** JCR event listener notifying when new nodes are added */
+ private class LogListener extends AsyncUiEventListener {
+
+ public LogListener(Display display) {
+ super(display);
+ }
+
+ @Override
+ protected void onEventInUiThread(List<Event> events)
+ throws RepositoryException {
+			// Batch save is used, so event order is not guaranteed; reorder
+			// by log line number for the time being.
+ SortedMap<Long, Node> nodes = new TreeMap<Long, Node>();
+
+ for (Event evt : events) {
+ Node newNode = ProcessLogPage.this.processNode.getSession()
+ .getNode(evt.getPath());
+ if (newNode.isNodeType(SlcTypes.SLC_LOG_ENTRY)) {
+ nodes.put(Long.parseLong(newNode.getName()), newNode);
+ }
+ }
+
+ StringBuffer buf = new StringBuffer("");
+ for (Node logEntry : nodes.values()) {
+ appendLogEntry(buf, logEntry);
+ }
+
+ if (text != null)
+ text.append(buf.toString());
+ else
+ beforeTextInit.append(buf);
+ }
+ }
}
Bundle-ManifestVersion: 2
Bundle-Name: Argeo IDE
Bundle-SymbolicName: org.argeo.slc.ide.branding;singleton:=true
-Bundle-Version: 0.13.1.D20110413_1126
+Bundle-Version: 0.13.1.D20110904_1415
Bundle-Vendor: Argeo.org
Require-Bundle: org.eclipse.ui;bundle-version="3.5.1",
org.eclipse.osgi;bundle-version="3.5.1",
Bundle-ManifestVersion: 2
Bundle-Name: SLC IDE UI
Bundle-SymbolicName: org.argeo.slc.ide.ui;singleton:=true
-Bundle-Version: 0.13.1.D20110413_1126
+Bundle-Version: 0.13.1.D20110904_1415
Bundle-Activator: org.argeo.slc.ide.ui.SlcIdeUiPlugin
Require-Bundle: org.eclipse.ui,
org.eclipse.core.runtime,
package org.argeo.slc.ide.ui.launch.osgi;
-import org.eclipse.core.runtime.CoreException;
-import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
import org.eclipse.debug.ui.CommonTab;
import org.eclipse.debug.ui.EnvironmentTab;
import org.eclipse.debug.ui.ILaunchConfigurationDialog;
import org.eclipse.pde.ui.launcher.EclipseLauncherTabGroup;
import org.eclipse.pde.ui.launcher.MainTab;
import org.eclipse.pde.ui.launcher.OSGiSettingsTab;
-import org.eclipse.pde.ui.launcher.PluginsTab;
import org.eclipse.pde.ui.launcher.TracingTab;
/** Definition of the set of tabs used in Eclipse Boot launch configuration UI. */
ILaunchConfigurationTab[] tabs = new ILaunchConfigurationTab[] {
new OsgiBootMainTab(true),
new MainTab(),
- new PluginsTab() {
- private boolean activating = false;
-
- @Override
- public void performApply(
- ILaunchConfigurationWorkingCopy config) {
- super.performApply(config);
- if (activating) {
- try {
- config.doSave();
- } catch (CoreException e) {
- e.printStackTrace();
- }
- activating = false;
- }
- }
-
- @Override
- public void activated(
- ILaunchConfigurationWorkingCopy workingCopy) {
- activating = true;
- }
- }, new OSGiSettingsTab(), new EnvironmentTab(),
- new TracingTab(), new CommonTab() };
+ // new PluginsTab() {
+ // private boolean activating = false;
+ //
+ // @Override
+ // public void performApply(
+ // ILaunchConfigurationWorkingCopy config) {
+ // super.performApply(config);
+ // if (activating) {
+ // try {
+ // config.doSave();
+ // } catch (CoreException e) {
+ // e.printStackTrace();
+ // }
+ // activating = false;
+ // }
+ // }
+ //
+ // @Override
+ // public void activated(
+ // ILaunchConfigurationWorkingCopy workingCopy) {
+ // activating = true;
+ // }
+ // },
+ new OSGiSettingsTab(), new EnvironmentTab(), new TracingTab(),
+ new CommonTab() };
setTabs(tabs);
}
container.setLayout(new GridLayout());
container.setLayoutData(new GridData(GridData.FILL_BOTH));
- createGeneral(container);
createAdditionalProgramArgs(container);
createAdditionalVmArgumentBlock(container);
+ createAdvanced(container);
Dialog.applyDialogFont(container);
setControl(container);
}
- protected void createGeneral(Composite parent) {
- Group container = new Group(parent, SWT.NONE);
- container.setText("General");
- GridLayout layout = new GridLayout();
- layout.numColumns = 2;
- container.setLayout(layout);
- container.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
-
- syncBundles = new Button(container, SWT.CHECK);
- syncBundles.addSelectionListener(listener);
- new Label(container, SWT.NONE)
- .setText("Keep bundles in line with target platform and workspace (recommended)");
- clearDataDirectory = new Button(container, SWT.CHECK);
- clearDataDirectory.addSelectionListener(listener);
- new Label(container, SWT.NONE)
- .setText("Clear data directory before launch");
- }
-
+ /** Init UI for programs arguments */
protected void createAdditionalProgramArgs(Composite parent) {
Group container = new Group(parent, SWT.NONE);
container.setText("Additional Program Arguments");
additionalProgramArgs.addModifyListener(listener);
}
+ /** Init UI for VM arguments */
protected void createAdditionalVmArgumentBlock(Composite parent) {
Group container = new Group(parent, SWT.NONE);
container.setText("Additional VM Arguments");
additionalVmArgs.setLayoutData(gd);
additionalVmArgs.addModifyListener(listener);
+ }
+
+ /** Init UI for Advanced section */
+ protected void createAdvanced(Composite parent) {
+ Group container = new Group(parent, SWT.NONE);
+ container.setText("Advanced");
+ GridLayout layout = new GridLayout();
+ layout.numColumns = 2;
+ container.setLayout(layout);
+ container.setLayoutData(new GridData(GridData.FILL_HORIZONTAL));
+
+ syncBundles = new Button(container, SWT.CHECK);
+ syncBundles.addSelectionListener(listener);
+ new Label(container, SWT.NONE)
+ .setText("Keep bundles in line with target platform and workspace (recommended)");
+ clearDataDirectory = new Button(container, SWT.CHECK);
+ clearDataDirectory.addSelectionListener(listener);
+ new Label(container, SWT.NONE)
+ .setText("Clear data directory before launch");
+
addJvmPaths = new Button(container, SWT.CHECK);
addJvmPaths.addSelectionListener(listener);
new Label(container, SWT.NONE)
import java.util.List;
import java.util.Map;
import java.util.Properties;
+import java.util.Set;
import java.util.StringTokenizer;
+import java.util.TreeSet;
import org.argeo.slc.ide.ui.SlcIdeUiPlugin;
import org.eclipse.core.resources.IFile;
*/
@SuppressWarnings("restriction")
public class OsgiLaunchHelper implements OsgiLauncherConstants {
- private static Boolean debug = false;
+ private static Boolean debug = true;
private final static String DEFAULT_DATA_DIR = "data";
private final static String DEFAULT_EXEC_DIR = "exec";
String originalVmArgs = wc.getAttribute(
IJavaLaunchConfigurationConstants.ATTR_VM_ARGUMENTS, "");
wc.setAttribute(ATTR_DEFAULT_VM_ARGS, originalVmArgs);
- wc.setAttribute(IPDELauncherConstants.CONFIG_CLEAR_AREA, true);
+ wc.setAttribute(IPDELauncherConstants.CONFIG_CLEAR_AREA, false);
} catch (CoreException e) {
Shell shell = Display.getCurrent().getActiveShell();
ErrorDialog.openError(shell, "Error",
if (debug)
System.out.println("Original bundle list: " + original);
- StringBuffer bufBundles = new StringBuffer(1024);
StringTokenizer stComa = new StringTokenizer(original, ",");
- boolean first = true;
+ // sort by bundle symbolic name
+ Set<String> bundleIds = new TreeSet<String>();
bundles: while (stComa.hasMoreTokens()) {
- if (first)
- first = false;
- else
- bufBundles.append(',');
String bundleId = stComa.nextToken();
if (bundleId.indexOf('*') >= 0)
+ " not properly formatted, clean your workspace projects");
int indexAt = bundleId.indexOf('@');
- boolean modified = false;
if (indexAt >= 0) {
bundleId = bundleId.substring(0, indexAt);
}
// skip simple configurator in order to avoid side-effects
continue bundles;
}
+ bundleIds.add(bundleId);
+ }
+ StringBuffer bufBundles = new StringBuffer(1024);
+ boolean first = true;
+ for (String bundleId : bundleIds) {
+ if (first)
+ first = false;
+ else
+ bufBundles.append(',');
+ boolean modified = false;
if (bundlesToStart.contains(bundleId)) {
bufBundles.append(bundleId).append('@').append("default:true");
modified = true;
if (!modified)
bufBundles.append(bundleId);
+
}
String output = bufBundles.toString();
return output;
<bean id="bundlesManager" class="org.argeo.slc.osgi.BundlesManager" />\r
\r
<!-- Logging -->\r
- <bean id="log4Notification" class="org.argeo.slc.log4j.SlcExecutionAppender">\r
+ <bean id="log4Notification" class="org.argeo.slc.log4j.SlcExecutionAppender"\r
+ init-method="init" destroy-method="destroy">\r
<property name="disabled" value="${slc.agent.log4Notification.disabled}" />\r
<property name="level" value="${slc.agent.log4Notification.level}" />\r
<property name="onlyExecutionThread"\r
value="${slc.agent.log4Notification.onlyExecutionThread}" />\r
</bean>\r
\r
- \r
+\r
</beans>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>org.argeo.slc.lib.build</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.pde.ManifestBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.SchemaBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.pde.PluginNature</nature>
+ </natures>
+</projectDescription>
--- /dev/null
+Manifest-Version: 1.0
+Bundle-ManifestVersion: 2
+Bundle-Name: SLC Build
+Bundle-SymbolicName: org.argeo.slc.lib.build
+Bundle-Version: 0.13.1.SNAPSHOT
+Bundle-Vendor: Argeo
+Bundle-RequiredExecutionEnvironment: J2SE-1.5
--- /dev/null
+source.. = src/main/java/
+output.. = target/classes/
+bin.includes = META-INF/,\
+ .
public void dispatchAddSteps(ExecutionProcess process,
List<ExecutionStep> steps) {
+ process.addSteps(steps);
+
for (Iterator<SlcExecutionNotifier> it = getSlcExecutionNotifiers()
.iterator(); it.hasNext();) {
it.next().addSteps(process, steps);
import org.springframework.beans.factory.ObjectFactory;
import org.springframework.beans.factory.config.Scope;
+/**
+ * When Spring beans are instantiated with this scope, the same instance is
+ * reused across an execution.
+ */
public class ExecutionScope implements Scope {
private final static Log log = LogFactory.getLog(ExecutionScope.class);
.getFlowDescriptor();
String flowName = executionFlowDescriptor.getName();
- dispatchAddStep(new ExecutionStep(ExecutionStep.PHASE_START, "Flow "
- + flowName));
+ dispatchAddStep(new ExecutionStep(realizedFlow.getModuleName(),
+ ExecutionStep.PHASE_START, "Flow " + flowName));
try {
String autoUpgrade = System
// TODO: re-throw exception ?
String msg = "Execution of flow " + flowName + " failed.";
log.error(msg, e);
- dispatchAddStep(new ExecutionStep(ExecutionStep.ERROR, msg + " "
- + e.getMessage()));
+ dispatchAddStep(new ExecutionStep(realizedFlow.getModuleName(),
+ ExecutionStep.ERROR, msg + " " + e.getMessage()));
processThread.notifyError();
} finally {
processThread.flowCompleted();
- dispatchAddStep(new ExecutionStep(ExecutionStep.PHASE_END, "Flow "
- + flowName));
+ dispatchAddStep(new ExecutionStep(realizedFlow.getModuleName(),
+ ExecutionStep.PHASE_END, "Flow " + flowName));
}
}
processThread.getProcessThreadGroup().dispatchAddStep(step);
}
+ public RealizedFlow getRealizedFlow() {
+ return realizedFlow;
+ }
+
}
import java.util.HashSet;
import java.util.List;
import java.util.Set;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.argeo.slc.SlcException;
import org.argeo.slc.execution.ExecutionModulesManager;
import org.argeo.slc.execution.ExecutionProcess;
+import org.argeo.slc.execution.ExecutionStep;
import org.argeo.slc.process.RealizedFlow;
import org.argeo.slc.process.SlcExecution;
private Boolean hadAnError = false;
private Boolean killed = false;
+ private final static Integer STEPS_BUFFER_CAPACITY = 10000;
+ private BlockingQueue<ExecutionStep> steps = new ArrayBlockingQueue<ExecutionStep>(
+ STEPS_BUFFER_CAPACITY);
+
public ProcessThread(ThreadGroup processesThreadGroup,
ExecutionModulesManager executionModulesManager,
ExecutionProcess process) {
log.info("\n##\n## SLC Process #" + process.getUuid()
+ " STARTED\n##\n");
+ // Start logging
+ new LoggingThread().start();
+
String oldStatus = process.getStatus();
process.setStatus(ExecutionProcess.RUNNING);
executionModulesManager.dispatchUpdateStatus(process, oldStatus,
public ExecutionModulesManager getExecutionModulesManager() {
return executionModulesManager;
}
+
+ private class LoggingThread extends Thread {
+ public void run() {
+ boolean run = true;
+ while (run) {
+ List<ExecutionStep> newSteps = new ArrayList<ExecutionStep>();
+ processThreadGroup.getSteps().drainTo(newSteps);
+ if (newSteps.size() > 0) {
+ //System.out.println(steps.size() + " steps");
+ process.addSteps(newSteps);
+ }
+
+ try {
+ Thread.sleep(1000);
+ } catch (InterruptedException e) {
+ break;
+ }
+
+ if (!ProcessThread.this.isAlive()
+ && processThreadGroup.getSteps().size() == 0)
+ run = false;
+ }
+ }
+
+ }
}
import java.util.ArrayList;
import java.util.List;
+import java.util.concurrent.ArrayBlockingQueue;
+import java.util.concurrent.BlockingQueue;
import org.argeo.slc.execution.ExecutionModulesManager;
import org.argeo.slc.execution.ExecutionProcess;
private final ExecutionModulesManager executionModulesManager;
private final ProcessThread processThread;
+ private final static Integer STEPS_BUFFER_CAPACITY = 10000;
+
+ private BlockingQueue<ExecutionStep> steps = new ArrayBlockingQueue<ExecutionStep>(
+ STEPS_BUFFER_CAPACITY);
+
public ProcessThreadGroup(ExecutionModulesManager executionModulesManager,
ProcessThread processThread) {
super("SLC Process #" + processThread.getProcess().getUuid()
List<ExecutionStep> steps = new ArrayList<ExecutionStep>();
steps.add(step);
- dispatchAddSteps(steps);
+ // dispatchAddSteps(steps);
+ this.steps.add(step);
}
public void dispatchAddSteps(List<ExecutionStep> steps) {
executionModulesManager.dispatchAddSteps(slcProcess, steps);
}
+ public BlockingQueue<ExecutionStep> getSteps() {
+ return steps;
+ }
+
}
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.io.PipedInputStream;
+import java.io.PipedOutputStream;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collections;
private String cmd = null;
private List<Object> command = null;
+ private Executor executor = new DefaultExecutor();
private Boolean synchronous = true;
private String stdErrLogLevel = "ERROR";
private Resource stdOutFile = null;
private Resource stdErrFile = null;
+
private Resource stdInFile = null;
+ /**
+	 * If no {@link #stdInFile} is provided, writing to this stream will write to
+ * the stdin of the process.
+ */
+ private OutputStream stdInSink = null;
+
private Boolean redirectStdOut = false;
private List<SystemCallOutputListener> outputListeners = Collections
private String osConsole = null;
private String generateScript = null;
+ /** 24 hours */
private Long watchdogTimeout = 24 * 60 * 60 * 1000l;
private TestResult testResult;
stdErrWriter = createWriter(stdOutFile, true);
}
- if (stdInFile != null)
- try {
+ try {
+ if (stdInFile != null)
stdInStream = stdInFile.getInputStream();
- } catch (IOException e2) {
- throw new SlcException("Cannot open a stream for " + stdInFile,
- e2);
+ else {
+ stdInStream = new PipedInputStream();
+ stdInSink = new PipedOutputStream(
+ (PipedInputStream) stdInStream);
}
+ } catch (IOException e2) {
+ throw new SlcException("Cannot open a stream for " + stdInFile, e2);
+ }
if (log.isTraceEnabled()) {
log.debug("os.name=" + System.getProperty("os.name"));
dir.mkdirs();
// Watchdog to check for lost processes
- Executor executor = new DefaultExecutor();
- executor.setWatchdog(new ExecuteWatchdog(watchdogTimeout));
+ Executor executorToUse;
+ if (executor != null)
+ executorToUse = executor;
+ else
+ executorToUse = new DefaultExecutor();
+ executorToUse.setWatchdog(new ExecuteWatchdog(watchdogTimeout));
if (redirectStreams) {
// Redirect standard streams
- executor.setStreamHandler(createExecuteStreamHandler(stdOutWriter,
- stdOutputStream, stdErrWriter, stdInStream));
+ executorToUse.setStreamHandler(createExecuteStreamHandler(
+ stdOutWriter, stdOutputStream, stdErrWriter, stdInStream));
} else {
// Dummy stream handler (otherwise pump is used)
- executor.setStreamHandler(new DummyexecuteStreamHandler());
+ executorToUse.setStreamHandler(new DummyexecuteStreamHandler());
}
- executor.setProcessDestroyer(new ShutdownHookProcessDestroyer());
- executor.setWorkingDirectory(dir);
+ executorToUse.setProcessDestroyer(new ShutdownHookProcessDestroyer());
+ executorToUse.setWorkingDirectory(dir);
// Command line to use
final CommandLine commandLine = createCommandLine();
// Env variables
Map<String, String> environmentVariablesToUse = null;
- if (environmentVariables.size() > 0) {
- environmentVariablesToUse = new HashMap<String, String>();
- if (mergeEnvironmentVariables)
- environmentVariablesToUse.putAll(System.getenv());
+ environmentVariablesToUse = new HashMap<String, String>();
+ if (mergeEnvironmentVariables)
+ environmentVariablesToUse.putAll(System.getenv());
+ if (environmentVariables.size() > 0)
environmentVariablesToUse.putAll(environmentVariables);
- }
// Execute
ExecuteResultHandler executeResultHandler = createExecuteResultHandler(commandLine);
try {
if (synchronous)
try {
- int exitValue = executor.execute(commandLine,
+ int exitValue = executorToUse.execute(commandLine,
environmentVariablesToUse);
executeResultHandler.onProcessComplete(exitValue);
} catch (ExecuteException e1) {
executeResultHandler.onProcessFailed(e1);
}
else
- executor.execute(commandLine, environmentVariablesToUse,
+ executorToUse.execute(commandLine, environmentVariablesToUse,
executeResultHandler);
} catch (SlcException e) {
throw e;
IOUtils.closeQuietly(stdOutWriter);
IOUtils.closeQuietly(stdErrWriter);
IOUtils.closeQuietly(stdInStream);
+ IOUtils.closeQuietly(stdInSink);
}
}
if (stdErrWriter != null)
appendLineToFile(stdErrWriter, line);
}
- }, stdInStream);
+ }, stdInStream) {
+
+ @Override
+ public void stop() {
+ // prevents the method to block when joining stdin
+ if (stdInSink != null)
+ IOUtils.closeQuietly(stdInSink);
+
+ super.stop();
+ }
+ };
return pumpStreamHandler;
}
this.outputListeners = outputListeners;
}
+ public void setExecutor(Executor executor) {
+ this.executor = executor;
+ }
+
private class DummyexecuteStreamHandler implements ExecuteStreamHandler {
public void setProcessErrorStream(InputStream is) throws IOException {
}
}
-
}
\r
package org.argeo.slc.execution;\r
\r
+/** Variables or references attached to an execution (typically thread-bound). */\r
public interface ExecutionContext {\r
public final static String VAR_EXECUTION_CONTEXT_ID = "slcVar.executionContext.id";\r
public final static String VAR_EXECUTION_CONTEXT_CREATION_DATE = "slcVar.executionContext.creationDate";\r
package org.argeo.slc.execution;
+import java.util.List;
+
/**
* A process is the functional representation of a combination of executions.
* While an execution is the actual java code running, a process exists before,
* during and after the execution actually took place, providing an entry point
* for the definition of executions, their monitoring (e.g. logging) and
- * tracking. A process can be distributed or parallelized.
- * <br/>
+ * tracking. A process can be distributed or parallelized. <br/>
* NEW => INITIALIZED => SCHEDULED => RUNNING<br/>
* RUNNING => {COMPLETED | ERROR | KILLED}<br/>
* {COMPLETED | ERROR | KILLED} => PURGED<br/>
/** Sets the current status of this process */
public void setStatus(String status);
+
+ public void addSteps(List<ExecutionStep> steps);
}
public final static String DEBUG = "DEBUG";\r
public final static String TRACE = "TRACE";\r
\r
- /** @deprecated*/\r
+ /** @deprecated */\r
public final static String START = "START";\r
- /** @deprecated*/\r
+ /** @deprecated */\r
public final static String END = "END";\r
\r
// TODO make the fields final and private when we don't need POJO support\r
protected Date timestamp;\r
protected String log;\r
\r
+ private String location;\r
+\r
/** Empty constructor */\r
public ExecutionStep() {\r
- thread = Thread.currentThread().getName();\r
- }\r
-\r
- /** Creates a step at the current date of type INFO */\r
- public ExecutionStep(String log) {\r
- this(new Date(), INFO, log);\r
+ Thread currentThread = Thread.currentThread();\r
+ thread = currentThread.getName();\r
}\r
\r
/** Creates a step at the current date */\r
- public ExecutionStep(String type, String log) {\r
- this(new Date(), type, log);\r
+ public ExecutionStep(String location, String type, String log) {\r
+ this(location, new Date(), type, log);\r
}\r
\r
/** Creates a step of the given type. */\r
- public ExecutionStep(Date timestamp, String type, String log) {\r
- this(timestamp, type, log, Thread.currentThread().getName());\r
+ public ExecutionStep(String location, Date timestamp, String type,\r
+ String log) {\r
+ this(location, timestamp, type, log, Thread.currentThread().getName());\r
}\r
\r
- public ExecutionStep(Date timestamp, String type, String log, String thread) {\r
+ public ExecutionStep(String location, Date timestamp, String type,\r
+ String log, String thread) {\r
+ this.location = location;\r
this.type = type;\r
this.timestamp = timestamp;\r
this.thread = thread;\r
return "Execution step, thread=" + thread + ", type=" + type;\r
}\r
\r
+ /** Typically the logging category */\r
+ public String getLocation() {\r
+ return location;\r
+ }\r
+\r
}\r
import org.argeo.slc.execution.ExecutionFlowDescriptor;
import org.argeo.slc.execution.ExecutionSpec;
+/** A fully configured execution flow, ready to be executed. */
public class RealizedFlow implements Serializable {
private static final long serialVersionUID = 1L;
import java.util.TreeMap;\r
\r
import org.argeo.slc.execution.ExecutionProcess;\r
+import org.argeo.slc.execution.ExecutionStep;\r
\r
/** @deprecated use other implementations of {@link ExecutionProcess} */\r
public class SlcExecution implements ExecutionProcess, Serializable {\r
this.steps = steps;\r
}\r
\r
+ public void addSteps(List<ExecutionStep> steps) {\r
+ // not implemented on deprecated\r
+ }\r
+\r
public String getUuid() {\r
return uuid;\r
}\r
\r
public SlcExecutionStep(Date timestamp, String type, String log,\r
String thread) {\r
- super(timestamp, type, log, thread);\r
+ super("UNKOWN_LOCATION", timestamp, type, log, thread);\r
}\r
- \r
- \r
\r
public String getUuid() {\r
return uuid;\r
this.logLines = logLines;\r
}\r
\r
- /** public for legacy reasons*/\r
+ /** public for legacy reasons */\r
public String addLog(String log) {\r
if (logLines == null)\r
logLines = new ArrayList<String>();\r
public final static String SLC_SPEC = "slc:spec";
public final static String SLC_EXECUTION_SPECS = "slc:executionSpecs";
public final static String SLC_FLOW = "slc:flow";
+ public final static String SLC_LOG = "slc:log";
+ public final static String SLC_TIMESTAMP = "slc:timestamp";
// spec attribute
public final static String SLC_IS_IMMUTABLE = "slc:isImmutable";
public final static String SLC_CHECK = "slc:check";
public final static String SLC_PROPERTY = "slc:property";
+ // Log levels
+ public final static String SLC_LOG_ENTRY = "slc:logEntry";
+ public final static String SLC_LOG_TRACE = "slc:logTrace";
+ public final static String SLC_LOG_DEBUG = "slc:logDebug";
+ public final static String SLC_LOG_INFO = "slc:logInfo";
+	public final static String SLC_LOG_WARNING = "slc:logWarning";
+ public final static String SLC_LOG_ERROR = "slc:logError";
+
/*
* REPO
*/
package org.argeo.slc.jcr.execution;
+import java.util.Calendar;
+import java.util.GregorianCalendar;
+import java.util.List;
+
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import org.argeo.jcr.JcrUtils;
import org.argeo.slc.SlcException;
import org.argeo.slc.execution.ExecutionProcess;
+import org.argeo.slc.execution.ExecutionStep;
import org.argeo.slc.jcr.SlcNames;
+import org.argeo.slc.jcr.SlcTypes;
/** Execution process implementation based on a JCR node. */
-public class JcrExecutionProcess implements ExecutionProcess {
+public class JcrExecutionProcess implements ExecutionProcess, SlcNames {
private Log log = LogFactory.getLog(JcrExecutionProcess.class);
private final Node node;
+ private Long nextLogLine = 1l;
+
public JcrExecutionProcess(Node node) {
this.node = node;
}
public String getUuid() {
try {
- return node.getProperty(SlcNames.SLC_UUID).getString();
+ return node.getProperty(SLC_UUID).getString();
} catch (RepositoryException e) {
throw new SlcException("Cannot get uuid for " + node, e);
}
public String getStatus() {
try {
- return node.getProperty(SlcNames.SLC_STATUS).getString();
+ return node.getProperty(SLC_STATUS).getString();
} catch (RepositoryException e) {
log.error("Cannot get status: " + e);
// we should re-throw exception because this information can
public void setStatus(String status) {
try {
- node.setProperty(SlcNames.SLC_STATUS, status);
+ node.setProperty(SLC_STATUS, status);
// last modified properties needs to be manually updated
// see https://issues.apache.org/jira/browse/JCR-2233
JcrUtils.updateLastModified(node);
node.getSession().save();
} catch (RepositoryException e) {
- try {
- JcrUtils.discardQuietly(node.getSession());
- } catch (RepositoryException e1) {
- // silent
- }
+ JcrUtils.discardUnderlyingSessionQuietly(node);
// we should re-throw exception because this information can
// probably used for monitoring in case there are already unexpected
// exceptions
}
}
+ /**
+ * Synchronized in order to make sure that there is no concurrent
+ * modification of {@link #nextLogLine}.
+ */
+ public synchronized void addSteps(List<ExecutionStep> steps) {
+ try {
+ steps: for (ExecutionStep step : steps) {
+ String type;
+ if (step.getType().equals(ExecutionStep.TRACE))
+ type = SlcTypes.SLC_LOG_TRACE;
+ else if (step.getType().equals(ExecutionStep.DEBUG))
+ type = SlcTypes.SLC_LOG_DEBUG;
+ else if (step.getType().equals(ExecutionStep.INFO))
+ type = SlcTypes.SLC_LOG_INFO;
+ else if (step.getType().equals(ExecutionStep.WARNING))
+ type = SlcTypes.SLC_LOG_WARNING;
+ else if (step.getType().equals(ExecutionStep.ERROR))
+ type = SlcTypes.SLC_LOG_ERROR;
+ else
+ // skip
+ continue steps;
+
+ String relPath = SLC_LOG + '/' + step.getThread() + '/'
+ + step.getLocation().replace('.', '/');
+ String path = node.getPath() + '/' + relPath;
+ Node location = JcrUtils.mkdirs(node.getSession(), path);
+ Node logEntry = location.addNode(Long.toString(nextLogLine),
+ type);
+ logEntry.setProperty(SLC_MESSAGE, step.getLog());
+ Calendar calendar = new GregorianCalendar();
+ calendar.setTime(step.getTimestamp());
+ logEntry.setProperty(SLC_TIMESTAMP, calendar);
+
+ // System.out.println("Logged " + logEntry.getPath());
+
+ nextLogLine++;
+ }
+
+ // last modified properties needs to be manually updated
+ // see https://issues.apache.org/jira/browse/JCR-2233
+ JcrUtils.updateLastModified(node);
+
+ node.getSession().save();
+ } catch (RepositoryException e) {
+ JcrUtils.discardUnderlyingSessionQuietly(node);
+			log.error("Cannot log execution steps", e);
+ }
+ }
+
public Node getNode() {
return node;
}
- slc:uuid (STRING) ! m
- slc:status (STRING) m
+ slc:flow (slc:realizedFlow)
++ slc:log
+
+// The first part of the relative path is the thread name, rest is location
+[slc:logEntry] > nt:unstructured
+abstract
+- slc:message (STRING) !
+- slc:timestamp (DATE)
+
+// Log levels are set via types.
+// Querying one level also queries the higher levels thanks to the inheritance
+// e.g. 'select * from [slc:logWarn]' also returns errors
+[slc:logTrace] > slc:logEntry
+
+[slc:logDebug] > slc:logTrace
+
+[slc:logInfo] > slc:logDebug
+
+[slc:logWarning] > slc:logInfo
+
+[slc:logError] > slc:logWarning
[slc:realizedFlow] > nt:base
mixin
<artifactId>com.springsource.org.tmatesoft.svn</artifactId>
</dependency>
- <!-- Commons VFS -->
+ <!-- Commons -->
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>com.springsource.org.apache.commons.vfs</artifactId>
</dependency>
+ <dependency>
+ <groupId>org.argeo.dep.osgi</groupId>
+ <artifactId>org.argeo.dep.osgi.commons.exec</artifactId>
+ </dependency>
<dependency>
<groupId>org.argeo.dep.osgi</groupId>
--- /dev/null
+package org.argeo.slc.jsch;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.exec.ExecuteException;
+import org.apache.commons.exec.ExecuteResultHandler;
+import org.apache.commons.exec.ExecuteStreamHandler;
+import org.apache.commons.exec.ExecuteWatchdog;
+import org.apache.commons.exec.Executor;
+import org.apache.commons.exec.ProcessDestroyer;
+
+/** A Commons Exec executor executing remotely via SSH */
+public class JschExecutor implements Executor {
+ private File workingDirectory;
+ private ExecuteStreamHandler streamHandler;
+
+ private SshTarget sshTarget;
+
+ public void setExitValue(int value) {
+ // TODO Auto-generated method stub
+
+ }
+
+ public void setExitValues(int[] values) {
+ // TODO Auto-generated method stub
+
+ }
+
+ public boolean isFailure(int exitValue) {
+ return Executor.INVALID_EXITVALUE == exitValue;
+ }
+
+ public ExecuteStreamHandler getStreamHandler() {
+ return streamHandler;
+ }
+
+ public void setStreamHandler(ExecuteStreamHandler streamHandler) {
+ this.streamHandler = streamHandler;
+ }
+
+ public ExecuteWatchdog getWatchdog() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public void setWatchdog(ExecuteWatchdog watchDog) {
+ // TODO Auto-generated method stub
+
+ }
+
+ public ProcessDestroyer getProcessDestroyer() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public void setProcessDestroyer(ProcessDestroyer processDestroyer) {
+ // TODO Auto-generated method stub
+
+ }
+
+ public File getWorkingDirectory() {
+ return workingDirectory;
+ }
+
+ public void setWorkingDirectory(File workingDirectory) {
+ this.workingDirectory = workingDirectory;
+ }
+
+ public int execute(CommandLine command) throws ExecuteException,
+ IOException {
+ return execute(command, (Map) null);
+ }
+
+ public int execute(CommandLine command, Map environment)
+ throws ExecuteException, IOException {
+ String cmd = command.toString();
+ RemoteExec remoteExec = new RemoteExec();
+ remoteExec.setSshTarget(sshTarget);
+ remoteExec.setStreamHandler(streamHandler);
+ remoteExec.setCommand(cmd);
+ if (environment != null)
+ remoteExec.setEnv(environment);
+ remoteExec.run();
+ return remoteExec.getLastExitStatus() != null ? remoteExec
+ .getLastExitStatus() : Executor.INVALID_EXITVALUE;
+ }
+
+ public void execute(CommandLine command, ExecuteResultHandler handler)
+ throws ExecuteException, IOException {
+ // TODO Auto-generated method stub
+
+ }
+
+ public void execute(CommandLine command, Map environment,
+ ExecuteResultHandler handler) throws ExecuteException, IOException {
+
+ }
+
+ public SshTarget getSshTarget() {
+ return sshTarget;
+ }
+
+ public void setSshTarget(SshTarget sshTarget) {
+ this.sshTarget = sshTarget;
+ }
+
+}
import java.util.Map;
import java.util.StringTokenizer;
+import org.apache.commons.exec.ExecuteStreamHandler;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
private String user;
+ private ExecuteStreamHandler streamHandler = null;
+
+ private Integer lastExitStatus = null;
/**
* If set, stdout is written to it as a list of lines. Cleared before each
* run.
throw new SlcException("Cannot specify commands and script");
BufferedReader reader = null;
try {
- reader = new BufferedReader(new InputStreamReader(script
- .getInputStream()));
+ reader = new BufferedReader(new InputStreamReader(
+ script.getInputStream()));
String line = null;
while ((line = reader.readLine()) != null) {
if (!StringUtils.hasText(line))
log.debug("Run '" + command + "' on " + getSshTarget() + "...");
channel.connect();
- if (stdIn != null) {
- Thread stdInThread = new Thread("Stdin " + getSshTarget()) {
- @Override
- public void run() {
- OutputStream out = null;
- try {
- out = channel.getOutputStream();
- IOUtils.copy(stdIn.getInputStream(), out);
- } catch (IOException e) {
- throw new SlcException("Cannot write stdin on "
- + getSshTarget(), e);
- } finally {
- IOUtils.closeQuietly(out);
- }
+ readStdIn(channel);
+ readStdOut(channel);
+
+			if (streamHandler != null) {
+				streamHandler.start();
+				while (!channel.isClosed()) {
+ try {
+ Thread.sleep(100);
+ } catch (Exception e) {
+ break;
}
- };
- stdInThread.start();
+ }
}
- readStdOut(channel);
+
checkExitStatus(channel);
channel.disconnect();
} catch (Exception e) {
}
}
- protected void readStdErr(final ChannelExec channel) {
- new Thread("stderr " + getSshTarget()) {
- public void run() {
- BufferedReader stdErr = null;
+ protected void readStdOut(Channel channel) {
+ try {
+ if (stdOut != null) {
+ OutputStream localStdOut = createOutputStream(stdOut);
try {
- InputStream in = channel.getErrStream();
- stdErr = new BufferedReader(new InputStreamReader(in));
+ IOUtils.copy(channel.getInputStream(), localStdOut);
+ } finally {
+ IOUtils.closeQuietly(localStdOut);
+ }
+ } else if (streamHandler != null) {
+ if (channel.getInputStream() != null)
+ streamHandler.setProcessOutputStream(channel
+ .getInputStream());
+ } else {
+ BufferedReader stdOut = null;
+ try {
+ InputStream in = channel.getInputStream();
+ stdOut = new BufferedReader(new InputStreamReader(in));
String line = null;
- while ((line = stdErr.readLine()) != null) {
- if (!line.trim().equals(""))
- log.error(line);
+ while ((line = stdOut.readLine()) != null) {
+ if (!line.trim().equals("")) {
+
+ if (stdOutLines != null) {
+ stdOutLines.add(line);
+ if (logEvenIfStdOutLines && !quiet)
+ log.info(line);
+ } else {
+ if (!quiet)
+ log.info(line);
+ }
+ }
}
- } catch (IOException e) {
- if (log.isDebugEnabled())
- log.error("Cannot read stderr from " + getSshTarget(),
- e);
} finally {
- IOUtils.closeQuietly(stdErr);
+ IOUtils.closeQuietly(stdOut);
}
}
- }.start();
+ } catch (IOException e) {
+ throw new SlcException("Cannot redirect stdout from "
+ + getSshTarget(), e);
+ }
}
- protected void readStdOut(Channel channel) {
- if (stdOut != null) {
- OutputStream localStdOut = createOutputStream(stdOut);
+ protected void readStdErr(final ChannelExec channel) {
+ if (streamHandler != null) {
try {
- IOUtils.copy(channel.getInputStream(), localStdOut);
+				streamHandler.setProcessErrorStream(channel.getErrStream());
} catch (IOException e) {
- throw new SlcException("Cannot redirect stdout", e);
- } finally {
- IOUtils.closeQuietly(localStdOut);
+ throw new SlcException("Cannot read stderr from "
+ + getSshTarget(), e);
}
} else {
- BufferedReader stdOut = null;
- try {
- InputStream in = channel.getInputStream();
- stdOut = new BufferedReader(new InputStreamReader(in));
- String line = null;
- while ((line = stdOut.readLine()) != null) {
- if (!line.trim().equals("")) {
-
- if (stdOutLines != null) {
- stdOutLines.add(line);
- if (logEvenIfStdOutLines && !quiet)
- log.info(line);
- } else {
- if (!quiet)
- log.info(line);
+ new Thread("stderr " + getSshTarget()) {
+ public void run() {
+ BufferedReader stdErr = null;
+ try {
+ InputStream in = channel.getErrStream();
+ stdErr = new BufferedReader(new InputStreamReader(in));
+ String line = null;
+ while ((line = stdErr.readLine()) != null) {
+ if (!line.trim().equals(""))
+ log.error(line);
}
+ } catch (IOException e) {
+ if (log.isDebugEnabled())
+ log.error("Cannot read stderr from "
+ + getSshTarget(), e);
+ } finally {
+ IOUtils.closeQuietly(stdErr);
}
}
+ }.start();
+ }
+ }
+
+ protected void readStdIn(final ChannelExec channel) {
+ if (stdIn != null) {
+ Thread stdInThread = new Thread("Stdin " + getSshTarget()) {
+ @Override
+ public void run() {
+ OutputStream out = null;
+ try {
+ out = channel.getOutputStream();
+ IOUtils.copy(stdIn.getInputStream(), out);
+ } catch (IOException e) {
+ throw new SlcException("Cannot write stdin on "
+ + getSshTarget(), e);
+ } finally {
+ IOUtils.closeQuietly(out);
+ }
+ }
+ };
+ stdInThread.start();
+ } else if (streamHandler != null) {
+ try {
+ streamHandler.setProcessInputStream(channel.getOutputStream());
} catch (IOException e) {
- if (log.isDebugEnabled())
- log.error("Cannot read stdout from " + getSshTarget(), e);
- } finally {
- IOUtils.closeQuietly(stdOut);
+ throw new SlcException("Cannot write stdin on "
+ + getSshTarget(), e);
}
}
}
protected void checkExitStatus(Channel channel) {
if (channel.isClosed()) {
- int exitStatus = channel.getExitStatus();
- if (exitStatus == 0) {
+ lastExitStatus = channel.getExitStatus();
+ if (lastExitStatus == 0) {
if (log.isTraceEnabled())
- log.trace("Remote execution exit status: " + exitStatus);
+ log.trace("Remote execution exit status: " + lastExitStatus);
} else {
String msg = "Remote execution failed with " + " exit status: "
- + exitStatus;
+ + lastExitStatus;
if (failOnBadExitStatus)
throw new SlcException(msg);
else
return out;
}
+ public Integer getLastExitStatus() {
+ return lastExitStatus;
+ }
+
+ public void setStreamHandler(ExecuteStreamHandler executeStreamHandler) {
+ this.streamHandler = executeStreamHandler;
+ }
+
public void setCommand(String command) {
this.command = command;
}
--- /dev/null
+package org.argeo.slc.lib.linux.rpmfactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.exec.Executor;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.argeo.slc.SlcException;
+import org.argeo.slc.core.execution.tasks.SystemCall;
+
+/** Rebuild an SRPM in mock. (Historical) Replaces the build-mock.sh script. */
+public class BuildInMock implements Runnable {
+ private final static Log log = LogFactory.getLog(BuildInMock.class);
+
+ /** Mock flavour provided by the EPEL repository */
+ public final static String EPEL = "EPEL";
+ /** Mock flavour provided by CentOS until v5 */
+ public final static String CENTOS = "CENTOS";
+
+ public final static String NOARCH = "noarch";
+
+ private String mockVar = "/var/lib/mock";
+
+ private String mockFlavour = EPEL;
+ private String mockConfig = null;
+
+ private String repository;
+ private String release = null;
+ private String level = null;
+ private String arch = NOARCH;
+
+ private String srpm;
+
+ private Boolean mkdirs = true;
+
+ private RpmBuildEnvironment buildEnvironment;
+ private Executor executor;
+
+ public void run() {
+ // TODO check if caller is in mock group
+
+ String cfg = mockConfig != null ? mockConfig : repository + "-"
+ + release + "-" + level + "-" + arch;
+
+ // prepare mock call
+ SystemCall mock = new SystemCall();
+ if (arch != null)
+ mock.arg("setarch").arg(arch);
+ mock.arg("mock");
+ if (mockFlavour.equals(EPEL))
+ mock.arg("-v");
+ else if (mockFlavour.equals(CENTOS))
+ mock.arg("--debug");
+ if (arch != null)
+ mock.arg("--arch=" + arch);
+ mock.arg("-r").arg(cfg);
+ mock.arg(srpm);
+
+ mock.setLogCommand(true);
+
+ // mock command execution
+ mock.setExecutor(executor);
+ mock.run();
+
+ File repoDir = new File(buildEnvironment.getStagingBase() + "/"
+ + repository + "/" + level + "/" + release);
+ File srpmDir = new File(repoDir, "SRPMS");
+ if (mkdirs)
+ srpmDir.mkdirs();
+ File archDir = null;
+ File debuginfoDir = null;
+ if (!arch.equals(NOARCH)) {
+ archDir = new File(repoDir, arch);
+ debuginfoDir = new File(archDir, "debuginfo");
+ debuginfoDir.mkdirs();
+ }
+
+ // copy RPMs
+ Set<File> reposToRecreate = new HashSet<File>();
+ File resultDir = new File(mockVar + "/" + cfg + "/result");
+ rpms: for (File file : resultDir.listFiles()) {
+ if (file.isDirectory())
+ continue rpms;
+
+ File[] targetDirs;
+ if (file.getName().contains(".src.rpm"))
+ targetDirs = new File[] { srpmDir };
+ else if (file.getName().contains("-debuginfo-"))
+ targetDirs = new File[] { debuginfoDir };
+ else if (!arch.equals(NOARCH)
+ && file.getName().contains("." + arch + ".rpm"))
+ targetDirs = new File[] { archDir };
+ else if (file.getName().contains(".noarch.rpm")) {
+ List<File> dirs = new ArrayList<File>();
+ for (String arch : buildEnvironment.getArchs())
+ dirs.add(new File(repoDir, arch));
+ targetDirs = dirs.toArray(new File[dirs.size()]);
+ } else if (file.getName().contains(".rpm"))
+ throw new SlcException("Don't know where to copy " + file);
+ else {
+ if (log.isTraceEnabled())
+ log.trace("Skip " + file);
+ continue rpms;
+ }
+
+ reposToRecreate.addAll(Arrays.asList(targetDirs));
+ copyToDirs(file, targetDirs);
+ }
+
+ // recreate changed repos
+ for (File repoToRecreate : reposToRecreate) {
+ SystemCall createrepo = new SystemCall();
+ createrepo.arg("createrepo");
+ // sqllite db
+ createrepo.arg("-d");
+ // quiet
+ createrepo.arg("-q");
+ createrepo.arg(repoToRecreate.getAbsolutePath());
+
+ createrepo.setExecutor(executor);
+ createrepo.run();
+ log.info("Updated repo " + repoToRecreate);
+ }
+ }
+
+ protected void copyToDirs(File file, File[] dirs) {
+ for (File dir : dirs) {
+ try {
+ FileUtils.copyFileToDirectory(file, dir);
+ if (log.isDebugEnabled())
+ log.debug(file + " => " + dir);
+ } catch (IOException e) {
+ throw new SlcException("Cannot copy " + file + " to " + dir, e);
+ }
+ }
+ }
+
+ public void setMockFlavour(String mockFlavour) {
+ this.mockFlavour = mockFlavour;
+ }
+
+ public void setMockConfig(String mockConfig) {
+ this.mockConfig = mockConfig;
+ }
+
+ public void setRepository(String repo) {
+ this.repository = repo;
+ }
+
+ public void setRelease(String release) {
+ this.release = release;
+ }
+
+ public void setLevel(String level) {
+ this.level = level;
+ }
+
+ public void setArch(String arch) {
+ this.arch = arch;
+ }
+
+ public void setSrpm(String srpm) {
+ this.srpm = srpm;
+ }
+
+ public void setMockVar(String mockVar) {
+ this.mockVar = mockVar;
+ }
+
+ public void setMkdirs(Boolean mkdirs) {
+ this.mkdirs = mkdirs;
+ }
+
+ public void setBuildEnvironment(RpmBuildEnvironment buildEnvironment) {
+ this.buildEnvironment = buildEnvironment;
+ }
+
+ public void setExecutor(Executor executor) {
+ this.executor = executor;
+ }
+
+}
import java.util.ArrayList;
import java.util.List;
+import org.apache.commons.exec.Executor;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
private File topdir;
- /** Directory where to cache downloaded dsitributions. */
+ /** Directory where to cache downloaded distributions. */
private File distributionCache;
private Resource specFile;
private File srpmFile;
+ private Executor executor;
+
public void run() {
File sourcesDir = new File(topdir, "SOURCES");
sourcesDir.mkdirs();
packageSrpm.arg("rpmbuild");
packageSrpm.arg("-bs").arg("--nodeps");
packageSrpm.arg("--rcfile=rpmrc");
+ packageSrpm.arg("--macros=" + RpmBuildEnvironment.defaultMacroFiles
+ + ":rpmmacros");
// buildSrpm.arg("-D", "_topdir " + topdir.getCanonicalPath() + "");
packageSrpm.arg("SPECS/" + specFile.getFilename());
packageSrpm.setExecDir(topdir.getCanonicalPath());
packageSrpm.setLogCommand(true);
// Execute
+ packageSrpm.setExecutor(executor);
String answer = packageSrpm.function();
// Extract generated SRPM path
if (!targetFile.exists() || overwriteSources)
copyResourceToFile(res, targetFile);
if (!targetDir.equals(sourcesDir)) {
- File fileInSourcesDir = new File(sourcesDir, targetFile
- .getName());
+ File fileInSourcesDir = new File(sourcesDir,
+ targetFile.getName());
if (!fileInSourcesDir.exists()
|| !(fileInSourcesDir.length() == targetFile
.length()))
this.distributionCache = distributionCache;
}
+ public void setExecutor(Executor executor) {
+ this.executor = executor;
+ }
+
}
import java.util.List;
import java.util.Map;
+import org.apache.commons.exec.Executor;
import org.apache.commons.io.FileUtils;
import org.argeo.slc.SlcException;
* components performing the various actions related to RPM build.
*/
public class RpmBuildEnvironment {
- private String defaultMacroFiles = "/usr/lib/rpm/macros:/usr/lib/rpm/ia32e-linux/macros:/usr/lib/rpm/redhat/macros:/etc/rpm/macros.*:/etc/rpm/macros:/etc/rpm/ia32e-linux/macros:~/.rpmmacros";
+ static String defaultMacroFiles = "/usr/lib/rpm/macros:/usr/lib/rpm/ia32e-linux/macros:/usr/lib/rpm/redhat/macros:/etc/rpm/macros.*:/etc/rpm/macros:/etc/rpm/ia32e-linux/macros:~/.rpmmacros";
+
private Map<String, String> rpmmacros = new HashMap<String, String>();
+ private List<String> archs = new ArrayList<String>();
+
+ private String stagingBase = System.getProperty("user.home")
+ + "/dev/staging";
+
/** Write (topdir)/rpmmacros and (topdir)/rpmrc */
public void writeRpmbuildConfigFiles(File topdir) {
writeRpmbuildConfigFiles(topdir, new File(topdir, "rpmmacros"),
this.defaultMacroFiles = defaultMacroFiles;
}
+ public void setArchs(List<String> archs) {
+ this.archs = archs;
+ }
+
+ public List<String> getArchs() {
+ return archs;
+ }
+
+ public String getStagingBase() {
+ return stagingBase;
+ }
+
+ public void setStagingBase(String stagingBase) {
+ this.stagingBase = stagingBase;
+ }
}
import java.util.Date;
import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Layout;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
import org.apache.log4j.spi.LoggingEvent;
import org.argeo.slc.core.execution.ExecutionThread;
import org.argeo.slc.core.execution.ProcessThreadGroup;
import org.argeo.slc.execution.ExecutionStep;
-import org.springframework.beans.factory.DisposableBean;
-import org.springframework.beans.factory.InitializingBean;
/** Not meant to be used directly in standard log4j config */
-public class SlcExecutionAppender extends AppenderSkeleton implements
- InitializingBean, DisposableBean {
+public class SlcExecutionAppender extends AppenderSkeleton {
private Boolean disabled = false;
}
};
- private Layout layout = null;
- private String pattern = "%m - %c%n";
+ // private Layout layout = null;
+ // private String pattern = "%m - %c%n";
private Boolean onlyExecutionThread = false;
- public void afterPropertiesSet() {
- if (layout != null)
- setLayout(layout);
- else
- setLayout(new PatternLayout(pattern));
+ public void init() {
+ // if (layout != null)
+ // setLayout(layout);
+ // else
+ // setLayout(new PatternLayout(pattern));
Logger.getRootLogger().addAppender(this);
}
}
}
+ // Check whether we are within an executing process
Thread currentThread = Thread.currentThread();
if (currentThread.getThreadGroup() instanceof ProcessThreadGroup) {
if (onlyExecutionThread
else
type = ExecutionStep.INFO;
- ExecutionStep step = new ExecutionStep(new Date(
- event.getTimeStamp()), type, layout.format(event));
+ ExecutionStep step = new ExecutionStep(event.getLoggerName(),
+ new Date(event.getTimeStamp()), type, event.getMessage()
+ .toString());
try {
dispatching.set(true);
((ProcessThreadGroup) currentThread.getThreadGroup())
- .dispatchAddStep(step);
+ .getSteps().add(step);
} finally {
dispatching.set(false);
}
return false;
}
- public void setLayout(Layout layout) {
- this.layout = layout;
- }
+ // public void setLayout(Layout layout) {
+ // this.layout = layout;
+ // }
- public void setPattern(String pattern) {
- this.pattern = pattern;
+ /** For development purpose, since using regular logging is not easy here */
+ private static void stdOut(Object obj) {
+ System.out.println(obj);
}
+ // public void setPattern(String pattern) {
+ // this.pattern = pattern;
+ // }
+
public void setOnlyExecutionThread(Boolean onlyExecutionThread) {
this.onlyExecutionThread = onlyExecutionThread;
}