org.eclipse.equinox.http.registry,\
argeo.osgi.start.5.slc=\
-org.argeo.slc.node.jackrabbit,\
org.argeo.slc.agent,\
org.argeo.slc.agent.jcr,\
org.argeo.slc.server.repo,\
--- /dev/null
+argeo.osgi.start.2.http=\
+org.eclipse.equinox.http.servlet,\
+org.eclipse.equinox.http.jetty,\
+org.eclipse.equinox.cm,\
+org.eclipse.rap.rwt.osgi
+
+argeo.osgi.start.3.node=\
+org.argeo.cms
+
+argeo.osgi.start.4.apps=\
+org.eclipse.gemini.blueprint.extender
+
+argeo.osgi.start.4.workbench=\
+org.eclipse.equinox.http.registry
+
+argeo.osgi.start.5.slc=\
+org.argeo.slc.agent,\
+org.argeo.slc.agent.jcr,\
+org.argeo.slc.server.repo
+
+slc.executionModules=org.argeo.tp.factory.core,\
+org.argeo.tp.factory.extras
+
+argeo.node.repo.type=localfs
+
+org.osgi.framework.security=osgi
+java.security.policy=file:../../all.policy
+
+org.osgi.service.http.port=7080
+org.eclipse.equinox.http.jetty.log.stderr.threshold=info
+
+log4j.configuration=file:../../log4j.properties
+# org.eclipse.rap.workbenchAutostart is set in the fixed section below
+
+# DON'T CHANGE BELOW UNLESS YOU KNOW WHAT YOU ARE DOING
+eclipse.ignoreApp=true
+osgi.noShutdown=true
+org.eclipse.equinox.http.jetty.autostart=false
+org.eclipse.rap.workbenchAutostart=false
+
+# Avoid error logs (see https://issues.apache.org/jira/browse/JCR-2226)
+org.apache.jackrabbit.core.statementcache=false
\ No newline at end of file
<artifactId>org.argeo.slc.agent.jcr</artifactId>
<version>2.1.5-SNAPSHOT</version>
</dependency>
- <dependency>
- <groupId>org.argeo.slc</groupId>
- <artifactId>org.argeo.slc.node.jackrabbit</artifactId>
- <version>2.1.5-SNAPSHOT</version>
- </dependency>
+<!-- <dependency> -->
+<!-- <groupId>org.argeo.slc</groupId> -->
+<!-- <artifactId>org.argeo.slc.node.jackrabbit</artifactId> -->
+<!-- <version>2.1.5-SNAPSHOT</version> -->
+<!-- </dependency> -->
<!-- Node -->
<dependency>
/** Constants useful across all SLC components */
public interface SlcConstants {
/** Read-write role. */
- public final static String ROLE_SLC = "org.argeo.slc.user";
+ public final static String ROLE_SLC = "cn=org.argeo.slc.user,ou=roles,ou=node";
/** Read only unlogged user */
public final static String USER_ANONYMOUS = "anonymous";
private final static Log log = LogFactory.getLog(NormalizeWorkspace.class);
public final static String ID = DistPlugin.PLUGIN_ID + ".normalizeWorkspace";
- public final static ImageDescriptor DEFAULT_ICON = DistPlugin
- .getImageDescriptor("icons/normalize.gif");
+ public final static ImageDescriptor DEFAULT_ICON = DistPlugin.getImageDescriptor("icons/normalize.gif");
public final static String PARAM_WORKSPACE_NAME = "workspaceName";
public final static String PARAM_TARGET_REPO_PATH = "targetRepoPath";
Session currSession = null;
NormalizeJob job;
try {
- String msg = "Your are about to normalize workspace: "
- + wkspName
+ String msg = "You are about to normalize workspace: " + wkspName
+ ".\nThis will index OSGi bundles and Maven artifacts, "
+ "it will also convert Maven sources to PDE Sources if needed.\n"
- + "Note that no information will be overwritten: "
- + "all existing information are kept."
+ + "Note that no information will be overwritten: " + "all existing information are kept."
+ "\n\n Do you really want to proceed ?";
- if (!MessageDialog.openConfirm(DistPlugin.getDefault()
- .getWorkbench().getDisplay().getActiveShell(),
+ if (!MessageDialog.openConfirm(DistPlugin.getDefault().getWorkbench().getDisplay().getActiveShell(),
"Confirm workspace normalization", msg))
return null;
currSession = repository.login();
Node repoNode = currSession.getNode(targetRepoPath);
- Repository repository = RepoUtils.getRepository(repositoryFactory,
- keyring, repoNode);
- Credentials credentials = RepoUtils.getRepositoryCredentials(
- keyring, repoNode);
+ Repository repository = RepoUtils.getRepository(repositoryFactory, keyring, repoNode);
+ Credentials credentials = RepoUtils.getRepositoryCredentials(keyring, repoNode);
job = new NormalizeJob(repository.login(credentials, wkspName));
job.setUser(true);
try {
JcrMonitor monitor = new EclipseJcrMonitor(progressMonitor);
// Normalize artifacts
- Query countQuery = session
- .getWorkspace()
- .getQueryManager()
- .createQuery("select file from [nt:file] as file",
- Query.JCR_SQL2);
+ Query countQuery = session.getWorkspace().getQueryManager()
+ .createQuery("select file from [nt:file] as file", Query.JCR_SQL2);
QueryResult result = countQuery.execute();
Long expectedCount = result.getNodes().getSize();
- monitor.beginTask("Normalize artifacts of "
- + session.getWorkspace().getName(),
+ monitor.beginTask("Normalize artifacts of " + session.getWorkspace().getName(),
expectedCount.intValue());
NormalizingTraverser tiv = new NormalizingTraverser(monitor);
Node artifactBaseNode = session.getNode(artifactBasePath);
artifactBaseNode.accept(tiv);
} catch (Exception e) {
- log.error("Error normalizing workspace "
- + session.getWorkspace().getName() + ": "
- + e.getMessage());
+ log.error("Error normalizing workspace " + session.getWorkspace().getName() + ": " + e.getMessage());
if (log.isErrorEnabled())
e.printStackTrace();
return new Status(IStatus.ERROR, DistPlugin.PLUGIN_ID,
- "Cannot normalize distribution "
- + session.getWorkspace().getName(), e);
+ "Cannot normalize distribution " + session.getWorkspace().getName(), e);
} finally {
JcrUtils.logoutQuietly(session);
}
}
@Override
- protected void entering(Property property, int level)
- throws RepositoryException {
+ protected void entering(Property property, int level) throws RepositoryException {
}
@Override
- protected void entering(Node node, int level)
- throws RepositoryException {
+ protected void entering(Node node, int level) throws RepositoryException {
+ if (node.getPath().startsWith(RepoConstants.DIST_DOWNLOAD_BASEPATH))
+ return;
+
if (node.isNodeType(NodeType.NT_FILE)) {
if (node.getName().endsWith("-sources.jar")) {
monitor.subTask(node.getName());
if (node.getSession().hasPendingChanges()) {
node.getSession().save();
if (log.isDebugEnabled())
- log.debug("Processed jar artifact "
- + node.getPath());
+ log.debug("Processed jar artifact " + node.getPath());
}
monitor.worked(1);
}
if (node.getSession().hasPendingChanges()) {
node.getSession().save();
if (log.isDebugEnabled())
- log.debug("Processed pom artifact "
- + node.getPath());
+ log.debug("Processed pom artifact " + node.getPath());
}
monitor.worked(1);
} else {
}
@Override
- protected void leaving(Property property, int level)
- throws RepositoryException {
+ protected void leaving(Property property, int level) throws RepositoryException {
}
@Override
dispose();
nodeSession = nodeRepository.login();
- String reposPath = NodeUtils.getUserHome(nodeSession).getPath()
- + RepoConstants.REPOSITORIES_BASE_PATH;
+ String reposPath = NodeUtils.getUserHome(nodeSession).getPath() + RepoConstants.REPOSITORIES_BASE_PATH;
if (!nodeSession.itemExists(reposPath))
initializeModel(nodeSession);
while (repos.hasNext()) {
Node repoNode = repos.nextNode();
if (repoNode.isNodeType(ArgeoTypes.ARGEO_REMOTE_REPOSITORY)) {
- String label = repoNode.isNodeType(NodeType.MIX_TITLE) ? repoNode
- .getProperty(Property.JCR_TITLE).getString()
- : repoNode.getName();
- repositories.add(new RepoElem(repositoryFactory, keyring,
- repoNode, label));
+ String label = repoNode.isNodeType(NodeType.MIX_TITLE)
+ ? repoNode.getProperty(Property.JCR_TITLE).getString() : repoNode.getName();
+ repositories.add(new RepoElem(repositoryFactory, keyring, repoNode, label));
}
}
} catch (RepositoryException e) {
throw new SlcException("User must be authenticated.");
// make sure base directory is available
- Node repos = JcrUtils.mkdirs(nodeSession, homeNode.getPath()
- + RepoConstants.REPOSITORIES_BASE_PATH);
+ Node repos = JcrUtils.mkdirs(homeNode, RepoConstants.REPOSITORIES_BASE_PATH,null);
if (nodeSession.hasPendingChanges())
nodeSession.save();
// register default local java repository
String alias = RepoConstants.DEFAULT_JAVA_REPOSITORY_ALIAS;
- Repository javaRepository = NodeUtils.getRepositoryByAlias(
- repositoryFactory, alias);
+ Repository javaRepository = NodeUtils.getRepositoryByAlias(repositoryFactory, alias);
if (javaRepository != null) {
if (!repos.hasNode(alias)) {
- Node repoNode = repos.addNode(alias,
- ArgeoTypes.ARGEO_REMOTE_REPOSITORY);
+ Node repoNode = repos.addNode(alias, ArgeoTypes.ARGEO_REMOTE_REPOSITORY);
repoNode.setProperty(ArgeoNames.ARGEO_URI, "vm:///" + alias);
repoNode.addMixin(NodeType.MIX_TITLE);
- repoNode.setProperty(Property.JCR_TITLE,
- RepoConstants.DEFAULT_JAVA_REPOSITORY_LABEL);
+ repoNode.setProperty(Property.JCR_TITLE, RepoConstants.DEFAULT_JAVA_REPOSITORY_LABEL);
nodeSession.save();
}
}
// reset workspace list
wkspViewer.setAllChecked(false);
workspaces.clear();
- session = currSourceRepo.login(currSourceCred);
+ // FIXME make it more generic
+ session = currSourceRepo.login(currSourceCred,RepoConstants.DEFAULT_DEFAULT_WORKSPACE);
// remove unvalid elements
for (String name : session.getWorkspace()
.getAccessibleWorkspaceNames())
URI checkedUri = new URI(uri.getText());
String checkedUriStr = checkedUri.toString();
Repository repository = NodeUtils.getRepositoryByUri(repositoryFactory, checkedUriStr);
- // FIXME make it more generic
- String defaultWorkspace = "main";
if (username.getText().trim().equals("")) {// anonymous
- session = repository.login(defaultWorkspace);
+ session = repository.login(RepoConstants.DEFAULT_DEFAULT_WORKSPACE);
} else {
char[] pwd = password.getTextChars();
SimpleCredentials sc = new SimpleCredentials(username.getText(), pwd);
- session = repository.login(sc, defaultWorkspace);
+ session = repository.login(sc, RepoConstants.DEFAULT_DEFAULT_WORKSPACE);
}
} else {// alias
Repository repository = NodeUtils.getRepositoryByAlias(repositoryFactory, uri.getText());
\r
<!-- REFERENCES -->\r
<reference id="repository" interface="javax.jcr.Repository"\r
- filter="(argeo.jcr.repository.alias=slc)" />\r
+ filter="(argeo.jcr.repository.alias=home)" />\r
\r
<reference id="agent" interface="org.argeo.slc.execution.SlcAgent" />\r
\r
Import-Package: javax.jcr.nodetype,\
+javax.jcr.security,\
org.apache.tools.ant.*;resolution:="optional",\
junit.framework;resolution:="optional",\
*
*/
package org.argeo.slc.core.execution;
+import java.security.AccessControlContext;
+import java.security.AccessController;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.List;
+import javax.security.auth.Subject;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.argeo.slc.SlcException;
import org.argeo.slc.execution.ExecutionFlowDescriptor;
import org.argeo.slc.execution.ExecutionModulesManager;
import org.argeo.slc.execution.ExecutionStep;
private ExecutionModulesManager executionModulesManager;
private final RealizedFlow realizedFlow;
+ private final AccessControlContext accessControlContext;
private List<Runnable> destructionCallbacks = new ArrayList<Runnable>();
- public ExecutionThread(ProcessThreadGroup processThreadGroup,
- ExecutionModulesManager executionModulesManager,
+ public ExecutionThread(ProcessThreadGroup processThreadGroup, ExecutionModulesManager executionModulesManager,
RealizedFlow realizedFlow) {
- super(processThreadGroup, "Flow "
- + realizedFlow.getFlowDescriptor().getName());
+ super(processThreadGroup, "Flow " + realizedFlow.getFlowDescriptor().getName());
this.realizedFlow = realizedFlow;
this.executionModulesManager = executionModulesManager;
+ accessControlContext = AccessController.getContext();
}
public void run() {
// authenticate thread
-// Authentication authentication = getProcessThreadGroup()
-// .getAuthentication();
-// if (authentication == null)
-// throw new SlcException("Can only execute authenticated threads");
-// SecurityContextHolder.getContext().setAuthentication(authentication);
+ // Authentication authentication = getProcessThreadGroup()
+ // .getAuthentication();
+ // if (authentication == null)
+ // throw new SlcException("Can only execute authenticated threads");
+ // SecurityContextHolder.getContext().setAuthentication(authentication);
// Retrieve execution flow descriptor
- ExecutionFlowDescriptor executionFlowDescriptor = realizedFlow
- .getFlowDescriptor();
+ ExecutionFlowDescriptor executionFlowDescriptor = realizedFlow.getFlowDescriptor();
String flowName = executionFlowDescriptor.getName();
getProcessThreadGroup().dispatchAddStep(
- new ExecutionStep(realizedFlow.getModuleName(),
- ExecutionStep.PHASE_START, "Flow " + flowName));
+ new ExecutionStep(realizedFlow.getModuleName(), ExecutionStep.PHASE_START, "Flow " + flowName));
try {
- String autoUpgrade = System
- .getProperty(SYSPROP_EXECUTION_AUTO_UPGRADE);
- if (autoUpgrade != null && autoUpgrade.equals("true"))
- executionModulesManager.upgrade(realizedFlow
- .getModuleNameVersion());
- executionModulesManager.start(realizedFlow.getModuleNameVersion());
- //
- // START FLOW
- //
- executionModulesManager.execute(realizedFlow);
- // END FLOW
+ Subject subject = Subject.getSubject(accessControlContext);
+ try {
+ Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
+
+ @Override
+ public Void run() throws Exception {
+ String autoUpgrade = System.getProperty(SYSPROP_EXECUTION_AUTO_UPGRADE);
+ if (autoUpgrade != null && autoUpgrade.equals("true"))
+ executionModulesManager.upgrade(realizedFlow.getModuleNameVersion());
+ executionModulesManager.start(realizedFlow.getModuleNameVersion());
+ //
+ // START FLOW
+ //
+ executionModulesManager.execute(realizedFlow);
+ // END FLOW
+ return null;
+ }
+
+ });
+ } catch (PrivilegedActionException privilegedActionException) {
+ throw (Exception) privilegedActionException.getCause();
+ }
} catch (FlowConfigurationException e) {
- String msg = "Configuration problem with flow " + flowName + ":\n"
- + e.getMessage();
+ String msg = "Configuration problem with flow " + flowName + ":\n" + e.getMessage();
log.error(msg);
getProcessThreadGroup().dispatchAddStep(
- new ExecutionStep(realizedFlow.getModuleName(),
- ExecutionStep.ERROR, msg + " " + e.getMessage()));
+ new ExecutionStep(realizedFlow.getModuleName(), ExecutionStep.ERROR, msg + " " + e.getMessage()));
} catch (Exception e) {
// TODO: re-throw exception ?
String msg = "Execution of flow " + flowName + " failed.";
log.error(msg, e);
getProcessThreadGroup().dispatchAddStep(
- new ExecutionStep(realizedFlow.getModuleName(),
- ExecutionStep.ERROR, msg + " " + e.getMessage()));
+ new ExecutionStep(realizedFlow.getModuleName(), ExecutionStep.ERROR, msg + " " + e.getMessage()));
} finally {
getProcessThreadGroup().dispatchAddStep(
- new ExecutionStep(realizedFlow.getModuleName(),
- ExecutionStep.PHASE_END, "Flow " + flowName));
+ new ExecutionStep(realizedFlow.getModuleName(), ExecutionStep.PHASE_END, "Flow " + flowName));
processDestructionCallbacks();
}
}
try {
destructionCallbacks.get(i).run();
} catch (Exception e) {
- log.warn("Could not process destruction callback " + i
- + " in thread " + getName(), e);
+ log.warn("Could not process destruction callback " + i + " in thread " + getName(), e);
}
}
}
*/
package org.argeo.slc.core.execution;
+import java.security.AccessControlContext;
+import java.security.AccessController;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
+import javax.security.auth.Subject;
+
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.argeo.slc.SlcException;
import org.argeo.slc.execution.ExecutionModulesManager;
import org.argeo.slc.execution.ExecutionProcess;
import org.argeo.slc.execution.ExecutionStep;
private final ExecutionProcess process;
private final ProcessThreadGroup processThreadGroup;
- private Set<ExecutionThread> executionThreads = Collections
- .synchronizedSet(new HashSet<ExecutionThread>());
+ private Set<ExecutionThread> executionThreads = Collections.synchronizedSet(new HashSet<ExecutionThread>());
// private Boolean hadAnError = false;
private Boolean killed = false;
- public ProcessThread(ThreadGroup processesThreadGroup,
- ExecutionModulesManager executionModulesManager,
+ private final AccessControlContext accessControlContext;
+
+ public ProcessThread(ThreadGroup processesThreadGroup, ExecutionModulesManager executionModulesManager,
ExecutionProcess process) {
super(processesThreadGroup, "SLC Process #" + process.getUuid());
this.executionModulesManager = executionModulesManager;
this.process = process;
processThreadGroup = new ProcessThreadGroup(process);
+ accessControlContext = AccessController.getContext();
}
public final void run() {
// authenticate thread
-// Authentication authentication = getProcessThreadGroup()
-// .getAuthentication();
-// if (authentication == null)
-// throw new SlcException("Can only execute authenticated threads");
-// SecurityContextHolder.getContext().setAuthentication(authentication);
+ // Authentication authentication = getProcessThreadGroup()
+ // .getAuthentication();
+ // if (authentication == null)
+ // throw new SlcException("Can only execute authenticated threads");
+ // SecurityContextHolder.getContext().setAuthentication(authentication);
- log.info("\n##\n## SLC Process #" + process.getUuid()
- + " STARTED\n##\n");
+ log.info("\n##\n## SLC Process #" + process.getUuid() + " STARTED\n##\n");
// Start logging
new LoggingThread().start();
process.setStatus(ExecutionProcess.RUNNING);
try {
- process();
+ Subject subject = Subject.getSubject(accessControlContext);
+ try {
+ Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
+
+ @Override
+ public Void run() throws Exception {
+ process();
+ return null;
+ }
+
+ });
+ } catch (PrivilegedActionException privilegedActionException) {
+ Throwable cause = privilegedActionException.getCause();
+ if (cause instanceof InterruptedException)
+ throw (InterruptedException) cause;
+ else
+ throw new SlcException("Cannot process", cause);
+ }
+ // process();
} catch (InterruptedException e) {
die();
return;
} catch (Exception e) {
- String msg = "Process " + getProcess().getUuid()
- + " failed unexpectedly.";
+ String msg = "Process " + getProcess().getUuid() + " failed unexpectedly.";
log.error(msg, e);
- getProcessThreadGroup().dispatchAddStep(
- new ExecutionStep("Process", ExecutionStep.ERROR, msg + " "
- + e.getMessage()));
+ getProcessThreadGroup()
+ .dispatchAddStep(new ExecutionStep("Process", ExecutionStep.ERROR, msg + " " + e.getMessage()));
}
// waits for all execution threads to complete (in case they were
process.setStatus(ExecutionProcess.COMPLETED);
// executionModulesManager.dispatchUpdateStatus(process, oldStatus,
// process.getStatus());
- log.info("\n## SLC Process #" + process.getUuid() + " "
- + process.getStatus() + "\n");
+ log.info("\n## SLC Process #" + process.getUuid() + " " + process.getStatus() + "\n");
}
/** Called when being killed */
}
/** @return the (distinct) thread used for this execution */
- protected final void execute(RealizedFlow realizedFlow, Boolean synchronous)
- throws InterruptedException {
+ protected final void execute(RealizedFlow realizedFlow, Boolean synchronous) throws InterruptedException {
if (killed)
return;
- ExecutionThread thread = new ExecutionThread(processThreadGroup,
- executionModulesManager, realizedFlow);
+ ExecutionThread thread = new ExecutionThread(processThreadGroup, executionModulesManager, realizedFlow);
executionThreads.add(thread);
thread.start();
break;
}
- if (!ProcessThread.this.isAlive()
- && processThreadGroup.getSteps().size() == 0)
+ if (!ProcessThread.this.isAlive() && processThreadGroup.getSteps().size() == 0)
run = false;
}
}
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
+import javax.jcr.security.Privilege;
import org.argeo.jcr.JcrUtils;
+import org.argeo.slc.SlcConstants;
import org.argeo.slc.SlcException;
import org.argeo.slc.core.execution.DefaultAgent;
import org.argeo.slc.core.execution.ProcessThread;
session = repository.login();
String agentFactoryPath = getAgentFactoryPath();
- Node vmAgentFactoryNode = JcrUtils.mkdirsSafe(session,
- agentFactoryPath, SlcTypes.SLC_AGENT_FACTORY);
+ Node vmAgentFactoryNode = JcrUtils.mkdirsSafe(session, agentFactoryPath, SlcTypes.SLC_AGENT_FACTORY);
+ JcrUtils.addPrivilege(session, SlcJcrConstants.SLC_BASE_PATH, SlcConstants.ROLE_SLC, Privilege.JCR_ALL);
if (!vmAgentFactoryNode.hasNode(agentNodeName)) {
String uuid = UUID.randomUUID().toString();
- Node agentNode = vmAgentFactoryNode.addNode(agentNodeName,
- SlcTypes.SLC_AGENT);
+ Node agentNode = vmAgentFactoryNode.addNode(agentNodeName, SlcTypes.SLC_AGENT);
agentNode.setProperty(SLC_UUID, uuid);
}
session.save();
- return vmAgentFactoryNode.getNode(agentNodeName)
- .getProperty(SLC_UUID).getString();
+ return vmAgentFactoryNode.getNode(agentNodeName).getProperty(SLC_UUID).getString();
} catch (RepositoryException e) {
JcrUtils.discardQuietly(session);
throw new SlcException("Cannot find JCR agent UUID", e);
* SLC AGENT
*/
@Override
- protected ProcessThread createProcessThread(
- ThreadGroup processesThreadGroup,
+ protected ProcessThread createProcessThread(ThreadGroup processesThreadGroup,
ExecutionModulesManager modulesManager, ExecutionProcess process) {
if (process instanceof JcrExecutionProcess)
- return new JcrProcessThread(processesThreadGroup, modulesManager,
- (JcrExecutionProcess) process);
+ return new JcrProcessThread(processesThreadGroup, modulesManager, (JcrExecutionProcess) process);
else
- return super.createProcessThread(processesThreadGroup,
- modulesManager, process);
+ return super.createProcessThread(processesThreadGroup, modulesManager, process);
}
/*
String agentFactoryPath;
if (isRemote) {
InetAddress localhost = InetAddress.getLocalHost();
- agentFactoryPath = SlcJcrConstants.AGENTS_BASE_PATH + "/"
- + localhost.getCanonicalHostName();
+ agentFactoryPath = SlcJcrConstants.AGENTS_BASE_PATH + "/" + localhost.getCanonicalHostName();
- if (agentFactoryPath
- .equals(SlcJcrConstants.VM_AGENT_FACTORY_PATH))
- throw new SlcException("Unsupported hostname "
- + localhost.getCanonicalHostName());
+ if (agentFactoryPath.equals(SlcJcrConstants.VM_AGENT_FACTORY_PATH))
+ throw new SlcException("Unsupported hostname " + localhost.getCanonicalHostName());
} else {// local
agentFactoryPath = SlcJcrConstants.VM_AGENT_FACTORY_PATH;
}
init-method="run">
<property name="principalPrivileges">
<map>
- <entry key="jcr:all" value="org.argeo.slc.user" />
+ <entry key="jcr:all" value="cn=org.argeo.slc.user,ou=roles,ou=node" />
</map>
</property>
<property name="repository" ref="slcRepository" />
/** SLC repository constants */
public interface RepoConstants {
- public final static String DEFAULT_JAVA_REPOSITORY_ALIAS = "java";
- public final static String DEFAULT_JAVA_REPOSITORY_LABEL = "Internal Java Repository";
+ String DEFAULT_JAVA_REPOSITORY_ALIAS = "java";
+ String DEFAULT_JAVA_REPOSITORY_LABEL = "Internal Java Repository";
+
+
+ String DEFAULT_ARTIFACTS_BASE_PATH = "/";
+ String REPO_BASEPATH = "/slc:repo";
+ String PROXIED_REPOSITORIES = REPO_BASEPATH + "/slc:sources";
+ String DISTRIBUTIONS_BASE_PATH = REPO_BASEPATH + "/slc:distributions";
+ String REPOSITORIES_BASE_PATH = REPO_BASEPATH + "/slc:repositories";
+ String DIST_DOWNLOAD_BASEPATH = "/download";
+
+ String BINARIES_ARTIFACT_ID = "binaries";
+ String SOURCES_ARTIFACT_ID = "sources";
+ String SDK_ARTIFACT_ID = "sdk";
- public final static String DEFAULT_ARTIFACTS_BASE_PATH = "/";
- public final static String REPO_BASEPATH = "/slc:repo";
- public final static String PROXIED_REPOSITORIES = REPO_BASEPATH
- + "/slc:sources";
- public final static String DISTRIBUTIONS_BASE_PATH = REPO_BASEPATH
- + "/slc:distributions";
- public final static String REPOSITORIES_BASE_PATH = REPO_BASEPATH
- + "/slc:repositories";
- public final static String BINARIES_ARTIFACT_ID = "binaries";
- public final static String SOURCES_ARTIFACT_ID = "sources";
- public final static String SDK_ARTIFACT_ID = "sdk";
-
// TODO might exists somewhere else
- public final static String SLC_GROUP_ID = "SLC-GroupId";
+ String SLC_GROUP_ID = "SLC-GroupId";
+
+ // TODO find a more generic way
+ String DEFAULT_DEFAULT_WORKSPACE = "main";
}
private final static Log log = LogFactory.getLog(RepoSync.class);
// Centralizes definition of workspaces that must be ignored by the sync.
- private final static List<String> IGNORED_WKSP_LIST = Arrays.asList(
- "security", "localrepo");
+ private final static List<String> IGNORED_WKSP_LIST = Arrays.asList("security", "localrepo");
private final Calendar zero;
private Session sourceDefaultSession = null;
* @param targetRepository
* @param targetCredentials
*/
- public RepoSync(Repository sourceRepository, Credentials sourceCredentials,
- Repository targetRepository, Credentials targetCredentials) {
+ public RepoSync(Repository sourceRepository, Credentials sourceCredentials, Repository targetRepository,
+ Credentials targetCredentials) {
this();
this.sourceRepository = sourceRepository;
this.sourceCredentials = sourceCredentials;
// Setup
if (sourceRepository == null)
- sourceRepository = NodeUtils.getRepositoryByUri(
- repositoryFactory, sourceRepoUri);
+ sourceRepository = NodeUtils.getRepositoryByUri(repositoryFactory, sourceRepoUri);
if (sourceCredentials == null && sourceUsername != null)
- sourceCredentials = new SimpleCredentials(sourceUsername,
- sourcePassword);
- sourceDefaultSession = sourceRepository.login(sourceCredentials);
+ sourceCredentials = new SimpleCredentials(sourceUsername, sourcePassword);
+ // FIXME make it more generic
+ sourceDefaultSession = sourceRepository.login(sourceCredentials, RepoConstants.DEFAULT_DEFAULT_WORKSPACE);
if (targetRepository == null)
- targetRepository = NodeUtils.getRepositoryByUri(
- repositoryFactory, targetRepoUri);
+ targetRepository = NodeUtils.getRepositoryByUri(repositoryFactory, targetRepoUri);
if (targetCredentials == null && targetUsername != null)
- targetCredentials = new SimpleCredentials(targetUsername,
- targetPassword);
+ targetCredentials = new SimpleCredentials(targetUsername, targetPassword);
targetDefaultSession = targetRepository.login(targetCredentials);
Map<String, Exception> errors = new HashMap<String, Exception>();
- for (String sourceWorkspaceName : sourceDefaultSession
- .getWorkspace().getAccessibleWorkspaceNames()) {
+ for (String sourceWorkspaceName : sourceDefaultSession.getWorkspace().getAccessibleWorkspaceNames()) {
if (monitor != null && monitor.isCanceled())
break;
- if (workspaceMap != null
- && !workspaceMap.containsKey(sourceWorkspaceName))
+ if (workspaceMap != null && !workspaceMap.containsKey(sourceWorkspaceName))
continue;
if (IGNORED_WKSP_LIST.contains(sourceWorkspaceName))
continue;
Session sourceSession = null;
Session targetSession = null;
- String targetWorkspaceName = workspaceMap
- .get(sourceWorkspaceName);
+ String targetWorkspaceName = workspaceMap.get(sourceWorkspaceName);
try {
try {
- targetSession = targetRepository.login(
- targetCredentials, targetWorkspaceName);
+ targetSession = targetRepository.login(targetCredentials, targetWorkspaceName);
} catch (NoSuchWorkspaceException e) {
- targetDefaultSession.getWorkspace().createWorkspace(
- targetWorkspaceName);
- targetSession = targetRepository.login(
- targetCredentials, targetWorkspaceName);
+ targetDefaultSession.getWorkspace().createWorkspace(targetWorkspaceName);
+ targetSession = targetRepository.login(targetCredentials, targetWorkspaceName);
}
- sourceSession = sourceRepository.login(sourceCredentials,
- sourceWorkspaceName);
+ sourceSession = sourceRepository.login(sourceCredentials, sourceWorkspaceName);
syncWorkspace(sourceSession, targetSession);
} catch (Exception e) {
- errors.put("Could not sync workspace "
- + sourceWorkspaceName, e);
+ errors.put("Could not sync workspace " + sourceWorkspaceName, e);
if (log.isErrorEnabled())
e.printStackTrace();
log.info("Sync has been canceled by user");
long duration = (System.currentTimeMillis() - begin) / 1000;// s
- log.info("Sync " + sourceRepoUri + " to " + targetRepoUri + " in "
- + (duration / 60)
+ log.info("Sync " + sourceRepoUri + " to " + targetRepoUri + " in " + (duration / 60)
+ "min " + (duration % 60) + "s");
throw new SlcException("Sync failed " + errors);
}
} catch (RepositoryException e) {
- throw new SlcException("Cannot sync " + sourceRepoUri + " to "
- + targetRepoUri, e);
+ throw new SlcException("Cannot sync " + sourceRepoUri + " to " + targetRepoUri, e);
} finally {
JcrUtils.logoutQuietly(sourceDefaultSession);
JcrUtils.logoutQuietly(targetDefaultSession);
if (IGNORED_WKSP_LIST.contains(session.getWorkspace().getName()))
return 0l;
try {
- Query countQuery = session
- .getWorkspace()
- .getQueryManager()
- .createQuery(
- "select file from ["
- + (true ? NodeType.NT_FILE
- : NodeType.NT_BASE) + "] as file",
- Query.JCR_SQL2);
+ Query countQuery = session.getWorkspace().getQueryManager().createQuery(
+ "select file from [" + (true ? NodeType.NT_FILE : NodeType.NT_BASE) + "] as file", Query.JCR_SQL2);
QueryResult result = countQuery.execute();
Long expectedCount = result.getNodes().getSize();
return expectedCount;
} catch (RepositoryException e) {
- throw new SlcException("Unexpected error while computing "
- + "the size of the fetch for workspace "
+ throw new SlcException("Unexpected error while computing " + "the size of the fetch for workspace "
+ session.getWorkspace().getName(), e);
}
}
}
try {
- String msg = "Synchronizing workspace: "
- + sourceSession.getWorkspace().getName();
+ String msg = "Synchronizing workspace: " + sourceSession.getWorkspace().getName();
if (monitor != null)
monitor.setTaskName(msg);
if (log.isDebugEnabled())
log.debug(msg);
if (filesOnly) {
- JcrUtils.copyFiles(sourceSession.getRootNode(),
- targetSession.getRootNode(), true, monitor);
+ JcrUtils.copyFiles(sourceSession.getRootNode(), targetSession.getRootNode(), true, monitor);
} else {
- for (NodeIterator it = sourceSession.getRootNode().getNodes(); it
- .hasNext();) {
+ for (NodeIterator it = sourceSession.getRootNode().getNodes(); it.hasNext();) {
Node node = it.nextNode();
if (node.getName().equals("jcr:system"))
continue;
log.debug("Synced " + sourceSession.getWorkspace().getName());
} catch (Exception e) {
e.printStackTrace();
- throw new SlcException("Cannot sync "
- + sourceSession.getWorkspace().getName() + " to "
+ throw new SlcException("Cannot sync " + sourceSession.getWorkspace().getName() + " to "
+ targetSession.getWorkspace().getName(), e);
}
}
updateMonitor(msg, false);
}
- protected void syncNode(Node sourceNode, Session targetSession)
- throws RepositoryException, SAXException {
+ protected void syncNode(Node sourceNode, Session targetSession) throws RepositoryException, SAXException {
// Boolean singleLevel = singleLevel(sourceNode);
try {
if (monitor != null && monitor.isCanceled()) {
- updateMonitor("Fetched has been canceled, "
- + "process is terminating");
+ updateMonitor("Fetch has been canceled, " + "process is terminating");
return;
}
- Node targetParentNode = targetSession.getNode(sourceNode
- .getParent().getPath());
+ Node targetParentNode = targetSession.getNode(sourceNode.getParent().getPath());
Node targetNode;
- if (monitor != null
- && sourceNode.isNodeType(NodeType.NT_HIERARCHY_NODE))
+ if (monitor != null && sourceNode.isNodeType(NodeType.NT_HIERARCHY_NODE))
monitor.subTask("Process " + sourceNode.getPath());
final Boolean isNew;
if (!targetSession.itemExists(sourceNode.getPath())) {
isNew = true;
- targetNode = targetParentNode.addNode(sourceNode.getName(),
- sourceNode.getPrimaryNodeType().getName());
+ targetNode = targetParentNode.addNode(sourceNode.getName(), sourceNode.getPrimaryNodeType().getName());
} else {
isNew = false;
targetNode = targetSession.getNode(sourceNode.getPath());
- if (!targetNode.getPrimaryNodeType().getName()
- .equals(sourceNode.getPrimaryNodeType().getName()))
- targetNode.setPrimaryType(sourceNode.getPrimaryNodeType()
- .getName());
+ if (!targetNode.getPrimaryNodeType().getName().equals(sourceNode.getPrimaryNodeType().getName()))
+ targetNode.setPrimaryType(sourceNode.getPrimaryNodeType().getName());
}
// export
// mixin and properties
for (NodeType nt : sourceNode.getMixinNodeTypes()) {
- if (!targetNode.isNodeType(nt.getName())
- && targetNode.canAddMixin(nt.getName()))
+ if (!targetNode.isNodeType(nt.getName()) && targetNode.canAddMixin(nt.getName()))
targetNode.addMixin(nt.getName());
}
copyProperties(sourceNode, targetNode);
if (sourceNode.isNodeType(NodeType.NT_HIERARCHY_NODE)) {
if (targetSession.hasPendingChanges()) {
if (sourceNode.isNodeType(NodeType.NT_FILE))
- updateMonitor((isNew ? "Added " : "Updated ")
- + targetNode.getPath(), true);
+ updateMonitor((isNew ? "Added " : "Updated ") + targetNode.getPath(), true);
// if (doSave)
targetSession.save();
} else {
}
}
- private void copyTimestamps(Node sourceNode, Node targetNode)
- throws RepositoryException {
+ private void copyTimestamps(Node sourceNode, Node targetNode) throws RepositoryException {
if (sourceNode.getDefinition().isProtected())
return;
if (targetNode.getDefinition().isProtected())
copyTimestamp(sourceNode, targetNode, Property.JCR_LAST_MODIFIED_BY);
}
- private void copyTimestamp(Node sourceNode, Node targetNode, String property)
- throws RepositoryException {
+ private void copyTimestamp(Node sourceNode, Node targetNode, String property) throws RepositoryException {
if (sourceNode.hasProperty(property)) {
Property p = sourceNode.getProperty(property);
if (p.getDefinition().isProtected())
return;
if (targetNode.hasProperty(property)
- && targetNode
- .getProperty(property)
- .getValue()
- .equals(sourceNode.getProperty(property).getValue()))
+ && targetNode.getProperty(property).getValue().equals(sourceNode.getProperty(property).getValue()))
return;
- targetNode.setProperty(property, sourceNode.getProperty(property)
- .getValue());
+ targetNode.setProperty(property, sourceNode.getProperty(property).getValue());
}
}
- private void copyProperties(Node sourceNode, Node targetNode)
- throws RepositoryException {
- properties: for (PropertyIterator pi = sourceNode.getProperties(); pi
- .hasNext();) {
+ private void copyProperties(Node sourceNode, Node targetNode) throws RepositoryException {
+ properties: for (PropertyIterator pi = sourceNode.getProperties(); pi.hasNext();) {
Property p = pi.nextProperty();
if (p.getDefinition().isProtected())
continue properties;
- if (p.getName().equals(Property.JCR_CREATED)
- || p.getName().equals(Property.JCR_CREATED_BY)
+ if (p.getName().equals(Property.JCR_CREATED) || p.getName().equals(Property.JCR_CREATED_BY)
|| p.getName().equals(Property.JCR_LAST_MODIFIED)
|| p.getName().equals(Property.JCR_LAST_MODIFIED_BY))
continue properties;
if (p.isMultiple()) {
if (!targetNode.hasProperty(p.getName())
- || !Arrays.equals(
- targetNode.getProperty(p.getName())
- .getValues(), p.getValues()))
+ || !Arrays.equals(targetNode.getProperty(p.getName()).getValues(), p.getValues()))
targetNode.setProperty(p.getName(), p.getValues());
} else {
if (!targetNode.hasProperty(p.getName())
- || !targetNode.getProperty(p.getName()).getValue()
- .equals(p.getValue()))
+ || !targetNode.getProperty(p.getName()).getValue().equals(p.getValue()))
targetNode.setProperty(p.getName(), p.getValue());
}
}
}
}
- private static void copyBinary(Property p, Node targetNode)
- throws RepositoryException {
+ private static void copyBinary(Property p, Node targetNode) throws RepositoryException {
InputStream in = null;
Binary sourceBinary = null;
Binary targetBinary = null;
}
in = sourceBinary.getStream();
- targetBinary = targetNode.getSession().getValueFactory()
- .createBinary(in);
+ targetBinary = targetNode.getSession().getValueFactory().createBinary(in);
targetNode.setProperty(p.getName(), targetBinary);
} catch (Exception e) {
throw new SlcException("Could not transfer " + p, e);
// throws RepositoryException, SAXException {
//
// // enable cancelation of the current fetch process
- // // FIXME insure the repository stays in a stable state
+ // // fxme ensure the repository stays in a stable state
// if (monitor != null && monitor.isCanceled()) {
// updateMonitor("Fetched has been canceled, "
// + "process is terminating");
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
+import javax.jcr.security.Privilege;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.argeo.slc.jcr.SlcTypes;
import org.argeo.slc.repo.NodeIndexer;
import org.argeo.slc.repo.OsgiFactory;
+import org.argeo.slc.repo.RepoConstants;
import org.argeo.slc.repo.maven.MavenConventionsUtils;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
private Map<String, List<String>> mirrors = new HashMap<String, List<String>>();
private List<String> mavenRepositories = new ArrayList<String>();
- private String downloadBase = "/download";
+ private String downloadBase = RepoConstants.DIST_DOWNLOAD_BASEPATH;
private String mavenProxyBase = downloadBase + "/maven";
public void init() {
distSession = JcrUtils.loginOrCreateWorkspace(distRepository, workspace);
// Privileges
- JcrUtils.addPrivilege(javaSession, "/", SlcConstants.ROLE_SLC, "jcr:all");
- JcrUtils.addPrivilege(distSession, "/", SlcConstants.ROLE_SLC, "jcr:all");
+ JcrUtils.addPrivilege(javaSession, "/", SlcConstants.ROLE_SLC, Privilege.JCR_ALL);
+ JcrUtils.addPrivilege(distSession, "/", SlcConstants.ROLE_SLC, Privilege.JCR_ALL);
} catch (RepositoryException e) {
throw new SlcException("Cannot initialize OSGi Factory " + workspace, e);
} finally {
class="org.argeo.jcr.JcrAuthorizations" init-method="run">\r
<property name="principalPrivileges">\r
<map>\r
- <entry key="jcr:all" value="org.argeo.slc.user" />\r
+ <entry key="jcr:all" value="cn=org.argeo.slc.user,ou=roles,ou=node" />\r
</map>\r
</property>\r
<property name="workspace" value="*" />\r
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.argeo.commons</groupId>
<artifactId>argeo-commons</artifactId>
- <version>2.1.46-SNAPSHOT</version>
+ <version>2.1.46</version>
</parent>
<groupId>org.argeo.slc</groupId>
<artifactId>argeo-slc</artifactId>
<!-- Modules -->
<module>org.argeo.slc.agent</module>
- <module>org.argeo.slc.node.jackrabbit</module>
+ <!-- <module>org.argeo.slc.node.jackrabbit</module> -->
<module>org.argeo.slc.agent.jcr</module>
<module>org.argeo.slc.server.repo</module>
<checksumPolicy>warn</checksumPolicy>
</releases>
</repository>
-<!-- <repository> -->
-<!-- <id>argeo-tp-extras</id> -->
-<!-- <url>http://localhost:7080/data/public/java/argeo-tp-extras-2.1</url> -->
-<!-- <releases> -->
-<!-- <enabled>true</enabled> -->
-<!-- <updatePolicy>daily</updatePolicy> -->
-<!-- <checksumPolicy>warn</checksumPolicy> -->
-<!-- </releases> -->
-<!-- </repository> -->
+ <!-- <repository> -->
+ <!-- <id>argeo-tp-extras</id> -->
+ <!-- <url>http://localhost:7080/data/public/java/argeo-tp-extras-2.1</url> -->
+ <!-- <releases> -->
+ <!-- <enabled>true</enabled> -->
+ <!-- <updatePolicy>daily</updatePolicy> -->
+ <!-- <checksumPolicy>warn</checksumPolicy> -->
+ <!-- </releases> -->
+ <!-- </repository> -->
<repository>
<id>argeo-commons</id>
<url>http://localhost:7070/data/public/java/argeo-${developmentCycle.argeo-commons}</url>