argeo.osgi.start.3.node=\
org.argeo.cms
+argeo.osgi.start.4.node=\
+org.argeo.cms.jcr
+
argeo.osgi.start.5.node=\
org.argeo.cms.e4.rap
-Provide-Capability: cms.datamodel;name=ldap;cnd=/org/argeo/api/ldap.cnd;abstract=true
+++ /dev/null
-<ldap = 'http://www.argeo.org/ns/ldap'>
+++ /dev/null
-<node = 'http://www.argeo.org/ns/node'>
-
-[node:userHome]
-mixin
-- ldap:uid (STRING) m
-
-[node:groupHome]
-mixin
-- ldap:cn (STRING) m
<arguments>
</arguments>
</buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.ds.core.builder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.pde.PluginNature</nature>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" activate="init" deactivate="destroy" name="org.argeo.cms.dataServletContext">
+ <implementation class="org.argeo.cms.servlet.CmsServletContext"/>
+ <service>
+ <provide interface="org.osgi.service.http.context.ServletContextHelper"/>
+ </service>
+ <property name="osgi.http.whiteboard.context.name" type="String" value="dataServletContext"/>
+ <property name="osgi.http.whiteboard.context.path" type="String" value="/data"/>
+</scr:component>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" name="org.argeo.cms.filesServlet">
+ <implementation class="org.argeo.cms.jcr.internal.servlet.CmsWebDavServlet"/>
+ <service>
+ <provide interface="javax.servlet.Servlet"/>
+ </service>
+ <property name="osgi.http.whiteboard.servlet.pattern" type="String" value="/*"/>
+ <property name="osgi.http.whiteboard.context.select" type="String" value="(osgi.http.whiteboard.context.name=filesServletContext)"/>
+ <property name="servlet.init.resource-config" type="String" value="/org/argeo/cms/jcr/internal/servlet/webdav-config.xml"/>
+ <property name="servlet.init.resource-path-prefix" type="String" value="/files"/>
+ <reference bind="setRepository" cardinality="1..1" interface="javax.jcr.Repository" policy="static" target="(cn=ego)"/>
+</scr:component>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" activate="init" deactivate="destroy" name="org.argeo.cms.filesServletContext">
+ <implementation class="org.argeo.cms.servlet.PrivateWwwAuthServletContext"/>
+ <service>
+ <provide interface="org.osgi.service.http.context.ServletContextHelper"/>
+ </service>
+ <property name="osgi.http.whiteboard.context.name" type="String" value="filesServletContext"/>
+ <property name="osgi.http.whiteboard.context.path" type="String" value="/files"/>
+</scr:component>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" activate="init" deactivate="destroy" name="JCR Deployment">
+ <implementation class="org.argeo.cms.jcr.internal.JcrDeployment"/>
+ <reference bind="setNodeDeployment" cardinality="1..1" interface="org.argeo.api.NodeDeployment" name="NodeDeployment" policy="static"/>
+</scr:component>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" activate="init" deactivate="destroy" name="org.argeo.cms.jcrServletContext">
+ <implementation class="org.argeo.cms.servlet.PrivateWwwAuthServletContext"/>
+ <service>
+ <provide interface="org.osgi.service.http.context.ServletContextHelper"/>
+ </service>
+ <property name="osgi.http.whiteboard.context.name" type="String" value="jcrServletContext"/>
+ <property name="osgi.http.whiteboard.context.path" type="String" value="/jcr"/>
+</scr:component>
+Bundle-Activator: org.argeo.cms.jcr.internal.osgi.CmsJcrActivator
+
Provide-Capability:\
-cms.datamodel; name=jcrx; cnd=/org/argeo/jcr/jcrx.cnd; abstract=true
+cms.datamodel; name=jcrx; cnd=/org/argeo/jcr/jcrx.cnd; abstract=true,\
+cms.datamodel; name=argeo; cnd=/org/argeo/cms/jcr/argeo.cnd; abstract=true,\
+cms.datamodel;name=ldap; cnd=/org/argeo/cms/jcr/ldap.cnd; abstract=true,\
+osgi.service;objectClass="javax.jcr.Repository"
Import-Package:\
+org.argeo.cms.servlet,\
+javax.jcr.security,\
+org.h2;resolution:=optional,\
+org.postgresql;resolution:=optional,\
+org.apache.jackrabbit.webdav.server,\
+org.apache.jackrabbit.webdav.jcr,\
+org.apache.commons.httpclient.cookie;resolution:=optional,\
+org.osgi.framework.namespace;version=0.0.0,\
+org.osgi.*;version=0.0.0,\
+org.osgi.service.http.whiteboard,\
org.apache.jackrabbit.api,\
org.apache.jackrabbit.commons,\
org.apache.jackrabbit.spi,\
org.apache.jackrabbit.spi2davex,\
org.apache.jackrabbit.webdav,\
junit.*;resolution:=optional,\
-*
\ No newline at end of file
+*
+
+Service-Component:\
+OSGI-INF/jcrDeployment.xml,\
+OSGI-INF/jcrServletContext.xml,\
+OSGI-INF/dataServletContext.xml,\
+OSGI-INF/filesServletContext.xml,\
+OSGI-INF/filesServlet.xml
-source.. = src/,\
- ext/test/
output.. = bin/
bin.includes = META-INF/,\
- .
+ .,\
+ OSGI-INF/jcrDeployment.xml
+source.. = src/,\
+ ext/test/
additional.bundles = org.junit,\
org.hamcrest,\
org.apache.jackrabbit.core,\
org.apache.httpcomponents.httpclient,\
org.apache.httpcomponents.httpcore,\
org.apache.tika.parsers
-
\ No newline at end of file
--- /dev/null
+package org.argeo.cms.tabular;
+
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.jcr.Node;
+import javax.jcr.PropertyType;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.commons.cnd.CndImporter;
+import org.argeo.api.tabular.TabularColumn;
+import org.argeo.api.tabular.TabularRow;
+import org.argeo.api.tabular.TabularRowIterator;
+import org.argeo.api.tabular.TabularWriter;
+import org.argeo.cms.ArgeoTypes;
+import org.argeo.jackrabbit.unit.AbstractJackrabbitTestCase;
+
+public class JcrTabularTest extends AbstractJackrabbitTestCase {
+ private final static Log log = LogFactory.getLog(JcrTabularTest.class);
+
+ public void testWriteReadCsv() throws Exception {
+ // session().setNamespacePrefix("argeo", ArgeoNames.ARGEO_NAMESPACE);
+ InputStreamReader reader = new InputStreamReader(getClass().getResourceAsStream("/org/argeo/api/ldap.cnd"));
+ CndImporter.registerNodeTypes(reader, session());
+ reader.close();
+ reader = new InputStreamReader(getClass().getResourceAsStream("/org/argeo/cms/argeo.cnd"));
+ CndImporter.registerNodeTypes(reader, session());
+ reader.close();
+// reader = new InputStreamReader(getClass().getResourceAsStream("/org/argeo/cms/cms.cnd"));
+// CndImporter.registerNodeTypes(reader, session());
+// reader.close();
+
+ // write
+ Integer columnCount = 15;
+ Long rowCount = 1000l;
+ String stringValue = "test, \ntest";
+
+ List<TabularColumn> header = new ArrayList<TabularColumn>();
+ for (int i = 0; i < columnCount; i++) {
+ header.add(new TabularColumn("col" + i, PropertyType.STRING));
+ }
+ Node tableNode = session().getRootNode().addNode("table", ArgeoTypes.ARGEO_TABLE);
+ TabularWriter writer = new JcrTabularWriter(tableNode, header, ArgeoTypes.ARGEO_CSV);
+ for (int i = 0; i < rowCount; i++) {
+ List<Object> objs = new ArrayList<Object>();
+ for (int j = 0; j < columnCount; j++) {
+ objs.add(stringValue);
+ }
+ writer.appendRow(objs.toArray());
+ }
+ writer.close();
+ session().save();
+
+ if (log.isDebugEnabled())
+ log.debug("Wrote tabular content " + rowCount + " rows, " + columnCount + " columns");
+ // read
+ TabularRowIterator rowIt = new JcrTabularRowIterator(tableNode);
+ Long count = 0l;
+ while (rowIt.hasNext()) {
+ TabularRow tr = rowIt.next();
+ assertEquals(header.size(), tr.size());
+ count++;
+ }
+ assertEquals(rowCount, count);
+ if (log.isDebugEnabled())
+ log.debug("Read tabular content " + rowCount + " rows, " + columnCount + " columns");
+ }
+}
<artifactId>org.argeo.core</artifactId>
<version>2.3-SNAPSHOT</version>
</dependency>
+ <dependency>
+ <groupId>org.argeo.commons</groupId>
+ <artifactId>org.argeo.cms</artifactId>
+ <version>2.3-SNAPSHOT</version>
+ </dependency>
</dependencies>
</project>
\ No newline at end of file
--- /dev/null
+package org.argeo.cms.fs;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.FileSystem;
+import java.nio.file.Path;
+import java.nio.file.spi.FileSystemProvider;
+
+import javax.jcr.NoSuchWorkspaceException;
+import javax.jcr.Node;
+import javax.jcr.NodeIterator;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.query.Query;
+import javax.jcr.query.QueryManager;
+
+import org.argeo.api.NodeConstants;
+import org.argeo.jcr.Jcr;
+
+/** Utilities around documents. */
+public class CmsFsUtils {
+ // TODO make it more robust and configurable
+ private static String baseWorkspaceName = NodeConstants.SYS_WORKSPACE;
+
+ public static Node getNode(Repository repository, Path path) {
+ String workspaceName = path.getNameCount() == 0 ? baseWorkspaceName : path.getName(0).toString();
+ String jcrPath = '/' + path.subpath(1, path.getNameCount()).toString();
+ try {
+ Session newSession;
+ try {
+ newSession = repository.login(workspaceName);
+ } catch (NoSuchWorkspaceException e) {
+ // base workspace
+ newSession = repository.login(baseWorkspaceName);
+ jcrPath = path.toString();
+ }
+ return newSession.getNode(jcrPath);
+ } catch (RepositoryException e) {
+ throw new IllegalStateException("Cannot get node from path " + path, e);
+ }
+ }
+
+ public static NodeIterator getLastUpdatedDocuments(Session session) {
+ try {
+ String qStr = "//element(*, nt:file)";
+ qStr += " order by @jcr:lastModified descending";
+ QueryManager queryManager = session.getWorkspace().getQueryManager();
+ @SuppressWarnings("deprecation")
+ Query xpathQuery = queryManager.createQuery(qStr, Query.XPATH);
+ xpathQuery.setLimit(8);
+ NodeIterator nit = xpathQuery.execute().getNodes();
+ return nit;
+ } catch (RepositoryException e) {
+ throw new IllegalStateException("Unable to retrieve last updated documents", e);
+ }
+ }
+
+ public static Path getPath(FileSystemProvider nodeFileSystemProvider, URI uri) {
+ try {
+ FileSystem fileSystem = nodeFileSystemProvider.getFileSystem(uri);
+ if (fileSystem == null)
+ fileSystem = nodeFileSystemProvider.newFileSystem(uri, null);
+ String path = uri.getPath();
+ return fileSystem.getPath(path);
+ } catch (IOException e) {
+ throw new IllegalStateException("Unable to initialise file system for " + uri, e);
+ }
+ }
+
+ public static Path getPath(FileSystemProvider nodeFileSystemProvider, Node node) {
+ String workspaceName = Jcr.getWorkspaceName(node);
+ String fullPath = baseWorkspaceName.equals(workspaceName) ? Jcr.getPath(node)
+ : '/' + workspaceName + Jcr.getPath(node);
+ URI uri;
+ try {
+ uri = new URI(NodeConstants.SCHEME_NODE, null, fullPath, null);
+ } catch (URISyntaxException e) {
+ throw new IllegalArgumentException("Cannot interpret " + fullPath + " as an URI", e);
+ }
+ return getPath(nodeFileSystemProvider, uri);
+ }
+
+ /** Singleton. */
+ private CmsFsUtils() {
+ }
+}
--- /dev/null
+package org.argeo.cms.internal.jcr;
+
+import java.util.Properties;
+
+import org.apache.jackrabbit.core.config.BeanConfig;
+import org.apache.jackrabbit.core.config.ConfigurationException;
+import org.apache.jackrabbit.core.config.RepositoryConfigurationParser;
+import org.apache.jackrabbit.core.config.WorkspaceSecurityConfig;
+import org.apache.jackrabbit.core.util.db.ConnectionFactory;
+import org.w3c.dom.Element;
+
+/**
+ * A {@link RepositoryConfigurationParser} providing more flexibility with
+ * classloaders.
+ */
+@SuppressWarnings("restriction")
+class CustomRepositoryConfigurationParser extends RepositoryConfigurationParser {
+ private ClassLoader classLoader = null;
+
+ public CustomRepositoryConfigurationParser(Properties variables) {
+ super(variables);
+ }
+
+ public CustomRepositoryConfigurationParser(Properties variables, ConnectionFactory connectionFactory) {
+ super(variables, connectionFactory);
+ }
+
+ @Override
+ protected RepositoryConfigurationParser createSubParser(Properties variables) {
+ Properties props = new Properties(getVariables());
+ props.putAll(variables);
+ CustomRepositoryConfigurationParser subParser = new CustomRepositoryConfigurationParser(props,
+ connectionFactory);
+ subParser.setClassLoader(classLoader);
+ return subParser;
+ }
+
+ @Override
+ public WorkspaceSecurityConfig parseWorkspaceSecurityConfig(Element parent) throws ConfigurationException {
+ WorkspaceSecurityConfig workspaceSecurityConfig = super.parseWorkspaceSecurityConfig(parent);
+ workspaceSecurityConfig.getAccessControlProviderConfig().setClassLoader(classLoader);
+ return workspaceSecurityConfig;
+ }
+
+ @Override
+ protected BeanConfig parseBeanConfig(Element parent, String name) throws ConfigurationException {
+ BeanConfig beanConfig = super.parseBeanConfig(parent, name);
+ if (beanConfig.getClassName().startsWith("org.argeo")) {
+ beanConfig.setClassLoader(classLoader);
+ }
+ return beanConfig;
+ }
+
+ public void setClassLoader(ClassLoader classLoader) {
+ this.classLoader = classLoader;
+ }
+
+}
--- /dev/null
+package org.argeo.cms.internal.jcr;
+
+/** Pre-defined Jackrabbit repository configurations. */
+enum JackrabbitType {
+ /** Local file system */
+ localfs,
+ /** Embedded Java H2 database */
+ h2,
+ /** Embedded Java H2 database in PostgreSQL compatibility mode */
+ h2_postgresql,
+ /** PostgreSQL */
+ postgresql,
+ /** PostgreSQL with datastore */
+ postgresql_ds,
+ /** PostgreSQL with cluster */
+ postgresql_cluster,
+ /** PostgreSQL with cluster and datastore */
+ postgresql_cluster_ds,
+ /** Memory */
+ memory;
+}
--- /dev/null
+package org.argeo.cms.internal.jcr;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Dictionary;
+import java.util.HashMap;
+import java.util.Hashtable;
+import java.util.Map;
+
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.RepositoryFactory;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.argeo.api.NodeConstants;
+import org.argeo.api.NodeDeployment;
+import org.argeo.jackrabbit.client.ClientDavexRepositoryFactory;
+import org.argeo.jcr.JcrException;
+import org.argeo.naming.LdapAttrs;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.Constants;
+import org.osgi.framework.FrameworkUtil;
+
+/** JCR specific init utilities. */
+public class JcrInitUtils {
+ private final static Log log = LogFactory.getLog(JcrInitUtils.class);
+ private final static BundleContext bundleContext = FrameworkUtil.getBundle(JcrInitUtils.class).getBundleContext();
+
+ public static void addToDeployment(NodeDeployment nodeDeployment) {
+ // node repository
+// Dictionary<String, Object> provided = null;
+ Dictionary<String, Object> provided = nodeDeployment.getProps(NodeConstants.NODE_REPOS_FACTORY_PID,
+ NodeConstants.NODE);
+ Dictionary<String, Object> nodeConfig = JcrInitUtils.getNodeRepositoryConfig(provided);
+ // node repository is mandatory
+ nodeDeployment.addFactoryDeployConfig(NodeConstants.NODE_REPOS_FACTORY_PID, nodeConfig);
+
+ // additional repositories
+// dataModels: for (DataModels.DataModel dataModel : dataModels.getNonAbstractDataModels()) {
+// if (NodeConstants.NODE_REPOSITORY.equals(dataModel.getName()))
+// continue dataModels;
+// Dictionary<String, Object> config = JcrInitUtils.getRepositoryConfig(dataModel.getName(),
+// getProps(NodeConstants.NODE_REPOS_FACTORY_PID, dataModel.getName()));
+// if (config.size() != 0)
+// putFactoryDeployConfig(NodeConstants.NODE_REPOS_FACTORY_PID, config);
+// }
+
+ }
+
+ /** Override the provided config with the framework properties */
+ public static Dictionary<String, Object> getNodeRepositoryConfig(Dictionary<String, Object> provided) {
+ Dictionary<String, Object> props = provided != null ? provided : new Hashtable<String, Object>();
+ for (RepoConf repoConf : RepoConf.values()) {
+ Object value = getFrameworkProp(NodeConstants.NODE_REPO_PROP_PREFIX + repoConf.name());
+ if (value != null) {
+ props.put(repoConf.name(), value);
+ if (log.isDebugEnabled())
+ log.debug("Set node repo configuration " + repoConf.name() + " to " + value);
+ }
+ }
+ props.put(NodeConstants.CN, NodeConstants.NODE_REPOSITORY);
+ return props;
+ }
+
+ public static Dictionary<String, Object> getRepositoryConfig(String dataModelName,
+ Dictionary<String, Object> provided) {
+ if (dataModelName.equals(NodeConstants.NODE_REPOSITORY) || dataModelName.equals(NodeConstants.EGO_REPOSITORY))
+ throw new IllegalArgumentException("Data model '" + dataModelName + "' is reserved.");
+ Dictionary<String, Object> props = provided != null ? provided : new Hashtable<String, Object>();
+ for (RepoConf repoConf : RepoConf.values()) {
+ Object value = getFrameworkProp(
+ NodeConstants.NODE_REPOS_PROP_PREFIX + dataModelName + '.' + repoConf.name());
+ if (value != null) {
+ props.put(repoConf.name(), value);
+ if (log.isDebugEnabled())
+ log.debug("Set " + dataModelName + " repo configuration " + repoConf.name() + " to " + value);
+ }
+ }
+ if (props.size() != 0)
+ props.put(NodeConstants.CN, dataModelName);
+ return props;
+ }
+
+ private static void registerRemoteInit(String uri) {
+ try {
+ Repository repository = createRemoteRepository(new URI(uri));
+ Hashtable<String, Object> properties = new Hashtable<>();
+ properties.put(NodeConstants.CN, NodeConstants.NODE_INIT);
+ properties.put(LdapAttrs.labeledURI.name(), uri);
+ properties.put(Constants.SERVICE_RANKING, -1000);
+ bundleContext.registerService(Repository.class, repository, properties);
+ } catch (RepositoryException e) {
+ throw new JcrException(e);
+ } catch (URISyntaxException e) {
+ throw new IllegalArgumentException(e);
+ }
+ }
+
+ private static Repository createRemoteRepository(URI uri) throws RepositoryException {
+ RepositoryFactory repositoryFactory = new ClientDavexRepositoryFactory();
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(ClientDavexRepositoryFactory.JACKRABBIT_DAVEX_URI, uri.toString());
+ // TODO make it configurable
+ params.put(ClientDavexRepositoryFactory.JACKRABBIT_REMOTE_DEFAULT_WORKSPACE, NodeConstants.SYS_WORKSPACE);
+ return repositoryFactory.getRepository(params);
+ }
+
+ private static String getFrameworkProp(String key, String def) {
+ String value;
+ if (bundleContext != null)
+ value = bundleContext.getProperty(key);
+ else
+ value = System.getProperty(key);
+ if (value == null)
+ return def;
+ return value;
+ }
+
+ private static String getFrameworkProp(String key) {
+ return getFrameworkProp(key, null);
+ }
+
+}
--- /dev/null
+package org.argeo.cms.internal.jcr;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+import org.apache.jackrabbit.core.data.DataIdentifier;
+import org.apache.jackrabbit.core.data.DataRecord;
+import org.apache.jackrabbit.core.data.DataStoreException;
+import org.apache.jackrabbit.core.data.FileDataStore;
+
+/**
+ * <b>experimental</b> Duplicate added entries in another directory (typically a
+ * remote mount).
+ */
+@SuppressWarnings("restriction")
+public class LocalFsDataStore extends FileDataStore {
+ String redundantPath;
+ FileDataStore redundantStore;
+
+ @Override
+ public void init(String homeDir) {
+ // init primary first
+ super.init(homeDir);
+
+ if (redundantPath != null) {
+ // redundant directory must be created first
+ // TODO implement some polling?
+ if (Files.exists(Paths.get(redundantPath))) {
+ redundantStore = new FileDataStore();
+ redundantStore.setPath(redundantPath);
+ redundantStore.init(homeDir);
+ }
+ }
+ }
+
+ @Override
+ public DataRecord addRecord(InputStream input) throws DataStoreException {
+ DataRecord dataRecord = super.addRecord(input);
+ syncRedundantRecord(dataRecord);
+ return dataRecord;
+ }
+
+ @Override
+ public DataRecord getRecord(DataIdentifier identifier) throws DataStoreException {
+ DataRecord dataRecord = super.getRecord(identifier);
+ syncRedundantRecord(dataRecord);
+ return dataRecord;
+ }
+
+ protected void syncRedundantRecord(DataRecord dataRecord) throws DataStoreException {
+ if (redundantStore == null)
+ return;
+ if (redundantStore.getRecordIfStored(dataRecord.getIdentifier()) == null) {
+ try (InputStream redundant = dataRecord.getStream()) {
+ redundantStore.addRecord(redundant);
+ } catch (IOException e) {
+ throw new DataStoreException("Cannot add redundant record.", e);
+ }
+ }
+ }
+
+ public void setRedundantPath(String redundantPath) {
+ this.redundantPath = redundantPath;
+ }
+
+}
--- /dev/null
+package org.argeo.cms.internal.jcr;
+
+import org.argeo.api.NodeConstants;
+import org.argeo.osgi.metatype.EnumAD;
+import org.argeo.osgi.metatype.EnumOCD;
+
+/** JCR repository configuration */
+public enum RepoConf implements EnumAD {
+ /** Repository type */
+ type("h2"),
+ /** Default workspace */
+ defaultWorkspace(NodeConstants.SYS_WORKSPACE),
+ /** Database URL */
+ dburl(null),
+ /** Database user */
+ dbuser(null),
+ /** Database password */
+ dbpassword(null),
+
+ /** The identifier (can be an URL locating the repo) */
+ labeledUri(null),
+ //
+ // JACKRABBIT SPECIFIC
+ //
+ /** Maximum database pool size */
+ maxPoolSize(10),
+ /** Maximum cache size in MB */
+ maxCacheMB(null),
+ /** Bundle cache size in MB */
+ bundleCacheMB(8),
+ /** Extractor pool size */
+ extractorPoolSize(0),
+ /** Search cache size */
+ searchCacheSize(1000),
+ /** Max volatile index size */
+ maxVolatileIndexSize(1048576),
+ /** Cluster id (if appropriate configuration) */
+ clusterId("default"),
+ /** Indexes base path */
+ indexesBase(null);
+
+ /** The default value. */
+ private Object def;
+ private String oid;
+
+ RepoConf(String oid, Object def) {
+ this.oid = oid;
+ this.def = def;
+ }
+
+ RepoConf(Object def) {
+ this.def = def;
+ }
+
+ public Object getDefault() {
+ return def;
+ }
+
+ @Override
+ public String getID() {
+ if (oid != null)
+ return oid;
+ return EnumAD.super.getID();
+ }
+
+ public static class OCD extends EnumOCD<RepoConf> {
+ public OCD(String locale) {
+ super(RepoConf.class, locale);
+ }
+ }
+
+}
--- /dev/null
+package org.argeo.cms.internal.jcr;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Dictionary;
+import java.util.Enumeration;
+import java.util.Properties;
+import java.util.UUID;
+
+import javax.jcr.RepositoryException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.core.RepositoryContext;
+import org.apache.jackrabbit.core.RepositoryImpl;
+import org.apache.jackrabbit.core.cache.CacheManager;
+import org.apache.jackrabbit.core.config.RepositoryConfig;
+import org.apache.jackrabbit.core.config.RepositoryConfigurationParser;
+import org.argeo.api.NodeConstants;
+import org.argeo.cms.jcr.internal.CmsPaths;
+import org.xml.sax.InputSource;
+
+/** Can interpret properties in order to create an actual JCR repository. */
+public class RepositoryBuilder {
+ private final static Log log = LogFactory.getLog(RepositoryBuilder.class);
+
+ public RepositoryContext createRepositoryContext(Dictionary<String, ?> properties)
+ throws RepositoryException, IOException {
+ RepositoryConfig repositoryConfig = createRepositoryConfig(properties);
+ RepositoryContext repositoryContext = createJackrabbitRepository(repositoryConfig);
+ RepositoryImpl repository = repositoryContext.getRepository();
+
+ // cache
+ Object maxCacheMbStr = prop(properties, RepoConf.maxCacheMB);
+ if (maxCacheMbStr != null) {
+ Integer maxCacheMB = Integer.parseInt(maxCacheMbStr.toString());
+ CacheManager cacheManager = repository.getCacheManager();
+ cacheManager.setMaxMemory(maxCacheMB * 1024l * 1024l);
+ cacheManager.setMaxMemoryPerCache((maxCacheMB / 4) * 1024l * 1024l);
+ }
+
+ return repositoryContext;
+ }
+
+ RepositoryConfig createRepositoryConfig(Dictionary<String, ?> properties) throws RepositoryException, IOException {
+ JackrabbitType type = JackrabbitType.valueOf(prop(properties, RepoConf.type).toString());
+ ClassLoader cl = getClass().getClassLoader();
+ final String base = "/org/argeo/cms/internal/jcr";
+ try (InputStream in = cl.getResourceAsStream(base + "/repository-" + type.name() + ".xml")) {
+ if (in == null)
+ throw new IllegalArgumentException("Repository configuration not found");
+ InputSource config = new InputSource(in);
+ Properties jackrabbitVars = getConfigurationProperties(type, properties);
+ // RepositoryConfig repositoryConfig = RepositoryConfig.create(config,
+ // jackrabbitVars);
+
+ // custom configuration parser
+ CustomRepositoryConfigurationParser parser = new CustomRepositoryConfigurationParser(jackrabbitVars);
+ parser.setClassLoader(cl);
+ RepositoryConfig repositoryConfig = parser.parseRepositoryConfig(config);
+ repositoryConfig.init();
+
+ // set the proper classloaders
+ repositoryConfig.getSecurityConfig().getSecurityManagerConfig().setClassLoader(cl);
+ repositoryConfig.getSecurityConfig().getAccessManagerConfig().setClassLoader(cl);
+// for (WorkspaceConfig workspaceConfig : repositoryConfig.getWorkspaceConfigs()) {
+// workspaceConfig.getSecurityConfig().getAccessControlProviderConfig().setClassLoader(cl);
+// }
+ return repositoryConfig;
+ }
+ }
+
+ private Properties getConfigurationProperties(JackrabbitType type, Dictionary<String, ?> properties) {
+ Properties props = new Properties();
+ for (Enumeration<String> keys = properties.keys(); keys.hasMoreElements();) {
+ String key = keys.nextElement();
+ props.put(key, properties.get(key));
+ }
+
+ // cluster id
+ // cf. https://wiki.apache.org/jackrabbit/Clustering
+ // TODO deal with multiple repos
+ String clusterId = System.getProperty("org.apache.jackrabbit.core.cluster.node_id");
+ String clusterIdProp = props.getProperty(RepoConf.clusterId.name());
+ if (clusterId != null) {
+ if (clusterIdProp != null)
+ throw new IllegalArgumentException("Cluster id defined as System properties and in deploy config");
+ props.put(RepoConf.clusterId.name(), clusterId);
+ } else {
+ clusterId = clusterIdProp;
+ }
+
+ // home
+ String homeUri = props.getProperty(RepoConf.labeledUri.name());
+ Path homePath;
+ if (homeUri == null) {
+ String cn = props.getProperty(NodeConstants.CN);
+ assert cn != null;
+ if (clusterId != null) {
+ homePath = CmsPaths.getRepoDirPath(cn + '/' + clusterId);
+ } else {
+ homePath = CmsPaths.getRepoDirPath(cn);
+ }
+ } else {
+ try {
+ URI uri = new URI(homeUri);
+ String host = uri.getHost();
+ if (host == null || host.trim().equals("")) {
+ homePath = Paths.get(uri).toAbsolutePath();
+ } else {
+ // TODO remote at this stage?
+ throw new IllegalArgumentException("Cannot manage repository path for host " + host);
+ }
+ } catch (URISyntaxException e) {
+ throw new IllegalArgumentException("Invalid repository home URI", e);
+ }
+ }
+ // TODO use Jackrabbit API (?)
+ Path rootUuidPath = homePath.resolve("repository/meta/rootUUID");
+ try {
+ if (!Files.exists(rootUuidPath)) {
+ Files.createDirectories(rootUuidPath.getParent());
+ Files.write(rootUuidPath, UUID.randomUUID().toString().getBytes());
+ }
+ // File homeDir = homePath.toFile();
+ // homeDir.mkdirs();
+ } catch (IOException e) {
+ throw new RuntimeException("Cannot set up repository home " + homePath, e);
+ }
+ // home cannot be overridden
+ props.put(RepositoryConfigurationParser.REPOSITORY_HOME_VARIABLE, homePath.toString());
+
+ setProp(props, RepoConf.indexesBase, CmsPaths.getRepoIndexesBase().toString());
+ // common
+ setProp(props, RepoConf.defaultWorkspace);
+ setProp(props, RepoConf.maxPoolSize);
+ // Jackrabbit defaults
+ setProp(props, RepoConf.bundleCacheMB);
+ // See http://wiki.apache.org/jackrabbit/Search
+ setProp(props, RepoConf.extractorPoolSize);
+ setProp(props, RepoConf.searchCacheSize);
+ setProp(props, RepoConf.maxVolatileIndexSize);
+
+ // specific
+ String dburl;
+ switch (type) {
+ case h2:
+ dburl = "jdbc:h2:" + homePath.toAbsolutePath() + "/h2/repository";
+ setProp(props, RepoConf.dburl, dburl);
+ setProp(props, RepoConf.dbuser, "sa");
+ setProp(props, RepoConf.dbpassword, "");
+ break;
+ case h2_postgresql:
+ dburl = "jdbc:h2:" + homePath.toAbsolutePath() + "/h2/repository;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE";
+ setProp(props, RepoConf.dburl, dburl);
+ setProp(props, RepoConf.dbuser, "sa");
+ setProp(props, RepoConf.dbpassword, "");
+ break;
+ case postgresql:
+ case postgresql_ds:
+ case postgresql_cluster:
+ case postgresql_cluster_ds:
+ dburl = "jdbc:postgresql://localhost/demo";
+ setProp(props, RepoConf.dburl, dburl);
+ setProp(props, RepoConf.dbuser, "argeo");
+ setProp(props, RepoConf.dbpassword, "argeo");
+ break;
+ case memory:
+ break;
+ case localfs:
+ break;
+ default:
+ throw new IllegalArgumentException("Unsupported node type " + type);
+ }
+ return props;
+ }
+
+ private void setProp(Properties props, RepoConf key, String def) {
+ Object value = props.get(key.name());
+ if (value == null)
+ value = def;
+ if (value == null)
+ value = key.getDefault();
+ if (value != null)
+ props.put(key.name(), value.toString());
+ }
+
+ private void setProp(Properties props, RepoConf key) {
+ setProp(props, key, null);
+ }
+
+ private String prop(Dictionary<String, ?> properties, RepoConf key) {
+ Object value = properties.get(key.name());
+ if (value == null)
+ return key.getDefault() != null ? key.getDefault().toString() : null;
+ else
+ return value.toString();
+ }
+
+ private RepositoryContext createJackrabbitRepository(RepositoryConfig repositoryConfig) throws RepositoryException {
+ ClassLoader currentContextCl = Thread.currentThread().getContextClassLoader();
+ Thread.currentThread().setContextClassLoader(RepositoryBuilder.class.getClassLoader());
+ try {
+ long begin = System.currentTimeMillis();
+ //
+ // Actual repository creation
+ //
+ RepositoryContext repositoryContext = RepositoryContext.create(repositoryConfig);
+
+ double duration = ((double) (System.currentTimeMillis() - begin)) / 1000;
+ if (log.isDebugEnabled())
+ log.debug(
+ "Created Jackrabbit repository in " + duration + " s, home: " + repositoryConfig.getHomeDir());
+
+ return repositoryContext;
+ } finally {
+ Thread.currentThread().setContextClassLoader(currentContextCl);
+ }
+ }
+
+}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<!-- Jackrabbit repository template: embedded H2 database through a shared pooled
+	DataSource (DbFileSystem + H2PersistenceManager), binaries in a local
+	FileDataStore. ${...} variables are interpolated by Jackrabbit at startup. -->
+<Repository>
+	<!-- Shared datasource -->
+	<DataSources>
+		<DataSource name="dataSource">
+			<param name="driver" value="org.h2.Driver" />
+			<param name="url" value="${dburl}" />
+			<param name="user" value="${dbuser}" />
+			<param name="password" value="${dbpassword}" />
+			<param name="databaseType" value="h2" />
+			<param name="maxPoolSize" value="${maxPoolSize}" />
+		</DataSource>
+	</DataSources>
+
+	<!-- File system and datastore -->
+	<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+		<param name="dataSourceName" value="dataSource" />
+		<param name="schema" value="default" />
+		<param name="schemaObjectPrefix" value="fs_" />
+	</FileSystem>
+	<DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
+		<param name="path" value="${rep.home}/datastore" />
+	</DataStore>
+
+	<!-- Workspace templates -->
+	<Workspaces rootPath="${rep.home}/workspaces"
+		defaultWorkspace="${defaultWorkspace}" />
+	<Workspace name="${wsp.name}">
+		<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="default" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+		<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+			<param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
+			<param name="extractorPoolSize" value="${extractorPoolSize}" />
+			<param name="cacheSize" value="${searchCacheSize}" />
+			<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+<!-- <param name="tikaConfigPath" value="${indexesBase}/${cn}/tika-config.xml" /> -->
+			<param name="supportHighlighting" value="true" />
+		</SearchIndex>
+		<WorkspaceSecurity>
+			<AccessControlProvider
+				class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+		</WorkspaceSecurity>
+	</Workspace>
+
+	<!-- Versioning -->
+	<Versioning rootPath="${rep.home}/version">
+		<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="default" />
+			<param name="schemaObjectPrefix" value="fs_ver_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="pm_ver_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+	</Versioning>
+
+	<!-- Indexing -->
+	<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+		<param name="path" value="${indexesBase}/${cn}/index" />
+		<param name="extractorPoolSize" value="${extractorPoolSize}" />
+		<param name="cacheSize" value="${searchCacheSize}" />
+		<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+<!-- <param name="tikaConfigPath" value="${indexesBase}/${cn}/tika-config.xml" /> -->
+		<param name="supportHighlighting" value="true" />
+	</SearchIndex>
+
+	<!-- Security -->
+	<Security appName="Jackrabbit">
+		<SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+			workspaceName="security" />
+		<AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+	</Security>
+</Repository>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<!-- Jackrabbit repository template: shared pooled DataSource with PostgreSQL
+	persistence managers and a local FileDataStore for binaries. -->
+<Repository>
+	<!-- Shared datasource -->
+	<DataSources>
+		<DataSource name="dataSource">
+			<!-- NOTE(review): driver is org.h2.Driver while databaseType and the
+				PersistenceManagers below are postgresql — confirm this is intentional
+				(e.g. H2 in PostgreSQL compatibility mode) or align the driver and
+				databaseType with each other. -->
+			<param name="driver" value="org.h2.Driver" />
+			<param name="url" value="${dburl}" />
+			<param name="user" value="${dbuser}" />
+			<param name="password" value="${dbpassword}" />
+			<param name="databaseType" value="postgresql" />
+			<param name="maxPoolSize" value="${maxPoolSize}" />
+		</DataSource>
+	</DataSources>
+
+	<!-- File system and datastore -->
+	<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+		<param name="dataSourceName" value="dataSource" />
+		<param name="schema" value="default" />
+		<param name="schemaObjectPrefix" value="fs_" />
+	</FileSystem>
+	<DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
+		<param name="path" value="${rep.home}/datastore" />
+	</DataStore>
+
+	<!-- Workspace templates -->
+	<Workspaces rootPath="${rep.home}/workspaces"
+		defaultWorkspace="${defaultWorkspace}" />
+	<Workspace name="${wsp.name}">
+		<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="default" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+		<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+			<param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
+			<param name="extractorPoolSize" value="${extractorPoolSize}" />
+			<param name="cacheSize" value="${searchCacheSize}" />
+			<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+<!-- <param name="tikaConfigPath" value="${indexesBase}/${cn}/tika-config.xml" /> -->
+			<param name="supportHighlighting" value="true" />
+		</SearchIndex>
+		<WorkspaceSecurity>
+			<AccessControlProvider
+				class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+		</WorkspaceSecurity>
+	</Workspace>
+
+	<!-- Versioning -->
+	<Versioning rootPath="${rep.home}/version">
+		<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="default" />
+			<param name="schemaObjectPrefix" value="fs_ver_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="pm_ver_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+	</Versioning>
+
+	<!-- Indexing -->
+	<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+		<param name="path" value="${indexesBase}/${cn}/index" />
+		<param name="extractorPoolSize" value="${extractorPoolSize}" />
+		<param name="cacheSize" value="${searchCacheSize}" />
+		<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+<!-- <param name="tikaConfigPath" value="${indexesBase}/${cn}/tika-config.xml" /> -->
+		<param name="supportHighlighting" value="true" />
+	</SearchIndex>
+
+	<!-- Security -->
+	<Security appName="Jackrabbit">
+		<SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+			workspaceName="security" />
+		<AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+	</Security>
+</Repository>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<!-- Jackrabbit repository template: purely file-based storage — LocalFileSystem,
+	BundleFsPersistenceManager and a local FileDataStore; no database involved. -->
+<Repository>
+	<!-- File system and datastore -->
+	<FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
+		<param name="path" value="${rep.home}/repository" />
+	</FileSystem>
+	<DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
+		<param name="path" value="${rep.home}/datastore" />
+	</DataStore>
+
+	<!-- Workspace templates -->
+	<Workspaces rootPath="${rep.home}/workspaces"
+		defaultWorkspace="${defaultWorkspace}" />
+	<Workspace name="${wsp.name}">
+		<FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
+			<param name="path" value="${wsp.home}" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+		<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+			<param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
+			<param name="extractorPoolSize" value="${extractorPoolSize}" />
+			<param name="cacheSize" value="${searchCacheSize}" />
+			<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+		</SearchIndex>
+		<WorkspaceSecurity>
+			<AccessControlProvider
+				class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+		</WorkspaceSecurity>
+	</Workspace>
+
+	<!-- Versioning -->
+	<Versioning rootPath="${rep.home}/version">
+		<FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
+			<param name="path" value="${rep.home}/version" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+	</Versioning>
+
+	<!-- Indexing -->
+	<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+		<param name="path" value="${indexesBase}/${cn}/index" />
+		<param name="extractorPoolSize" value="${extractorPoolSize}" />
+		<param name="cacheSize" value="${searchCacheSize}" />
+		<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+	</SearchIndex>
+
+	<!-- Security -->
+	<Security appName="Jackrabbit">
+		<SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+			workspaceName="security" />
+		<AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+	</Security>
+</Repository>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<!-- Jackrabbit repository template: transient in-memory storage —
+	MemoryFileSystem everywhere and RAM-based Lucene directories. -->
+<Repository>
+	<!-- File system and datastore -->
+	<FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+
+	<!-- Workspace templates -->
+	<Workspaces rootPath="${rep.home}/workspaces"
+		defaultWorkspace="${defaultWorkspace}" configRootPath="/workspaces" />
+	<Workspace name="${wsp.name}">
+		<FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+			<param name="blobFSBlockSize" value="1" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+		<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+			<param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
+			<param name="directoryManagerClass"
+				value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
+			<param name="extractorPoolSize" value="${extractorPoolSize}" />
+			<param name="cacheSize" value="${searchCacheSize}" />
+			<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+			<FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+		</SearchIndex>
+		<WorkspaceSecurity>
+			<AccessControlProvider
+				class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+		</WorkspaceSecurity>
+	</Workspace>
+
+	<!-- Versioning -->
+	<Versioning rootPath="${rep.home}/version">
+		<FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+			<param name="blobFSBlockSize" value="1" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+	</Versioning>
+
+	<!-- Indexing -->
+	<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+		<param name="path" value="${indexesBase}/${cn}/index" />
+		<param name="directoryManagerClass"
+			value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
+		<param name="extractorPoolSize" value="${extractorPoolSize}" />
+		<param name="cacheSize" value="${searchCacheSize}" />
+		<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+		<FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+	</SearchIndex>
+
+	<!-- Security -->
+	<Security appName="Jackrabbit">
+		<SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+			workspaceName="security" />
+		<AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+	</Security>
+</Repository>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<!-- Jackrabbit repository template: PostgreSQL storage via a shared pooled
+	DataSource. No DataStore element is configured in this variant. -->
+<Repository>
+	<!-- Shared datasource -->
+	<DataSources>
+		<DataSource name="dataSource">
+			<param name="driver" value="org.postgresql.Driver" />
+			<param name="url" value="${dburl}" />
+			<param name="user" value="${dbuser}" />
+			<param name="password" value="${dbpassword}" />
+			<param name="databaseType" value="postgresql" />
+			<param name="maxPoolSize" value="${maxPoolSize}" />
+		</DataSource>
+	</DataSources>
+
+	<!-- File system and datastore -->
+	<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+		<param name="dataSourceName" value="dataSource" />
+		<param name="schema" value="postgresql" />
+		<param name="schemaObjectPrefix" value="fs_" />
+	</FileSystem>
+
+	<!-- Workspace templates -->
+	<Workspaces rootPath="${rep.home}/workspaces"
+		defaultWorkspace="${defaultWorkspace}" />
+	<Workspace name="${wsp.name}">
+		<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="postgresql" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+		<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+			<param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
+			<param name="extractorPoolSize" value="${extractorPoolSize}" />
+			<param name="cacheSize" value="${searchCacheSize}" />
+			<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+		</SearchIndex>
+		<WorkspaceSecurity>
+			<AccessControlProvider
+				class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+		</WorkspaceSecurity>
+	</Workspace>
+
+	<!-- Versioning -->
+	<Versioning rootPath="${rep.home}/version">
+		<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="postgresql" />
+			<param name="schemaObjectPrefix" value="fs_ver_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="pm_ver_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+	</Versioning>
+
+	<!-- Indexing -->
+	<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+		<param name="path" value="${indexesBase}/${cn}/index" />
+		<param name="extractorPoolSize" value="${extractorPoolSize}" />
+		<param name="cacheSize" value="${searchCacheSize}" />
+		<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+	</SearchIndex>
+
+	<!-- Security -->
+	<Security appName="Jackrabbit">
+		<SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+			workspaceName="security" />
+		<AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+	</Security>
+</Repository>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<!-- Jackrabbit repository template: PostgreSQL storage with clustering — same
+	as the plain PostgreSQL template plus a Cluster/DatabaseJournal section
+	sharing the pooled DataSource. -->
+<Repository>
+	<!-- Shared datasource -->
+	<DataSources>
+		<DataSource name="dataSource">
+			<param name="driver" value="org.postgresql.Driver" />
+			<param name="url" value="${dburl}" />
+			<param name="user" value="${dbuser}" />
+			<param name="password" value="${dbpassword}" />
+			<param name="databaseType" value="postgresql" />
+			<param name="maxPoolSize" value="${maxPoolSize}" />
+		</DataSource>
+	</DataSources>
+
+	<!-- File system and datastore -->
+	<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+		<param name="dataSourceName" value="dataSource" />
+		<param name="schema" value="postgresql" />
+		<param name="schemaObjectPrefix" value="fs_" />
+	</FileSystem>
+
+	<!-- Workspace templates -->
+	<Workspaces rootPath="${rep.home}/workspaces"
+		defaultWorkspace="${defaultWorkspace}" />
+	<Workspace name="${wsp.name}">
+		<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="postgresql" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+		<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+			<param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
+			<param name="extractorPoolSize" value="${extractorPoolSize}" />
+			<param name="cacheSize" value="${searchCacheSize}" />
+			<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+		</SearchIndex>
+		<WorkspaceSecurity>
+			<AccessControlProvider
+				class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+		</WorkspaceSecurity>
+	</Workspace>
+
+	<!-- Versioning -->
+	<Versioning rootPath="${rep.home}/version">
+		<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="postgresql" />
+			<param name="schemaObjectPrefix" value="fs_ver_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="pm_ver_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+	</Versioning>
+
+	<!-- Indexing -->
+	<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+		<param name="path" value="${indexesBase}/${cn}/index" />
+		<param name="extractorPoolSize" value="${extractorPoolSize}" />
+		<param name="cacheSize" value="${searchCacheSize}" />
+		<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+	</SearchIndex>
+
+	<!-- Security -->
+	<Security appName="Jackrabbit">
+		<SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+			workspaceName="security" />
+		<AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+	</Security>
+
+	<!-- Clustering -->
+	<Cluster id="${clusterId}">
+		<Journal class="org.apache.jackrabbit.core.journal.DatabaseJournal">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="journal_" />
+		</Journal>
+	</Cluster>
+</Repository>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<!-- Jackrabbit repository template: PostgreSQL storage with a custom
+	LocalFsDataStore (primary + redundant path) for binaries, and clustering
+	through a DatabaseJournal with syncDelay=100. -->
+<Repository>
+	<!-- Shared datasource -->
+	<DataSources>
+		<DataSource name="dataSource">
+			<param name="driver" value="org.postgresql.Driver" />
+			<param name="url" value="${dburl}" />
+			<param name="user" value="${dbuser}" />
+			<param name="password" value="${dbpassword}" />
+			<param name="databaseType" value="postgresql" />
+			<param name="maxPoolSize" value="${maxPoolSize}" />
+		</DataSource>
+	</DataSources>
+
+	<!-- File system and datastore -->
+	<FileSystem
+		class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+		<param name="dataSourceName" value="dataSource" />
+		<param name="schema" value="postgresql" />
+		<param name="schemaObjectPrefix" value="fs_" />
+	</FileSystem>
+	<DataStore
+		class="org.argeo.cms.internal.jcr.LocalFsDataStore">
+		<param name="path" value="${rep.home}/../datastore" />
+		<!-- NOTE(review): redundantPath ends in "datastorer" — confirm this is
+			the intended directory name and not a typo for "datastore". -->
+		<param name="redundantPath" value="${rep.home}/../datastorer" />
+	</DataStore>
+
+	<!-- Workspace templates -->
+	<Workspaces rootPath="${rep.home}/workspaces"
+		defaultWorkspace="${defaultWorkspace}" />
+	<Workspace name="${wsp.name}">
+		<FileSystem
+			class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="postgresql" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+		<SearchIndex
+			class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+			<param name="path"
+				value="${indexesBase}/${cn}/${wsp.name}/index" />
+			<param name="extractorPoolSize" value="${extractorPoolSize}" />
+			<param name="cacheSize" value="${searchCacheSize}" />
+			<param name="maxVolatileIndexSize"
+				value="${maxVolatileIndexSize}" />
+		</SearchIndex>
+		<WorkspaceSecurity>
+			<AccessControlProvider
+				class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+		</WorkspaceSecurity>
+	</Workspace>
+
+	<!-- Versioning -->
+	<Versioning rootPath="${rep.home}/version">
+		<FileSystem
+			class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="postgresql" />
+			<param name="schemaObjectPrefix" value="fs_ver_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="pm_ver_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+	</Versioning>
+
+	<!-- Indexing -->
+	<SearchIndex
+		class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+		<param name="path" value="${indexesBase}/${cn}/index" />
+		<param name="extractorPoolSize" value="${extractorPoolSize}" />
+		<param name="cacheSize" value="${searchCacheSize}" />
+		<param name="maxVolatileIndexSize"
+			value="${maxVolatileIndexSize}" />
+	</SearchIndex>
+
+	<!-- Security -->
+	<Security appName="Jackrabbit">
+		<SecurityManager
+			class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+			workspaceName="security" />
+		<AccessManager
+			class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+	</Security>
+
+	<!-- Clustering -->
+	<Cluster id="${clusterId}" syncDelay="100">
+		<Journal
+			class="org.apache.jackrabbit.core.journal.DatabaseJournal">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="journal_" />
+		</Journal>
+	</Cluster>
+</Repository>
\ No newline at end of file
--- /dev/null
+<?xml version="1.0"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<!-- Jackrabbit repository template: PostgreSQL storage via a shared pooled
+	DataSource, with binaries in a local FileDataStore. -->
+<Repository>
+	<!-- Shared datasource -->
+	<DataSources>
+		<DataSource name="dataSource">
+			<param name="driver" value="org.postgresql.Driver" />
+			<param name="url" value="${dburl}" />
+			<param name="user" value="${dbuser}" />
+			<param name="password" value="${dbpassword}" />
+			<param name="databaseType" value="postgresql" />
+			<param name="maxPoolSize" value="${maxPoolSize}" />
+		</DataSource>
+	</DataSources>
+
+	<!-- File system and datastore -->
+	<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+		<param name="dataSourceName" value="dataSource" />
+		<param name="schema" value="postgresql" />
+		<param name="schemaObjectPrefix" value="fs_" />
+	</FileSystem>
+	<DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
+		<param name="path" value="${rep.home}/datastore" />
+	</DataStore>
+
+	<!-- Workspace templates -->
+	<Workspaces rootPath="${rep.home}/workspaces"
+		defaultWorkspace="${defaultWorkspace}" />
+	<Workspace name="${wsp.name}">
+		<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="postgresql" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+		<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+			<param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
+			<param name="extractorPoolSize" value="${extractorPoolSize}" />
+			<param name="cacheSize" value="${searchCacheSize}" />
+			<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+		</SearchIndex>
+		<WorkspaceSecurity>
+			<AccessControlProvider
+				class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+		</WorkspaceSecurity>
+	</Workspace>
+
+	<!-- Versioning -->
+	<Versioning rootPath="${rep.home}/version">
+		<FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schema" value="postgresql" />
+			<param name="schemaObjectPrefix" value="fs_ver_" />
+		</FileSystem>
+		<PersistenceManager
+			class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+			<param name="dataSourceName" value="dataSource" />
+			<param name="schemaObjectPrefix" value="pm_ver_" />
+			<param name="bundleCacheSize" value="${bundleCacheMB}" />
+		</PersistenceManager>
+	</Versioning>
+
+	<!-- Indexing -->
+	<SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+		<param name="path" value="${indexesBase}/${cn}/index" />
+		<param name="extractorPoolSize" value="${extractorPoolSize}" />
+		<param name="cacheSize" value="${searchCacheSize}" />
+		<param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+	</SearchIndex>
+
+	<!-- Security -->
+	<Security appName="Jackrabbit">
+		<SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+			workspaceName="security" />
+		<AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+	</Security>
+</Repository>
\ No newline at end of file
--- /dev/null
+<argeo = 'http://www.argeo.org/ns/argeo'>
+
+// GENERIC TYPES
+// Reference to a remote JCR repository; credentials are kept in an encrypted child node.
+[argeo:remoteRepository] > nt:unstructured
+- argeo:uri (STRING)
+- argeo:userID (STRING)
++ argeo:password (argeo:encrypted)
+
+// TABULAR CONTENT
+// A file whose content is tabular; columns are declared as child nodes.
+[argeo:table] > nt:file
++ * (argeo:column) *
+
+// Column description; jcr:requiredType names the JCR property type of the values.
+[argeo:column] > mix:title
+- jcr:requiredType (STRING) = 'STRING'
+
+[argeo:csv] > nt:resource
+
+// CRYPTO
+// Marks a node whose value(s) are stored encrypted.
+[argeo:encrypted]
+mixin
+// initialization vector used by some algorithms
+- argeo:iv (BINARY)
+
+// Parameters of a password-based encryption key specification.
+[argeo:pbeKeySpec]
+mixin
+- argeo:secretKeyFactory (STRING)
+- argeo:salt (BINARY)
+- argeo:iterationCount (LONG)
+- argeo:keyLength (LONG)
+- argeo:secretKeyEncryption (STRING)
+
+// Full password-based encryption specification, adding the cipher to use.
+[argeo:pbeSpec] > argeo:pbeKeySpec
+mixin
+- argeo:cipher (STRING)
--- /dev/null
+// DN (see https://tools.ietf.org/html/rfc4514)
+// Registers one JCR namespace per standard LDAP DN attribute type, so that
+// distinguished-name components can be mapped to prefixed JCR names.
+<cn = 'http://www.argeo.org/ns/rfc4514/cn'>
+<l = 'http://www.argeo.org/ns/rfc4514/l'>
+<st = 'http://www.argeo.org/ns/rfc4514/st'>
+<o = 'http://www.argeo.org/ns/rfc4514/o'>
+<ou = 'http://www.argeo.org/ns/rfc4514/ou'>
+<c = 'http://www.argeo.org/ns/rfc4514/c'>
+<street = 'http://www.argeo.org/ns/rfc4514/street'>
+<dc = 'http://www.argeo.org/ns/rfc4514/dc'>
+<uid = 'http://www.argeo.org/ns/rfc4514/uid'>
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.FileSystem;
+import java.nio.file.FileSystemAlreadyExistsException;
+import java.nio.file.Path;
+import java.nio.file.spi.FileSystemProvider;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.jcr.Node;
+import javax.jcr.Property;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.RepositoryFactory;
+import javax.jcr.Session;
+import javax.jcr.nodetype.NodeType;
+
+import org.argeo.api.NodeConstants;
+import org.argeo.api.NodeUtils;
+import org.argeo.cms.auth.CurrentUser;
+import org.argeo.jackrabbit.fs.AbstractJackrabbitFsProvider;
+import org.argeo.jcr.fs.JcrFileSystem;
+import org.argeo.jcr.fs.JcrFileSystemProvider;
+import org.argeo.jcr.fs.JcrFsException;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.FrameworkUtil;
+import org.osgi.framework.InvalidSyntaxException;
+
+/** Implementation of an {@link FileSystemProvider} based on Jackrabbit. */
+public class CmsFsProvider extends AbstractJackrabbitFsProvider {
+	// One open file system per user, keyed by username.
+	// NOTE(review): plain HashMap, not synchronized — the FileSystemProvider SPI
+	// can be reached from several threads; consider ConcurrentHashMap.
+	private Map<String, CmsFileSystem> fileSystems = new HashMap<>();
+
+	@Override
+	public String getScheme() {
+		return NodeConstants.SCHEME_NODE;
+	}
+
+	/**
+	 * Creates the file system of the current user. A URI with a host connects to
+	 * the remote repository exposed at /jcr/node on that host via the
+	 * OSGi-registered {@link RepositoryFactory}; otherwise the local OSGi
+	 * {@link Repository} service registered with cn=ego is used. Returns null when
+	 * no username is available (anonymous access not handled yet).
+	 */
+	@Override
+	public FileSystem newFileSystem(URI uri, Map<String, ?> env) throws IOException {
+		BundleContext bc = FrameworkUtil.getBundle(CmsFsProvider.class).getBundleContext();
+		String username = CurrentUser.getUsername();
+		if (username == null) {
+			// TODO deal with anonymous
+			return null;
+		}
+		if (fileSystems.containsKey(username))
+			throw new FileSystemAlreadyExistsException("CMS file system already exists for user " + username);
+
+		try {
+			String host = uri.getHost();
+			if (host != null && !host.trim().equals("")) {
+				// remote: rebuild a plain HTTP repository URI from the requested one
+				URI repoUri = new URI("http", uri.getUserInfo(), uri.getHost(), uri.getPort(), "/jcr/node", null, null);
+				RepositoryFactory repositoryFactory = bc.getService(bc.getServiceReference(RepositoryFactory.class));
+				Repository repository = NodeUtils.getRepositoryByUri(repositoryFactory, repoUri.toString());
+				CmsFileSystem fileSystem = new CmsFileSystem(this, repository);
+				fileSystems.put(username, fileSystem);
+				return fileSystem;
+			} else {
+				// local: take the first Repository service matching the ego cn filter
+				Repository repository = bc.getService(
+						bc.getServiceReferences(Repository.class, "(cn=" + NodeConstants.EGO_REPOSITORY + ")")
+								.iterator().next());
+//				Session session = repository.login();
+				CmsFileSystem fileSystem = new CmsFileSystem(this, repository);
+				fileSystems.put(username, fileSystem);
+				return fileSystem;
+			}
+		} catch (InvalidSyntaxException | URISyntaxException e) {
+			throw new IllegalArgumentException("Cannot open file system " + uri + " for user " + username, e);
+		}
+	}
+
+	@Override
+	public FileSystem getFileSystem(URI uri) {
+		return currentUserFileSystem();
+	}
+
+	/**
+	 * Resolves a path, auto-creating the current user's file system if needed.
+	 * NOTE(review): newFileSystem may return null (anonymous user), in which case
+	 * the final call would throw a NullPointerException — confirm callers are
+	 * always authenticated here.
+	 */
+	@Override
+	public Path getPath(URI uri) {
+		JcrFileSystem fileSystem = currentUserFileSystem();
+		String path = uri.getPath();
+		if (fileSystem == null)
+			try {
+				fileSystem = (JcrFileSystem) newFileSystem(uri, new HashMap<String, Object>());
+			} catch (IOException e) {
+				throw new JcrFsException("Could not autocreate file system", e);
+			}
+		return fileSystem.getPath(path);
+	}
+
+	/** The file system already opened for the current user, or null. */
+	protected JcrFileSystem currentUserFileSystem() {
+		String username = CurrentUser.getUsername();
+		return fileSystems.get(username);
+	}
+
+	/**
+	 * Logs in to the home workspace of the given repository and returns the
+	 * current user's home node. NOTE(review): the session opened here is never
+	 * logged out in this method — confirm ownership is transferred to the caller,
+	 * otherwise this leaks a JCR session per call.
+	 */
+	public Node getUserHome(Repository repository) {
+		try {
+			Session session = repository.login(NodeConstants.HOME_WORKSPACE);
+			return NodeUtils.getUserHome(session);
+		} catch (RepositoryException e) {
+			throw new IllegalStateException("Cannot get user home", e);
+		}
+	}
+
+	/** {@link JcrFileSystem} hiding non-hierarchy nodes from the file view. */
+	static class CmsFileSystem extends JcrFileSystem {
+		public CmsFileSystem(JcrFileSystemProvider provider, Repository repository) throws IOException {
+			super(provider, repository);
+		}
+
+		/** Whether the node should be skipped (hidden) by the file system. */
+		public boolean skipNode(Node node) throws RepositoryException {
+//			if (node.isNodeType(NodeType.NT_HIERARCHY_NODE) || node.isNodeType(NodeTypes.NODE_USER_HOME)
+//					|| node.isNodeType(NodeTypes.NODE_GROUP_HOME))
+			if (node.isNodeType(NodeType.NT_HIERARCHY_NODE))
+				return false;
+			// FIXME Better identifies home
+			if (node.hasProperty(Property.JCR_ID))
+				return false;
+			return true;
+		}
+
+	}
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import java.nio.file.Path;
+
+/** Centralises access to the default node deployment directories. */
+public class CmsPaths {
+ public static Path getRepoDirPath(String cn) {
+ return KernelUtils.getOsgiInstancePath(KernelConstants.DIR_REPOS + '/' + cn);
+ }
+
+ public static Path getRepoIndexesBase() {
+ return KernelUtils.getOsgiInstancePath(KernelConstants.DIR_INDEXES);
+ }
+
+ /** Singleton. */
+ private CmsPaths() {
+ }
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import java.util.GregorianCalendar;
+import java.util.concurrent.LinkedBlockingDeque;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import javax.jcr.Node;
+import javax.jcr.Property;
+import javax.jcr.PropertyType;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.Value;
+import javax.jcr.nodetype.NodeType;
+import javax.jcr.observation.Event;
+import javax.jcr.observation.EventIterator;
+import javax.jcr.observation.EventListener;
+import javax.jcr.version.VersionManager;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.api.JackrabbitValue;
+import org.apache.jackrabbit.core.RepositoryImpl;
+import org.argeo.jcr.JcrUtils;
+
+/**
+ * Ensures consistency of files, folders and last modified nodes in a JCR
+ * workspace. Registers itself as a JCR {@link EventListener} and
+ * asynchronously maintains <code>jcr:etag</code> and
+ * <code>mix:lastModified</code> information on binary content.
+ */
+class CmsWorkspaceIndexer implements EventListener {
+ private final static Log log = LogFactory.getLog(CmsWorkspaceIndexer.class);
+
+// private final static String MIX_ETAG = "mix:etag";
+ private final static String JCR_ETAG = "jcr:etag";
+// private final static String JCR_LAST_MODIFIED = "jcr:lastModified";
+// private final static String JCR_LAST_MODIFIED_BY = "jcr:lastModifiedBy";
+// private final static String JCR_MIXIN_TYPES = "jcr:mixinTypes";
+ private final static String JCR_DATA = "jcr:data";
+ // fixed: was erroneously initialised to "jcr:data", so the jcr:content
+ // node-name check in processEvent() could never match
+ private final static String JCR_CONTENT = "jcr:content";
+
+ private String cn;
+ private String workspaceName;
+ private RepositoryImpl repositoryImpl;
+ // admin session kept open for the whole lifetime of the indexer
+ private Session session;
+ private VersionManager versionManager;
+
+ // events are queued by the observation callback and drained by the
+ // indexing thread, so that indexing never blocks JCR observation
+ private LinkedBlockingDeque<Event> toProcess = new LinkedBlockingDeque<>();
+ private IndexingThread indexingThread;
+ private AtomicBoolean stopping = new AtomicBoolean(false);
+
+ public CmsWorkspaceIndexer(RepositoryImpl repositoryImpl, String cn, String workspaceName)
+ throws RepositoryException {
+ this.cn = cn;
+ this.workspaceName = workspaceName;
+ this.repositoryImpl = repositoryImpl;
+ }
+
+ /** Opens the admin session, registers the listener, starts indexing. */
+ public void init() {
+ session = KernelUtils.openAdminSession(repositoryImpl, workspaceName);
+ try {
+ String[] nodeTypes = { NodeType.NT_FILE, NodeType.MIX_LAST_MODIFIED };
+ // NB: only NODE_ADDED and PROPERTY_CHANGED are registered, so the
+ // NODE_REMOVED branch in processEvent() is currently never reached
+ session.getWorkspace().getObservationManager().addEventListener(this,
+ Event.NODE_ADDED | Event.PROPERTY_CHANGED, "/", true, null, nodeTypes, true);
+ versionManager = session.getWorkspace().getVersionManager();
+
+ indexingThread = new IndexingThread();
+ indexingThread.start();
+ } catch (RepositoryException e1) {
+ throw new IllegalStateException(e1);
+ }
+ }
+
+ /**
+  * Stops the indexing thread (waiting for pending events), unregisters the
+  * listener and logs out the admin session.
+  */
+ public void destroy() {
+ stopping.set(true);
+ indexingThread.interrupt();
+ // TODO make it configurable
+ try {
+ indexingThread.join(10 * 60 * 1000);
+ } catch (InterruptedException e1) {
+ log.warn("Indexing thread interrupted. Will log out session.");
+ }
+
+ try {
+ session.getWorkspace().getObservationManager().removeEventListener(this);
+ } catch (RepositoryException e) {
+ if (log.isTraceEnabled())
+ log.warn("Cannot unregister JCR event listener", e);
+ } finally {
+ JcrUtils.logoutQuietly(session);
+ }
+ }
+
+ /** Queues the received events for asynchronous processing. */
+ private synchronized void processEvents(EventIterator events) {
+ long begin = System.currentTimeMillis();
+ long count = 0;
+ while (events.hasNext()) {
+ Event event = events.nextEvent();
+ try {
+ toProcess.put(event);
+ } catch (InterruptedException e) {
+ // restore the interrupt status rather than swallowing it
+ Thread.currentThread().interrupt();
+ log.warn("Interrupted while queueing event " + event, e);
+ }
+ count++;
+ }
+ long duration = System.currentTimeMillis() - begin;
+ if (log.isTraceEnabled())
+ log.trace("Processed " + count + " events in " + duration + " ms");
+ // legacy notification; no matching wait() in this class
+ notifyAll();
+ }
+
+ /**
+  * Applies indexing for a single event: maintains jcr:etag and
+  * mix:lastModified on binary content of nt:file nodes. Runs on the
+  * indexing thread only.
+  */
+ protected synchronized void processEvent(Event event) {
+ try {
+ String eventPath = event.getPath();
+ if (event.getType() == Event.NODE_ADDED) {
+ if (!versionManager.isCheckedOut(eventPath))
+ return;// ignore checked-in nodes
+ if (log.isTraceEnabled())
+ log.trace("NODE_ADDED " + eventPath);
+// session.refresh(true);
+ session.refresh(false);
+ Node node = session.getNode(eventPath);
+ Node parentNode = node.getParent();
+ if (parentNode.isNodeType(NodeType.NT_FILE)) {
+ if (node.isNodeType(NodeType.NT_UNSTRUCTURED)) {
+ if (!node.isNodeType(NodeType.MIX_LAST_MODIFIED))
+ node.addMixin(NodeType.MIX_LAST_MODIFIED);
+ Property property = node.getProperty(Property.JCR_DATA);
+ String etag = toEtag(property.getValue());
+ session.save();
+ node.setProperty(JCR_ETAG, etag);
+ if (log.isTraceEnabled())
+ log.trace("ETag and last modified added to new " + node);
+ } else if (node.isNodeType(NodeType.NT_RESOURCE)) {
+// if (!node.isNodeType(MIX_ETAG))
+// node.addMixin(MIX_ETAG);
+// session.save();
+// Property property = node.getProperty(Property.JCR_DATA);
+// String etag = toEtag(property.getValue());
+// node.setProperty(JCR_ETAG, etag);
+// session.save();
+ }
+// setLastModifiedRecursive(parentNode, event);
+// session.save();
+// if (log.isTraceEnabled())
+// log.trace("ETag and last modified added to new " + node);
+ }
+
+// if (node.isNodeType(NodeType.NT_FOLDER)) {
+// setLastModifiedRecursive(node, event);
+// session.save();
+// if (log.isTraceEnabled())
+// log.trace("Last modified added to new " + node);
+// }
+ } else if (event.getType() == Event.PROPERTY_CHANGED) {
+ String propertyName = extractItemName(eventPath);
+ // skip if last modified properties are explicitly set
+ if (!propertyName.equals(JCR_DATA))
+ return;
+// if (propertyName.equals(JCR_LAST_MODIFIED))
+// return;
+// if (propertyName.equals(JCR_LAST_MODIFIED_BY))
+// return;
+// if (propertyName.equals(JCR_MIXIN_TYPES))
+// return;
+// if (propertyName.equals(JCR_ETAG))
+// return;
+
+ if (log.isTraceEnabled())
+ log.trace("PROPERTY_CHANGED " + eventPath);
+
+ if (!session.propertyExists(eventPath))
+ return;
+ session.refresh(false);
+ Property property = session.getProperty(eventPath);
+ Node node = property.getParent();
+ if (property.getType() == PropertyType.BINARY && propertyName.equals(JCR_DATA)
+ && node.isNodeType(NodeType.NT_UNSTRUCTURED)) {
+ String etag = toEtag(property.getValue());
+ node.setProperty(JCR_ETAG, etag);
+ Node parentNode = node.getParent();
+ if (parentNode.isNodeType(NodeType.MIX_LAST_MODIFIED)) {
+ setLastModified(parentNode, event);
+ }
+ if (log.isTraceEnabled())
+ log.trace("ETag and last modified updated for " + node);
+ }
+// setLastModified(node, event);
+// session.save();
+// if (log.isTraceEnabled())
+// log.trace("ETag and last modified updated for " + node);
+ } else if (event.getType() == Event.NODE_REMOVED) {
+ // NB: NODE_REMOVED is not in the registered event mask (see init())
+ String removeNodePath = eventPath;
+ String nodeName = extractItemName(eventPath);
+ if (JCR_CONTENT.equals(nodeName)) // parent is a file, deleted anyhow
+ return;
+ if (log.isTraceEnabled())
+ log.trace("NODE_REMOVED " + eventPath);
+// String parentPath = JcrUtils.parentPath(removeNodePath);
+// session.refresh(true);
+// setLastModified(parentPath, event);
+// session.save();
+ if (log.isTraceEnabled())
+ log.trace("Last modified updated for parents of removed " + removeNodePath);
+ }
+ } catch (Exception e) {
+ // indexing is best effort: never propagate to the indexing thread
+ if (log.isTraceEnabled())
+ log.warn("Cannot process event " + event, e);
+ } finally {
+// try {
+// session.refresh(true);
+// if (session.hasPendingChanges())
+// session.save();
+//// session.refresh(false);
+// } catch (RepositoryException e) {
+// if (log.isTraceEnabled())
+// log.warn("Cannot refresh JCR session", e);
+// }
+ }
+
+ }
+
+ /** Last segment of a JCR path, or the path itself if it has no '/'. */
+ private String extractItemName(String path) {
+ if (path == null || path.length() <= 1)
+ return null;
+ int lastIndex = path.lastIndexOf('/');
+ if (lastIndex >= 0) {
+ return path.substring(lastIndex + 1);
+ } else {
+ return path;
+ }
+ }
+
+ @Override
+ public void onEvent(EventIterator events) {
+ processEvents(events);
+// Runnable toRun = new Runnable() {
+//
+// @Override
+// public void run() {
+// processEvents(events);
+// }
+// };
+// Future<?> future = Activator.getInternalExecutorService().submit(toRun);
+// try {
+// // make the call synchronous
+// future.get(60, TimeUnit.SECONDS);
+// } catch (TimeoutException | ExecutionException | InterruptedException e) {
+// // silent
+// }
+ }
+
+ /**
+  * ETag derived from the Jackrabbit content identity of a binary value, or
+  * null if the value is not a {@link JackrabbitValue}.
+  */
+ static String toEtag(Value v) {
+ if (v instanceof JackrabbitValue) {
+ JackrabbitValue value = (JackrabbitValue) v;
+ return '\"' + value.getContentIdentity() + '\"';
+ } else {
+ return null;
+ }
+
+ }
+
+ /** Sets jcr:lastModified(By) from the event's date and user id. */
+ protected synchronized void setLastModified(Node node, Event event) throws RepositoryException {
+ GregorianCalendar calendar = new GregorianCalendar();
+ calendar.setTimeInMillis(event.getDate());
+ node.setProperty(Property.JCR_LAST_MODIFIED, calendar);
+ node.setProperty(Property.JCR_LAST_MODIFIED_BY, event.getUserID());
+ if (log.isTraceEnabled())
+ log.trace("Last modified set on " + node);
+ }
+
+ /** Recursively set the last updated time on parents. */
+ protected synchronized void setLastModifiedRecursive(Node node, Event event) throws RepositoryException {
+ if (versionManager.isCheckedOut(node.getPath())) {
+ if (node.isNodeType(NodeType.MIX_LAST_MODIFIED)) {
+ setLastModified(node, event);
+ }
+ if (node.isNodeType(NodeType.NT_FOLDER) && !node.isNodeType(NodeType.MIX_LAST_MODIFIED)) {
+ node.addMixin(NodeType.MIX_LAST_MODIFIED);
+ if (log.isTraceEnabled())
+ log.trace("Last modified mix-in added to " + node);
+ }
+
+ }
+
+ // end condition
+ if (node.getDepth() == 0) {
+// try {
+// node.getSession().save();
+// } catch (RepositoryException e) {
+// log.warn("Cannot index workspace", e);
+// }
+ return;
+ } else {
+ Node parent = node.getParent();
+ setLastModifiedRecursive(parent, event);
+ }
+ }
+
+ /**
+  * Recursively set the last updated time on parents. Useful to use paths when
+  * dealing with deletions.
+  */
+ protected synchronized void setLastModifiedRecursive(String path, Event event) throws RepositoryException {
+ // root node will always exist, so end condition is delegated to the other
+ // recursive setLastModified method
+ if (session.nodeExists(path)) {
+ setLastModifiedRecursive(session.getNode(path), event);
+ } else {
+ setLastModifiedRecursive(JcrUtils.parentPath(path), event);
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "Indexer for workspace " + workspaceName + " of repository " + cn;
+ }
+
+ /** Drains the event queue until destroy() is called and it is empty. */
+ class IndexingThread extends Thread {
+
+ public IndexingThread() {
+ super(CmsWorkspaceIndexer.this.toString());
+ }
+
+ @Override
+ public void run() {
+ life: while (session != null && session.isLive()) {
+ try {
+ Event nextEvent = toProcess.take();
+ processEvent(nextEvent);
+ } catch (InterruptedException e) {
+ // silent: interruption is the shutdown signal, checked below
+ interrupted();
+ }
+
+ if (stopping.get() && toProcess.isEmpty()) {
+ break life;
+ }
+ }
+ if (log.isDebugEnabled())
+ log.debug(CmsWorkspaceIndexer.this.toString() + " has shut down.");
+ }
+
+ }
+
+}
\ No newline at end of file
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import static org.argeo.api.DataModelNamespace.CMS_DATA_MODEL_NAMESPACE;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.argeo.api.DataModelNamespace;
+import org.argeo.cms.CmsException;
+import org.osgi.framework.Bundle;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.BundleEvent;
+import org.osgi.framework.BundleListener;
+import org.osgi.framework.wiring.BundleCapability;
+import org.osgi.framework.wiring.BundleWire;
+import org.osgi.framework.wiring.BundleWiring;
+
+/**
+ * Tracks the CMS data models provided by OSGi bundles via the
+ * cms.datamodel capability namespace, resolving their requirements.
+ */
+class DataModels implements BundleListener {
+ private final static Log log = LogFactory.getLog(DataModels.class);
+
+ private Map<String, DataModel> dataModels = new TreeMap<>();
+
+ public DataModels(BundleContext bc) {
+ for (Bundle bundle : bc.getBundles())
+ processBundle(bundle, null);
+ bc.addBundleListener(this);
+ }
+
+ /** All registered data models which are not marked abstract. */
+ public List<DataModel> getNonAbstractDataModels() {
+ List<DataModel> res = new ArrayList<>();
+ for (String name : dataModels.keySet()) {
+ DataModel dataModel = dataModels.get(name);
+ if (!dataModel.isAbstract())
+ res.add(dataModel);
+ }
+ // TODO reorder?
+ return res;
+ }
+
+ @Override
+ public void bundleChanged(BundleEvent event) {
+ // fixed: event types must be compared with BundleEvent constants, not
+ // Bundle state constants: Bundle.RESOLVED (0x04) is the value of
+ // BundleEvent.STOPPED and Bundle.UNINSTALLED (0x01) the value of
+ // BundleEvent.INSTALLED, so the wrong events were being matched
+ if (event.getType() == BundleEvent.RESOLVED) {
+ processBundle(event.getBundle(), null);
+ } else if (event.getType() == BundleEvent.UNINSTALLED) {
+ BundleWiring wiring = event.getBundle().adapt(BundleWiring.class);
+ if (wiring == null)// uninstalled bundles may no longer be adaptable
+ return;
+ List<BundleCapability> providedDataModels = wiring.getCapabilities(CMS_DATA_MODEL_NAMESPACE);
+ if (providedDataModels.size() == 0)
+ return;
+ for (BundleCapability bundleCapability : providedDataModels) {
+ dataModels.remove(bundleCapability.getAttributes().get(DataModelNamespace.NAME));
+ }
+ }
+
+ }
+
+ /**
+  * Registers the data models provided by this bundle, first recursively
+  * processing the bundles providing its required data models.
+  *
+  * @param scannedBundles bundles already seen on this requirement chain,
+  *                       used to detect cycles; null at the entry point
+  */
+ protected void processBundle(Bundle bundle, List<Bundle> scannedBundles) {
+ if (scannedBundles != null && scannedBundles.contains(bundle))
+ throw new IllegalStateException("Cycle in CMS data model requirements for " + bundle);
+ BundleWiring wiring = bundle.adapt(BundleWiring.class);
+ if (wiring == null) {
+ int bundleState = bundle.getState();
+ if (bundleState != Bundle.INSTALLED && bundleState != Bundle.UNINSTALLED) {// ignore unresolved bundles
+ log.warn("Bundle " + bundle.getSymbolicName() + " #" + bundle.getBundleId() + " ("
+ + bundle.getLocation() + ") cannot be adapted to a wiring");
+ } else {
+ if (log.isTraceEnabled())
+ log.trace("Bundle " + bundle.getSymbolicName() + " is not resolved.");
+ }
+ return;
+ }
+ List<BundleCapability> providedDataModels = wiring.getCapabilities(CMS_DATA_MODEL_NAMESPACE);
+ if (providedDataModels.size() == 0)
+ return;
+ List<BundleWire> requiredDataModels = wiring.getRequiredWires(CMS_DATA_MODEL_NAMESPACE);
+ // process requirements first
+ for (BundleWire bundleWire : requiredDataModels) {
+ List<Bundle> nextScannedBundles = new ArrayList<>();
+ if (scannedBundles != null)
+ nextScannedBundles.addAll(scannedBundles);
+ nextScannedBundles.add(bundle);
+ Bundle providerBundle = bundleWire.getProvider().getBundle();
+ processBundle(providerBundle, nextScannedBundles);
+ }
+ for (BundleCapability bundleCapability : providedDataModels) {
+ String name = (String) bundleCapability.getAttributes().get(DataModelNamespace.NAME);
+ assert name != null;
+ if (!dataModels.containsKey(name)) {
+ DataModel dataModel = new DataModel(name, bundleCapability, requiredDataModels);
+ dataModels.put(dataModel.getName(), dataModel);
+ }
+ }
+ }
+
+ /** Return a negative depth if dataModel is required by ref, 0 otherwise. */
+ static int required(DataModel ref, DataModel dataModel, int depth) {
+ for (DataModel dm : ref.getRequired()) {
+ if (dm.equals(dataModel))// found here
+ return depth - 1;
+ int d = required(dm, dataModel, depth - 1);
+ if (d != 0)// found deeper
+ return d;
+ }
+ return 0;// not found
+ }
+
+ /** A data model declared by a bundle capability, identified by its name. */
+ class DataModel {
+ private final String name;
+ private final boolean abstrct;
+ // private final boolean standalone;
+ private final String cnd;
+ private final List<DataModel> required;
+
+ private DataModel(String name, BundleCapability bundleCapability, List<BundleWire> requiredDataModels) {
+ assert CMS_DATA_MODEL_NAMESPACE.equals(bundleCapability.getNamespace());
+ this.name = name;
+ Map<String, Object> attrs = bundleCapability.getAttributes();
+ abstrct = KernelUtils.asBoolean((String) attrs.get(DataModelNamespace.ABSTRACT));
+ // standalone = KernelUtils.asBoolean((String)
+ // attrs.get(DataModelNamespace.CAPABILITY_STANDALONE_ATTRIBUTE));
+ cnd = (String) attrs.get(DataModelNamespace.CND);
+ List<DataModel> req = new ArrayList<>();
+ for (BundleWire wire : requiredDataModels) {
+ String requiredDataModelName = (String) wire.getCapability().getAttributes()
+ .get(DataModelNamespace.NAME);
+ assert requiredDataModelName != null;
+ DataModel requiredDataModel = dataModels.get(requiredDataModelName);
+ if (requiredDataModel == null)
+ throw new CmsException("No required data model " + requiredDataModelName);
+ req.add(requiredDataModel);
+ }
+ required = Collections.unmodifiableList(req);
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public boolean isAbstract() {
+ return abstrct;
+ }
+
+ // public boolean isStandalone() {
+ // return !isAbstract();
+ // }
+
+ /** Path to the CND resource defining this data model, may be null. */
+ public String getCnd() {
+ return cnd;
+ }
+
+ public List<DataModel> getRequired() {
+ return required;
+ }
+
+ // @Override
+ // public int compareTo(DataModel o) {
+ // if (equals(o))
+ // return 0;
+ // int res = required(this, o, 0);
+ // if (res != 0)
+ // return res;
+ // // the other way round
+ // res = required(o, this, 0);
+ // if (res != 0)
+ // return -res;
+ // return 0;
+ // }
+
+ @Override
+ public int hashCode() {
+ return name.hashCode();
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj instanceof DataModel)
+ return ((DataModel) obj).name.equals(name);
+ return false;
+ }
+
+ @Override
+ public String toString() {
+ return "Data model " + name;
+ }
+
+ }
+
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import java.security.PrivilegedAction;
+import java.text.SimpleDateFormat;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.jcr.Node;
+import javax.jcr.Property;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.nodetype.NodeType;
+import javax.jcr.security.Privilege;
+import javax.naming.InvalidNameException;
+import javax.naming.ldap.LdapName;
+import javax.security.auth.Subject;
+import javax.security.auth.login.LoginContext;
+
+import org.argeo.api.NodeConstants;
+import org.argeo.api.NodeUtils;
+import org.argeo.cms.CmsException;
+import org.argeo.jcr.JcrException;
+import org.argeo.jcr.JcrRepositoryWrapper;
+import org.argeo.jcr.JcrUtils;
+
+/**
+ * Make sure each user has a home directory available.
+ */
+class EgoRepository extends JcrRepositoryWrapper implements KernelConstants {
+
+ /** The home base path. */
+// private String homeBasePath = KernelConstants.DEFAULT_HOME_BASE_PATH;
+// private String usersBasePath = KernelConstants.DEFAULT_USERS_BASE_PATH;
+// private String groupsBasePath = KernelConstants.DEFAULT_GROUPS_BASE_PATH;
+
+ private Set<String> checkedUsers = new HashSet<String>();
+
+ private SimpleDateFormat usersDatePath = new SimpleDateFormat("YYYY/MM");
+
+ private String defaultHomeWorkspace = NodeConstants.HOME_WORKSPACE;
+ private String defaultGroupsWorkspace = NodeConstants.SRV_WORKSPACE;
+// private String defaultGuestsWorkspace = NodeConstants.GUESTS_WORKSPACE;
+ private final boolean remote;
+
+ public EgoRepository(Repository repository, boolean remote) {
+ super(repository);
+ this.remote = remote;
+ putDescriptor(NodeConstants.CN, NodeConstants.EGO_REPOSITORY);
+ if (!remote) {
+ LoginContext lc;
+ try {
+ lc = new LoginContext(NodeConstants.LOGIN_CONTEXT_DATA_ADMIN);
+ lc.login();
+ } catch (javax.security.auth.login.LoginException e1) {
+ throw new IllegalStateException("Cannot login as system", e1);
+ }
+ Subject.doAs(lc.getSubject(), new PrivilegedAction<Void>() {
+
+ @Override
+ public Void run() {
+ loginOrCreateWorkspace(defaultHomeWorkspace);
+ loginOrCreateWorkspace(defaultGroupsWorkspace);
+ return null;
+ }
+
+ });
+ }
+ }
+
+ private void loginOrCreateWorkspace(String workspace) {
+ Session adminSession = null;
+ try {
+ adminSession = JcrUtils.loginOrCreateWorkspace(getRepository(workspace), workspace);
+// JcrUtils.addPrivilege(adminSession, "/", NodeConstants.ROLE_USER, Privilege.JCR_READ);
+
+// initJcr(adminSession);
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot init JCR home", e);
+ } finally {
+ JcrUtils.logoutQuietly(adminSession);
+ }
+ }
+
+// @Override
+// public Session login(Credentials credentials, String workspaceName)
+// throws LoginException, NoSuchWorkspaceException, RepositoryException {
+// if (workspaceName == null) {
+// return super.login(credentials, getUserHomeWorkspace());
+// } else {
+// return super.login(credentials, workspaceName);
+// }
+// }
+
+ protected String getUserHomeWorkspace() {
+ // TODO base on JAAS Subject metadata
+ return defaultHomeWorkspace;
+ }
+
+ protected String getGroupsWorkspace() {
+ // TODO base on JAAS Subject metadata
+ return defaultGroupsWorkspace;
+ }
+
+// protected String getGuestsWorkspace() {
+// // TODO base on JAAS Subject metadata
+// return defaultGuestsWorkspace;
+// }
+
+ @Override
+ protected void processNewSession(Session session, String workspaceName) {
+ String username = session.getUserID();
+ if (username == null || username.toString().equals(""))
+ return;
+ if (session.getUserID().equals(NodeConstants.ROLE_ANONYMOUS))
+ return;
+
+ String userHomeWorkspace = getUserHomeWorkspace();
+ if (workspaceName == null || !workspaceName.equals(userHomeWorkspace))
+ return;
+
+ if (checkedUsers.contains(username))
+ return;
+ Session adminSession = KernelUtils.openAdminSession(getRepository(workspaceName), workspaceName);
+ try {
+ syncJcr(adminSession, username);
+ checkedUsers.add(username);
+ } finally {
+ JcrUtils.logoutQuietly(adminSession);
+ }
+ }
+
+ /*
+ * JCR
+ */
+ /** Session is logged out. */
+ private void initJcr(Session adminSession) {
+ try {
+// JcrUtils.mkdirs(adminSession, homeBasePath);
+// JcrUtils.mkdirs(adminSession, groupsBasePath);
+ adminSession.save();
+
+// JcrUtils.addPrivilege(adminSession, homeBasePath, NodeConstants.ROLE_USER_ADMIN, Privilege.JCR_READ);
+// JcrUtils.addPrivilege(adminSession, groupsBasePath, NodeConstants.ROLE_USER_ADMIN, Privilege.JCR_READ);
+ adminSession.save();
+ } catch (RepositoryException e) {
+ throw new CmsException("Cannot initialize home repository", e);
+ } finally {
+ JcrUtils.logoutQuietly(adminSession);
+ }
+ }
+
+ protected synchronized void syncJcr(Session adminSession, String username) {
+ // only in the default workspace
+// if (workspaceName != null)
+// return;
+ // skip system users
+ if (username.endsWith(NodeConstants.ROLES_BASEDN))
+ return;
+
+ try {
+ Node userHome = NodeUtils.getUserHome(adminSession, username);
+ if (userHome == null) {
+// String homePath = generateUserPath(username);
+ String userId = extractUserId(username);
+// if (adminSession.itemExists(homePath))// duplicate user id
+// userHome = adminSession.getNode(homePath).getParent().addNode(JcrUtils.lastPathElement(homePath));
+// else
+// userHome = JcrUtils.mkdirs(adminSession, homePath);
+ userHome = adminSession.getRootNode().addNode(userId);
+// userHome.addMixin(NodeTypes.NODE_USER_HOME);
+ userHome.addMixin(NodeType.MIX_CREATED);
+ userHome.addMixin(NodeType.MIX_TITLE);
+ userHome.setProperty(Property.JCR_ID, username);
+ // TODO use display name
+ userHome.setProperty(Property.JCR_TITLE, userId);
+// userHome.setProperty(NodeNames.LDAP_UID, username);
+ adminSession.save();
+
+ JcrUtils.clearAccessControList(adminSession, userHome.getPath(), username);
+ JcrUtils.addPrivilege(adminSession, userHome.getPath(), username, Privilege.JCR_ALL);
+// JackrabbitSecurityUtils.denyPrivilege(adminSession, userHome.getPath(), NodeConstants.ROLE_USER,
+// Privilege.JCR_READ);
+ }
+ if (adminSession.hasPendingChanges())
+ adminSession.save();
+ } catch (RepositoryException e) {
+ JcrUtils.discardQuietly(adminSession);
+ throw new JcrException("Cannot sync node security model for " + username, e);
+ }
+ }
+
+ /** Generate path for a new user home */
+ private String generateUserPath(String username) {
+ LdapName dn;
+ try {
+ dn = new LdapName(username);
+ } catch (InvalidNameException e) {
+ throw new CmsException("Invalid name " + username, e);
+ }
+ String userId = dn.getRdn(dn.size() - 1).getValue().toString();
+ return '/' + userId;
+// int atIndex = userId.indexOf('@');
+// if (atIndex < 0) {
+// return homeBasePath+'/' + userId;
+// } else {
+// return usersBasePath + '/' + usersDatePath.format(new Date()) + '/' + userId;
+// }
+ }
+
+ private String extractUserId(String username) {
+ LdapName dn;
+ try {
+ dn = new LdapName(username);
+ } catch (InvalidNameException e) {
+ throw new CmsException("Invalid name " + username, e);
+ }
+ String userId = dn.getRdn(dn.size() - 1).getValue().toString();
+ return userId;
+// int atIndex = userId.indexOf('@');
+// if (atIndex < 0) {
+// return homeBasePath+'/' + userId;
+// } else {
+// return usersBasePath + '/' + usersDatePath.format(new Date()) + '/' + userId;
+// }
+ }
+
+ public void createWorkgroup(LdapName dn) {
+ String groupsWorkspace = getGroupsWorkspace();
+ Session adminSession = KernelUtils.openAdminSession(getRepository(groupsWorkspace), groupsWorkspace);
+ String cn = dn.getRdn(dn.size() - 1).getValue().toString();
+ Node newWorkgroup = NodeUtils.getGroupHome(adminSession, cn);
+ if (newWorkgroup != null) {
+ JcrUtils.logoutQuietly(adminSession);
+ throw new CmsException("Workgroup " + newWorkgroup + " already exists for " + dn);
+ }
+ try {
+ // TODO enhance transformation of cn to a valid node name
+ // String relPath = cn.replaceAll("[^a-zA-Z0-9]", "_");
+ String relPath = JcrUtils.replaceInvalidChars(cn);
+ newWorkgroup = adminSession.getRootNode().addNode(relPath, NodeType.NT_UNSTRUCTURED);
+// newWorkgroup = JcrUtils.mkdirs(adminSession.getNode(groupsBasePath), relPath, NodeType.NT_UNSTRUCTURED);
+// newWorkgroup.addMixin(NodeTypes.NODE_GROUP_HOME);
+ newWorkgroup.addMixin(NodeType.MIX_CREATED);
+ newWorkgroup.addMixin(NodeType.MIX_TITLE);
+ newWorkgroup.setProperty(Property.JCR_ID, dn.toString());
+ newWorkgroup.setProperty(Property.JCR_TITLE, cn);
+// newWorkgroup.setProperty(NodeNames.LDAP_CN, cn);
+ adminSession.save();
+ JcrUtils.addPrivilege(adminSession, newWorkgroup.getPath(), dn.toString(), Privilege.JCR_ALL);
+ adminSession.save();
+ } catch (RepositoryException e) {
+ throw new CmsException("Cannot create workgroup", e);
+ } finally {
+ JcrUtils.logoutQuietly(adminSession);
+ }
+
+ }
+
+ public boolean isRemote() {
+ return remote;
+ }
+
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import java.util.Map;
+import java.util.TreeMap;
+
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.core.RepositoryImpl;
+import org.argeo.api.NodeConstants;
+
+/**
+ * Local Jackrabbit repository wrapper which can attach a
+ * {@link CmsWorkspaceIndexer} to each workspace.
+ * NOTE(review): all calls to addMonitor() are currently commented out, so
+ * workspaceMonitors stays empty and destroy() is effectively a no-op —
+ * confirm whether indexing is meant to be disabled.
+ */
+class JackrabbitLocalRepository extends LocalRepository {
+ private final static Log log = LogFactory.getLog(JackrabbitLocalRepository.class);
+ final String SECURITY_WORKSPACE = "security";
+
+ // one indexer per monitored workspace, keyed by workspace name
+ private Map<String, CmsWorkspaceIndexer> workspaceMonitors = new TreeMap<>();
+
+ public JackrabbitLocalRepository(RepositoryImpl repository, String cn) {
+ super(repository, cn);
+// Session session = KernelUtils.openAdminSession(repository);
+// try {
+// if (NodeConstants.NODE.equals(cn))
+// for (String workspaceName : session.getWorkspace().getAccessibleWorkspaceNames()) {
+// addMonitor(workspaceName);
+// }
+// } catch (RepositoryException e) {
+// throw new IllegalStateException(e);
+// } finally {
+// JcrUtils.logoutQuietly(session);
+// }
+ }
+
+ protected RepositoryImpl getJackrabbitrepository(String workspaceName) {
+ return (RepositoryImpl) getRepository(workspaceName);
+ }
+
+ @Override
+ protected synchronized void processNewSession(Session session, String workspaceName) {
+// String realWorkspaceName = session.getWorkspace().getName();
+// addMonitor(realWorkspaceName);
+ }
+
+ /**
+  * Lazily creates and starts an indexer for this workspace, skipping the
+  * security workspace and repositories other than the node repository.
+  */
+ private void addMonitor(String realWorkspaceName) {
+ if (realWorkspaceName.equals(SECURITY_WORKSPACE))
+ return;
+ if (!NodeConstants.NODE_REPOSITORY.equals(getCn()))
+ return;
+
+ if (!workspaceMonitors.containsKey(realWorkspaceName)) {
+ try {
+ CmsWorkspaceIndexer workspaceMonitor = new CmsWorkspaceIndexer(
+ getJackrabbitrepository(realWorkspaceName), getCn(), realWorkspaceName);
+ workspaceMonitors.put(realWorkspaceName, workspaceMonitor);
+ workspaceMonitor.init();
+ if (log.isDebugEnabled())
+ log.debug("Registered " + workspaceMonitor);
+ } catch (RepositoryException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+ }
+
+ /** Stops all workspace indexers. */
+ public void destroy() {
+ for (String workspaceName : workspaceMonitors.keySet()) {
+ workspaceMonitors.get(workspaceName).destroy();
+ }
+ }
+
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import static org.argeo.api.DataModelNamespace.CMS_DATA_MODEL_NAMESPACE;
+import static org.osgi.service.http.whiteboard.HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Hashtable;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.security.auth.callback.CallbackHandler;
+import javax.servlet.Servlet;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.commons.cnd.CndImporter;
+import org.apache.jackrabbit.core.RepositoryContext;
+import org.apache.jackrabbit.core.RepositoryImpl;
+import org.argeo.api.DataModelNamespace;
+import org.argeo.api.NodeConstants;
+import org.argeo.api.NodeDeployment;
+import org.argeo.api.NodeUtils;
+import org.argeo.api.security.CryptoKeyring;
+import org.argeo.api.security.Keyring;
+import org.argeo.cms.ArgeoNames;
+import org.argeo.cms.internal.jcr.JcrInitUtils;
+import org.argeo.cms.jcr.internal.servlet.CmsRemotingServlet;
+import org.argeo.cms.jcr.internal.servlet.CmsWebDavServlet;
+import org.argeo.cms.jcr.internal.servlet.JcrHttpUtils;
+import org.argeo.jcr.Jcr;
+import org.argeo.jcr.JcrException;
+import org.argeo.jcr.JcrUtils;
+import org.argeo.naming.LdapAttrs;
+import org.argeo.util.LangUtils;
+import org.osgi.framework.Bundle;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.Constants;
+import org.osgi.framework.FrameworkUtil;
+import org.osgi.framework.InvalidSyntaxException;
+import org.osgi.framework.ServiceReference;
+import org.osgi.framework.wiring.BundleCapability;
+import org.osgi.framework.wiring.BundleWire;
+import org.osgi.framework.wiring.BundleWiring;
+import org.osgi.service.cm.ManagedService;
+import org.osgi.service.http.whiteboard.HttpWhiteboardConstants;
+import org.osgi.util.tracker.ServiceTracker;
+
+/** Implementation of a CMS deployment. */
+public class JcrDeployment {
+ private final Log log = LogFactory.getLog(getClass());
+ private final BundleContext bc = FrameworkUtil.getBundle(getClass()).getBundleContext();
+
+ private DataModels dataModels;
+ private String webDavConfig = JcrHttpUtils.WEBDAV_CONFIG;
+
+ private boolean argeoDataModelExtensionsAvailable = false;
+
+ // Readiness
+ private boolean nodeAvailable = false;
+
+ NodeDeployment nodeDeployment;
+
+ /** Scans the already installed bundles for CMS data models. */
+ public JcrDeployment() {
+ dataModels = new DataModels(bc);
+// initTrackers();
+ }
+
+ /**
+  * Starts the deployment: asynchronously opens the repository context
+  * tracker and wires this JCR deployment into the node deployment.
+  * NOTE(review): nodeDeployment is only set via setNodeDeployment(); if
+  * init() runs before injection it is passed as null — confirm ordering.
+  */
+ public void init() {
+
+ ServiceTracker<?, ?> repoContextSt = new RepositoryContextStc();
+ // repoContextSt.open();
+ KernelUtils.asyncOpen(repoContextSt);
+
+// nodeDeployment = CmsJcrActivator.getService(NodeDeployment.class);
+
+ JcrInitUtils.addToDeployment(nodeDeployment);
+
+ }
+
+ /**
+  * Shuts down all registered local Jackrabbit repositories.
+  * NOTE(review): bc.getService() references are never released with
+  * ungetService() — acceptable at framework shutdown, confirm otherwise.
+  */
+ public void destroy() {
+// if (nodeHttp != null)
+// nodeHttp.destroy();
+
+ try {
+ for (ServiceReference<JackrabbitLocalRepository> sr : bc
+ .getServiceReferences(JackrabbitLocalRepository.class, null)) {
+ bc.getService(sr).destroy();
+ }
+ } catch (InvalidSyntaxException e1) {
+ log.error("Cannot clean repositories", e1);
+ }
+
+ }
+
+ /** Injection of the node deployment (e.g. by declarative services). */
+ public void setNodeDeployment(NodeDeployment nodeDeployment) {
+ this.nodeDeployment = nodeDeployment;
+ }
+
+ /**
+ * Checks whether the deployment is available according to expectations, and
+ * mark it as available.
+ */
+// private synchronized void checkReadiness() {
+// if (isAvailable())
+// return;
+// if (nodeAvailable && userAdminAvailable && (httpExpected ? httpAvailable : true)) {
+// String data = KernelUtils.getFrameworkProp(KernelUtils.OSGI_INSTANCE_AREA);
+// String state = KernelUtils.getFrameworkProp(KernelUtils.OSGI_CONFIGURATION_AREA);
+// availableSince = System.currentTimeMillis();
+// long jvmUptime = ManagementFactory.getRuntimeMXBean().getUptime();
+// String jvmUptimeStr = " in " + (jvmUptime / 1000) + "." + (jvmUptime % 1000) + "s";
+// log.info("## ARGEO NODE AVAILABLE" + (log.isDebugEnabled() ? jvmUptimeStr : "") + " ##");
+// if (log.isDebugEnabled()) {
+// log.debug("## state: " + state);
+// if (data != null)
+// log.debug("## data: " + data);
+// }
+// long begin = bc.getService(bc.getServiceReference(NodeState.class)).getAvailableSince();
+// long initDuration = System.currentTimeMillis() - begin;
+// if (log.isTraceEnabled())
+// log.trace("Kernel initialization took " + initDuration + "ms");
+// tributeToFreeSoftware(initDuration);
+// }
+// }
+
+ private void prepareNodeRepository(Repository deployedNodeRepository, List<String> publishAsLocalRepo) {
+// if (availableSince != null) {
+// throw new IllegalStateException("Deployment is already available");
+// }
+
+ // home
+ prepareDataModel(NodeConstants.NODE_REPOSITORY, deployedNodeRepository, publishAsLocalRepo);
+
+ // init from backup
+// if (deployConfig.isFirstInit()) {
+// Path restorePath = Paths.get(System.getProperty("user.dir"), "restore");
+// if (Files.exists(restorePath)) {
+// if (log.isDebugEnabled())
+// log.debug("Found backup " + restorePath + ", restoring it...");
+// LogicalRestore logicalRestore = new LogicalRestore(bc, deployedNodeRepository, restorePath);
+// KernelUtils.doAsDataAdmin(logicalRestore);
+// log.info("Restored backup from " + restorePath);
+// }
+// }
+
+ // init from repository
+ Collection<ServiceReference<Repository>> initRepositorySr;
+ try {
+ initRepositorySr = bc.getServiceReferences(Repository.class,
+ "(" + NodeConstants.CN + "=" + NodeConstants.NODE_INIT + ")");
+ } catch (InvalidSyntaxException e1) {
+ throw new IllegalArgumentException(e1);
+ }
+ Iterator<ServiceReference<Repository>> it = initRepositorySr.iterator();
+ while (it.hasNext()) {
+ ServiceReference<Repository> sr = it.next();
+ Object labeledUri = sr.getProperties().get(LdapAttrs.labeledURI.name());
+ Repository initRepository = bc.getService(sr);
+ if (log.isDebugEnabled())
+ log.debug("Found init repository " + labeledUri + ", copying it...");
+ initFromRepository(deployedNodeRepository, initRepository);
+ log.info("Node repository initialised from " + labeledUri);
+ }
+ }
+
+ /** Init from a (typically remote) repository. */
+ private void initFromRepository(Repository deployedNodeRepository, Repository initRepository) {
+ Session initSession = null;
+ try {
+ initSession = initRepository.login();
+ workspaces: for (String workspaceName : initSession.getWorkspace().getAccessibleWorkspaceNames()) {
+ if ("security".equals(workspaceName))
+ continue workspaces;
+ if (log.isDebugEnabled())
+ log.debug("Copying workspace " + workspaceName + " from init repository...");
+ long begin = System.currentTimeMillis();
+ Session targetSession = null;
+ Session sourceSession = null;
+ try {
+ try {
+ targetSession = NodeUtils.openDataAdminSession(deployedNodeRepository, workspaceName);
+ } catch (IllegalArgumentException e) {// no such workspace
+ Session adminSession = NodeUtils.openDataAdminSession(deployedNodeRepository, null);
+ try {
+ adminSession.getWorkspace().createWorkspace(workspaceName);
+ } finally {
+ Jcr.logout(adminSession);
+ }
+ targetSession = NodeUtils.openDataAdminSession(deployedNodeRepository, workspaceName);
+ }
+ sourceSession = initRepository.login(workspaceName);
+// JcrUtils.copyWorkspaceXml(sourceSession, targetSession);
+ // TODO deal with referenceable nodes
+ JcrUtils.copy(sourceSession.getRootNode(), targetSession.getRootNode());
+ targetSession.save();
+ long duration = System.currentTimeMillis() - begin;
+ if (log.isDebugEnabled())
+ log.debug("Copied workspace " + workspaceName + " from init repository in " + (duration / 1000)
+ + " s");
+ } catch (Exception e) {
+ log.error("Cannot copy workspace " + workspaceName + " from init repository.", e);
+ } finally {
+ Jcr.logout(sourceSession);
+ Jcr.logout(targetSession);
+ }
+ }
+ } catch (RepositoryException e) {
+ throw new JcrException(e);
+ } finally {
+ Jcr.logout(initSession);
+ }
+ }
+
+ private void prepareHomeRepository(RepositoryImpl deployedRepository) {
+ Session adminSession = KernelUtils.openAdminSession(deployedRepository);
+ try {
+ argeoDataModelExtensionsAvailable = Arrays
+ .asList(adminSession.getWorkspace().getNamespaceRegistry().getURIs())
+ .contains(ArgeoNames.ARGEO_NAMESPACE);
+ } catch (RepositoryException e) {
+ log.warn("Cannot check whether Argeo namespace is registered assuming it isn't.", e);
+ argeoDataModelExtensionsAvailable = false;
+ } finally {
+ JcrUtils.logoutQuietly(adminSession);
+ }
+
+ // Publish home with the highest service ranking
+ Hashtable<String, Object> regProps = new Hashtable<>();
+ regProps.put(NodeConstants.CN, NodeConstants.EGO_REPOSITORY);
+ regProps.put(Constants.SERVICE_RANKING, Integer.MAX_VALUE);
+ Repository egoRepository = new EgoRepository(deployedRepository, false);
+ bc.registerService(Repository.class, egoRepository, regProps);
+ registerRepositoryServlets(NodeConstants.EGO_REPOSITORY, egoRepository);
+
+ // Keyring only if Argeo extensions are available
+ if (argeoDataModelExtensionsAvailable) {
+ new ServiceTracker<CallbackHandler, CallbackHandler>(bc, CallbackHandler.class, null) {
+
+ @Override
+ public CallbackHandler addingService(ServiceReference<CallbackHandler> reference) {
+ NodeKeyRing nodeKeyring = new NodeKeyRing(egoRepository);
+ CallbackHandler callbackHandler = bc.getService(reference);
+ nodeKeyring.setDefaultCallbackHandler(callbackHandler);
+ bc.registerService(LangUtils.names(Keyring.class, CryptoKeyring.class, ManagedService.class),
+ nodeKeyring, LangUtils.dict(Constants.SERVICE_PID, NodeConstants.NODE_KEYRING_PID));
+ return callbackHandler;
+ }
+
+ }.open();
+ }
+ }
+
+ /** Session is logged out. */
+ private void prepareDataModel(String cn, Repository repository, List<String> publishAsLocalRepo) {
+ Session adminSession = KernelUtils.openAdminSession(repository);
+ try {
+ Set<String> processed = new HashSet<String>();
+ bundles: for (Bundle bundle : bc.getBundles()) {
+ BundleWiring wiring = bundle.adapt(BundleWiring.class);
+ if (wiring == null)
+ continue bundles;
+ if (NodeConstants.NODE_REPOSITORY.equals(cn))// process all data models
+ processWiring(cn, adminSession, wiring, processed, false, publishAsLocalRepo);
+ else {
+ List<BundleCapability> capabilities = wiring.getCapabilities(CMS_DATA_MODEL_NAMESPACE);
+ for (BundleCapability capability : capabilities) {
+ String dataModelName = (String) capability.getAttributes().get(DataModelNamespace.NAME);
+ if (dataModelName.equals(cn))// process only own data model
+ processWiring(cn, adminSession, wiring, processed, false, publishAsLocalRepo);
+ }
+ }
+ }
+ } finally {
+ JcrUtils.logoutQuietly(adminSession);
+ }
+ }
+
+ private void processWiring(String cn, Session adminSession, BundleWiring wiring, Set<String> processed,
+ boolean importListedAbstractModels, List<String> publishAsLocalRepo) {
+ // recursively process requirements first
+ List<BundleWire> requiredWires = wiring.getRequiredWires(CMS_DATA_MODEL_NAMESPACE);
+ for (BundleWire wire : requiredWires) {
+ processWiring(cn, adminSession, wire.getProviderWiring(), processed, true, publishAsLocalRepo);
+ }
+
+ List<BundleCapability> capabilities = wiring.getCapabilities(CMS_DATA_MODEL_NAMESPACE);
+ capabilities: for (BundleCapability capability : capabilities) {
+ if (!importListedAbstractModels
+ && KernelUtils.asBoolean((String) capability.getAttributes().get(DataModelNamespace.ABSTRACT))) {
+ continue capabilities;
+ }
+ boolean publish = registerDataModelCapability(cn, adminSession, capability, processed);
+ if (publish)
+ publishAsLocalRepo.add((String) capability.getAttributes().get(DataModelNamespace.NAME));
+ }
+ }
+
+ private boolean registerDataModelCapability(String cn, Session adminSession, BundleCapability capability,
+ Set<String> processed) {
+ Map<String, Object> attrs = capability.getAttributes();
+ String name = (String) attrs.get(DataModelNamespace.NAME);
+ if (processed.contains(name)) {
+ if (log.isTraceEnabled())
+ log.trace("Data model " + name + " has already been processed");
+ return false;
+ }
+
+ // CND
+ String path = (String) attrs.get(DataModelNamespace.CND);
+ if (path != null) {
+ File dataModel = bc.getBundle().getDataFile("dataModels/" + path);
+ if (!dataModel.exists()) {
+ URL url = capability.getRevision().getBundle().getResource(path);
+ if (url == null)
+ throw new IllegalArgumentException("No data model '" + name + "' found under path " + path);
+ try (Reader reader = new InputStreamReader(url.openStream())) {
+ CndImporter.registerNodeTypes(reader, adminSession, true);
+ processed.add(name);
+ dataModel.getParentFile().mkdirs();
+ dataModel.createNewFile();
+ if (log.isDebugEnabled())
+ log.debug("Registered CND " + url);
+ } catch (Exception e) {
+ log.error("Cannot import CND " + url, e);
+ }
+ }
+ }
+
+ if (KernelUtils.asBoolean((String) attrs.get(DataModelNamespace.ABSTRACT)))
+ return false;
+ // Non abstract
+ boolean isStandalone = isStandalone(name);
+ boolean publishLocalRepo;
+ if (isStandalone && name.equals(cn))// includes the node itself
+ publishLocalRepo = true;
+ else if (!isStandalone && cn.equals(NodeConstants.NODE_REPOSITORY))
+ publishLocalRepo = true;
+ else
+ publishLocalRepo = false;
+
+ return publishLocalRepo;
+ }
+
+ boolean isStandalone(String dataModelName) {
+ return nodeDeployment.getProps(NodeConstants.NODE_REPOS_FACTORY_PID, dataModelName) != null;
+ }
+
+ private void publishLocalRepo(String dataModelName, Repository repository) {
+ Hashtable<String, Object> properties = new Hashtable<>();
+ properties.put(NodeConstants.CN, dataModelName);
+ LocalRepository localRepository;
+ String[] classes;
+ if (repository instanceof RepositoryImpl) {
+ localRepository = new JackrabbitLocalRepository((RepositoryImpl) repository, dataModelName);
+ classes = new String[] { Repository.class.getName(), LocalRepository.class.getName(),
+ JackrabbitLocalRepository.class.getName() };
+ } else {
+ localRepository = new LocalRepository(repository, dataModelName);
+ classes = new String[] { Repository.class.getName(), LocalRepository.class.getName() };
+ }
+ bc.registerService(classes, localRepository, properties);
+
+ // TODO make it configurable
+ registerRepositoryServlets(dataModelName, localRepository);
+ if (log.isTraceEnabled())
+ log.trace("Published data model " + dataModelName);
+ }
+
+// @Override
+// public synchronized Long getAvailableSince() {
+// return availableSince;
+// }
+//
+// public synchronized boolean isAvailable() {
+// return availableSince != null;
+// }
+
+ protected void registerRepositoryServlets(String alias, Repository repository) {
+ // FIXME re-enable it with a proper class loader
+// registerRemotingServlet(alias, repository);
+// registerWebdavServlet(alias, repository);
+ }
+
+ protected void registerWebdavServlet(String alias, Repository repository) {
+ CmsWebDavServlet webdavServlet = new CmsWebDavServlet(alias, repository);
+ Hashtable<String, String> ip = new Hashtable<>();
+ ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsWebDavServlet.INIT_PARAM_RESOURCE_CONFIG, webDavConfig);
+ ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsWebDavServlet.INIT_PARAM_RESOURCE_PATH_PREFIX,
+ "/" + alias);
+
+ ip.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/" + alias + "/*");
+ ip.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_SELECT,
+ "(" + HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_PATH + "=" + NodeConstants.PATH_DATA + ")");
+ bc.registerService(Servlet.class, webdavServlet, ip);
+ }
+
+ protected void registerRemotingServlet(String alias, Repository repository) {
+ CmsRemotingServlet remotingServlet = new CmsRemotingServlet(alias, repository);
+ Hashtable<String, String> ip = new Hashtable<>();
+ ip.put(NodeConstants.CN, alias);
+ // Properties ip = new Properties();
+ ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_RESOURCE_PATH_PREFIX,
+ "/" + alias);
+ ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_AUTHENTICATE_HEADER,
+ "Negotiate");
+
+ // Looks like a bug in Jackrabbit remoting init
+ Path tmpDir;
+ try {
+ tmpDir = Files.createTempDirectory("remoting_" + alias);
+ } catch (IOException e) {
+ throw new RuntimeException("Cannot create temp directory for remoting servlet", e);
+ }
+ ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_HOME, tmpDir.toString());
+ ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_TMP_DIRECTORY,
+ "remoting_" + alias);
+ ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_PROTECTED_HANDLERS_CONFIG,
+ JcrHttpUtils.DEFAULT_PROTECTED_HANDLERS);
+ ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_CREATE_ABSOLUTE_URI, "false");
+
+ ip.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/" + alias + "/*");
+ ip.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_SELECT,
+ "(" + HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_PATH + "=" + NodeConstants.PATH_JCR + ")");
+ bc.registerService(Servlet.class, remotingServlet, ip);
+ }
+
+ private class RepositoryContextStc extends ServiceTracker<RepositoryContext, RepositoryContext> {
+
+ public RepositoryContextStc() {
+ super(bc, RepositoryContext.class, null);
+ }
+
+ @Override
+ public RepositoryContext addingService(ServiceReference<RepositoryContext> reference) {
+ RepositoryContext repoContext = bc.getService(reference);
+ String cn = (String) reference.getProperty(NodeConstants.CN);
+ if (cn != null) {
+ List<String> publishAsLocalRepo = new ArrayList<>();
+ if (cn.equals(NodeConstants.NODE_REPOSITORY)) {
+// JackrabbitDataModelMigration.clearRepositoryCaches(repoContext.getRepositoryConfig());
+ prepareNodeRepository(repoContext.getRepository(), publishAsLocalRepo);
+ // TODO separate home repository
+ prepareHomeRepository(repoContext.getRepository());
+ registerRepositoryServlets(cn, repoContext.getRepository());
+ nodeAvailable = true;
+// checkReadiness();
+ } else {
+ prepareDataModel(cn, repoContext.getRepository(), publishAsLocalRepo);
+ }
+ // Publish all at once, so that bundles with multiple CNDs are consistent
+ for (String dataModelName : publishAsLocalRepo)
+ publishLocalRepo(dataModelName, repoContext.getRepository());
+ }
+ return repoContext;
+ }
+
+ @Override
+ public void modifiedService(ServiceReference<RepositoryContext> reference, RepositoryContext service) {
+ }
+
+ @Override
+ public void removedService(ServiceReference<RepositoryContext> reference, RepositoryContext service) {
+ }
+
+ }
+
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import java.io.ByteArrayInputStream;
+import java.io.CharArrayReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+import java.nio.charset.StandardCharsets;
+import java.security.GeneralSecurityException;
+import java.security.NoSuchAlgorithmException;
+import java.security.Provider;
+import java.security.SecureRandom;
+
+import javax.crypto.Cipher;
+import javax.crypto.CipherInputStream;
+import javax.crypto.NoSuchPaddingException;
+import javax.crypto.SecretKey;
+import javax.crypto.spec.IvParameterSpec;
+import javax.jcr.Binary;
+import javax.jcr.Node;
+import javax.jcr.NodeIterator;
+import javax.jcr.Property;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.query.Query;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.argeo.api.NodeConstants;
+import org.argeo.api.NodeUtils;
+import org.argeo.api.security.PBEKeySpecCallback;
+import org.argeo.cms.ArgeoNames;
+import org.argeo.cms.ArgeoTypes;
+import org.argeo.cms.security.AbstractKeyring;
+import org.argeo.jcr.JcrException;
+import org.argeo.jcr.JcrUtils;
+
+/** JCR-based implementation of a keyring. */
+public class JcrKeyring extends AbstractKeyring implements ArgeoNames {
+ private final static Log log = LogFactory.getLog(JcrKeyring.class);
+ /**
+	 * Stronger with 256, but this causes problems with some Oracle JVMs; force 128 in that case.
+ */
+ public final static Long DEFAULT_SECRETE_KEY_LENGTH = 256l;
+ public final static String DEFAULT_SECRETE_KEY_FACTORY = "PBKDF2WithHmacSHA1";
+ public final static String DEFAULT_SECRETE_KEY_ENCRYPTION = "AES";
+ public final static String DEFAULT_CIPHER_NAME = "AES/CBC/PKCS5Padding";
+
+ private Integer iterationCountFactor = 200;
+ private Long secretKeyLength = DEFAULT_SECRETE_KEY_LENGTH;
+ private String secretKeyFactoryName = DEFAULT_SECRETE_KEY_FACTORY;
+ private String secretKeyEncryption = DEFAULT_SECRETE_KEY_ENCRYPTION;
+ private String cipherName = DEFAULT_CIPHER_NAME;
+
+ private final Repository repository;
+	// TODO remove thread-local session; open a session each time
+ private ThreadLocal<Session> sessionThreadLocal = new ThreadLocal<Session>() {
+
+ @Override
+ protected Session initialValue() {
+ return login();
+ }
+
+ };
+
+ // FIXME is it really still needed?
+ /**
+ * When setup is called the session has not yet been saved and we don't want to
+	 * save it since there may be other data which would be inconsistent. So we keep
+	 * a reference to this node which will then be used (and reset to null) when
+ * handling the PBE callback. We keep one per thread in case multiple users are
+ * accessing the same instance of a keyring.
+ */
+ // private ThreadLocal<Node> notYetSavedKeyring = new ThreadLocal<Node>() {
+ //
+ // @Override
+ // protected Node initialValue() {
+ // return null;
+ // }
+ // };
+
+ public JcrKeyring(Repository repository) {
+ this.repository = repository;
+ }
+
+ private Session session() {
+ Session session = this.sessionThreadLocal.get();
+ if (!session.isLive()) {
+ session = login();
+ sessionThreadLocal.set(session);
+ }
+ return session;
+ }
+
+ private Session login() {
+ try {
+ return repository.login(NodeConstants.HOME_WORKSPACE);
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot login key ring session", e);
+ }
+ }
+
+ @Override
+ protected synchronized Boolean isSetup() {
+ Session session = null;
+ try {
+ // if (notYetSavedKeyring.get() != null)
+ // return true;
+ session = session();
+ session.refresh(true);
+ Node userHome = NodeUtils.getUserHome(session);
+ return userHome.hasNode(ARGEO_KEYRING);
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot check whether keyring is setup", e);
+ } finally {
+ JcrUtils.logoutQuietly(session);
+ }
+ }
+
+ @Override
+ protected synchronized void setup(char[] password) {
+ Binary binary = null;
+ // InputStream in = null;
+ try {
+ session().refresh(true);
+ Node userHome = NodeUtils.getUserHome(session());
+ Node keyring;
+ if (userHome.hasNode(ARGEO_KEYRING)) {
+ throw new IllegalArgumentException("Keyring already set up");
+ } else {
+ keyring = userHome.addNode(ARGEO_KEYRING);
+ }
+ keyring.addMixin(ArgeoTypes.ARGEO_PBE_SPEC);
+
+ // deterministic salt and iteration count based on username
+ String username = session().getUserID();
+ byte[] salt = new byte[8];
+ byte[] usernameBytes = username.getBytes(StandardCharsets.UTF_8);
+ for (int i = 0; i < salt.length; i++) {
+ if (i < usernameBytes.length)
+ salt[i] = usernameBytes[i];
+ else
+ salt[i] = 0;
+ }
+ try (InputStream in = new ByteArrayInputStream(salt);) {
+ binary = session().getValueFactory().createBinary(in);
+ keyring.setProperty(ARGEO_SALT, binary);
+ } catch (IOException e) {
+ throw new RuntimeException("Cannot set keyring salt", e);
+ }
+
+ Integer iterationCount = username.length() * iterationCountFactor;
+ keyring.setProperty(ARGEO_ITERATION_COUNT, iterationCount);
+
+ // default algo
+ // TODO check if algo and key length are available, use DES if not
+ keyring.setProperty(ARGEO_SECRET_KEY_FACTORY, secretKeyFactoryName);
+ keyring.setProperty(ARGEO_KEY_LENGTH, secretKeyLength);
+ keyring.setProperty(ARGEO_SECRET_KEY_ENCRYPTION, secretKeyEncryption);
+ keyring.setProperty(ARGEO_CIPHER, cipherName);
+
+ keyring.getSession().save();
+
+ // encrypted password hash
+ // IOUtils.closeQuietly(in);
+ // JcrUtils.closeQuietly(binary);
+ // byte[] btPass = hash(password, salt, iterationCount);
+ // in = new ByteArrayInputStream(btPass);
+ // binary = session().getValueFactory().createBinary(in);
+ // keyring.setProperty(ARGEO_PASSWORD, binary);
+
+ // notYetSavedKeyring.set(keyring);
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot setup keyring", e);
+ } finally {
+ JcrUtils.closeQuietly(binary);
+ // IOUtils.closeQuietly(in);
+ // JcrUtils.discardQuietly(session());
+ }
+ }
+
+ @Override
+ protected synchronized void handleKeySpecCallback(PBEKeySpecCallback pbeCallback) {
+ Session session = null;
+ try {
+ session = session();
+ session.refresh(true);
+ Node userHome = NodeUtils.getUserHome(session);
+ Node keyring;
+ if (userHome.hasNode(ARGEO_KEYRING))
+ keyring = userHome.getNode(ARGEO_KEYRING);
+ // else if (notYetSavedKeyring.get() != null)
+ // keyring = notYetSavedKeyring.get();
+ else
+ throw new IllegalStateException("Keyring not setup");
+
+ pbeCallback.set(keyring.getProperty(ARGEO_SECRET_KEY_FACTORY).getString(),
+ JcrUtils.getBinaryAsBytes(keyring.getProperty(ARGEO_SALT)),
+ (int) keyring.getProperty(ARGEO_ITERATION_COUNT).getLong(),
+ (int) keyring.getProperty(ARGEO_KEY_LENGTH).getLong(),
+ keyring.getProperty(ARGEO_SECRET_KEY_ENCRYPTION).getString());
+
+ // if (notYetSavedKeyring.get() != null)
+ // notYetSavedKeyring.remove();
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot handle key spec callback", e);
+ } finally {
+ JcrUtils.logoutQuietly(session);
+ }
+ }
+
+ /** The parent node must already exist at this path. */
+ @Override
+ protected synchronized void encrypt(String path, InputStream unencrypted) {
+ // should be called first for lazy initialization
+ SecretKey secretKey = getSecretKey(null);
+ Cipher cipher = createCipher();
+
+ // Binary binary = null;
+ // InputStream in = null;
+ try {
+ session().refresh(true);
+ Node node;
+ if (!session().nodeExists(path)) {
+ String parentPath = JcrUtils.parentPath(path);
+ if (!session().nodeExists(parentPath))
+ throw new IllegalStateException("No parent node of " + path);
+ Node parentNode = session().getNode(parentPath);
+ node = parentNode.addNode(JcrUtils.nodeNameFromPath(path));
+ } else {
+ node = session().getNode(path);
+ }
+ encrypt(secretKey, cipher, node, unencrypted);
+ // node.addMixin(ArgeoTypes.ARGEO_ENCRYPTED);
+ // SecureRandom random = new SecureRandom();
+ // byte[] iv = new byte[16];
+ // random.nextBytes(iv);
+ // cipher.init(Cipher.ENCRYPT_MODE, secretKey, new IvParameterSpec(iv));
+ // JcrUtils.setBinaryAsBytes(node, ARGEO_IV, iv);
+ //
+ // try (InputStream in = new CipherInputStream(unencrypted, cipher);) {
+ // binary = session().getValueFactory().createBinary(in);
+ // node.setProperty(Property.JCR_DATA, binary);
+ // session().save();
+ // }
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot encrypt", e);
+ } finally {
+ try {
+ unencrypted.close();
+ } catch (IOException e) {
+ // silent
+ }
+ // IOUtils.closeQuietly(unencrypted);
+ // IOUtils.closeQuietly(in);
+ // JcrUtils.closeQuietly(binary);
+ JcrUtils.logoutQuietly(session());
+ }
+ }
+
+ protected synchronized void encrypt(SecretKey secretKey, Cipher cipher, Node node, InputStream unencrypted) {
+ try {
+ node.addMixin(ArgeoTypes.ARGEO_ENCRYPTED);
+ SecureRandom random = new SecureRandom();
+ byte[] iv = new byte[16];
+ random.nextBytes(iv);
+ cipher.init(Cipher.ENCRYPT_MODE, secretKey, new IvParameterSpec(iv));
+ JcrUtils.setBinaryAsBytes(node, ARGEO_IV, iv);
+
+ Binary binary = null;
+ try (InputStream in = new CipherInputStream(unencrypted, cipher);) {
+ binary = session().getValueFactory().createBinary(in);
+ node.setProperty(Property.JCR_DATA, binary);
+ session().save();
+ } finally {
+ JcrUtils.closeQuietly(binary);
+ }
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot encrypt", e);
+ } catch (GeneralSecurityException | IOException e) {
+ throw new RuntimeException("Cannot encrypt", e);
+ }
+ }
+
+ @Override
+ protected synchronized InputStream decrypt(String path) {
+ Binary binary = null;
+ try {
+ session().refresh(true);
+ if (!session().nodeExists(path)) {
+ char[] password = ask();
+ Reader reader = new CharArrayReader(password);
+ return new ByteArrayInputStream(IOUtils.toByteArray(reader, StandardCharsets.UTF_8));
+ } else {
+				// should be called first for lazy initialization
+ SecretKey secretKey = getSecretKey(null);
+ Cipher cipher = createCipher();
+ Node node = session().getNode(path);
+ return decrypt(secretKey, cipher, node);
+ }
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot decrypt", e);
+ } catch (GeneralSecurityException | IOException e) {
+ throw new RuntimeException("Cannot decrypt", e);
+ } finally {
+ JcrUtils.closeQuietly(binary);
+ JcrUtils.logoutQuietly(session());
+ }
+ }
+
+ protected synchronized InputStream decrypt(SecretKey secretKey, Cipher cipher, Node node)
+ throws RepositoryException, GeneralSecurityException {
+ if (node.hasProperty(ARGEO_IV)) {
+ byte[] iv = JcrUtils.getBinaryAsBytes(node.getProperty(ARGEO_IV));
+ cipher.init(Cipher.DECRYPT_MODE, secretKey, new IvParameterSpec(iv));
+ } else {
+ cipher.init(Cipher.DECRYPT_MODE, secretKey);
+ }
+
+ Binary binary = node.getProperty(Property.JCR_DATA).getBinary();
+ InputStream encrypted = binary.getStream();
+ return new CipherInputStream(encrypted, cipher);
+ }
+
+ protected Cipher createCipher() {
+ try {
+ Node userHome = NodeUtils.getUserHome(session());
+ if (!userHome.hasNode(ARGEO_KEYRING))
+ throw new IllegalArgumentException("Keyring not setup");
+ Node keyring = userHome.getNode(ARGEO_KEYRING);
+ String cipherName = keyring.getProperty(ARGEO_CIPHER).getString();
+ Provider securityProvider = getSecurityProvider();
+ Cipher cipher;
+ if (securityProvider == null)// TODO use BC?
+ cipher = Cipher.getInstance(cipherName);
+ else
+ cipher = Cipher.getInstance(cipherName, securityProvider);
+ return cipher;
+ } catch (NoSuchAlgorithmException | NoSuchPaddingException e) {
+ throw new IllegalArgumentException("Cannot get cipher", e);
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot get cipher", e);
+ } finally {
+
+ }
+ }
+
+ public synchronized void changePassword(char[] oldPassword, char[] newPassword) {
+ // TODO make it XA compatible
+ SecretKey oldSecretKey = getSecretKey(oldPassword);
+ SecretKey newSecretKey = getSecretKey(newPassword);
+ Session session = session();
+ try {
+ NodeIterator encryptedNodes = session.getWorkspace().getQueryManager()
+ .createQuery("select * from [argeo:encrypted]", Query.JCR_SQL2).execute().getNodes();
+ while (encryptedNodes.hasNext()) {
+ Node node = encryptedNodes.nextNode();
+ InputStream in = decrypt(oldSecretKey, createCipher(), node);
+ encrypt(newSecretKey, createCipher(), node, in);
+ if (log.isDebugEnabled())
+ log.debug("Converted keyring encrypted value of " + node.getPath());
+ }
+ } catch (GeneralSecurityException e) {
+ throw new RuntimeException("Cannot change JCR keyring password", e);
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot change JCR keyring password", e);
+ } finally {
+ JcrUtils.logoutQuietly(session);
+ }
+ }
+
+ // public synchronized void setSession(Session session) {
+ // this.session = session;
+ // }
+
+ public void setIterationCountFactor(Integer iterationCountFactor) {
+ this.iterationCountFactor = iterationCountFactor;
+ }
+
+ public void setSecretKeyLength(Long keyLength) {
+ this.secretKeyLength = keyLength;
+ }
+
+ public void setSecretKeyFactoryName(String secreteKeyFactoryName) {
+ this.secretKeyFactoryName = secreteKeyFactoryName;
+ }
+
+ public void setSecretKeyEncryption(String secreteKeyEncryption) {
+ this.secretKeyEncryption = secreteKeyEncryption;
+ }
+
+ public void setCipherName(String cipherName) {
+ this.cipherName = cipherName;
+ }
+
+}
\ No newline at end of file
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import org.argeo.api.NodeConstants;
+
+/** Internal CMS constants. */
+@Deprecated
+public interface KernelConstants {
+ // Directories
+ String DIR_NODE = "node";
+ String DIR_REPOS = "repos";
+ String DIR_INDEXES = "indexes";
+ String DIR_TRANSACTIONS = "transactions";
+
+ // Files
+ String DEPLOY_CONFIG_PATH = DIR_NODE + '/' + NodeConstants.DEPLOY_BASEDN + ".ldif";
+ String DEFAULT_KEYSTORE_PATH = DIR_NODE + '/' + NodeConstants.NODE + ".p12";
+ String DEFAULT_PEM_KEY_PATH = DIR_NODE + '/' + NodeConstants.NODE + ".key";
+ String DEFAULT_PEM_CERT_PATH = DIR_NODE + '/' + NodeConstants.NODE + ".crt";
+ String NODE_KEY_TAB_PATH = DIR_NODE + "/krb5.keytab";
+
+ // Security
+ String JAAS_CONFIG = "/org/argeo/cms/internal/kernel/jaas.cfg";
+ String JAAS_CONFIG_IPA = "/org/argeo/cms/internal/kernel/jaas-ipa.cfg";
+
+ // Java
+ String JAAS_CONFIG_PROP = "java.security.auth.login.config";
+
+ // DEFAULTS JCR PATH
+ String DEFAULT_HOME_BASE_PATH = "/home";
+ String DEFAULT_USERS_BASE_PATH = "/users";
+ String DEFAULT_GROUPS_BASE_PATH = "/groups";
+
+ // KERBEROS
+ String DEFAULT_KERBEROS_SERVICE = "HTTP";
+
+ // HTTP client
+ String COOKIE_POLICY_BROWSER_COMPATIBILITY = "compatibility";
+
+ // RWT / RAP
+ // String PATH_WORKBENCH = "/ui";
+ // String PATH_WORKBENCH_PUBLIC = PATH_WORKBENCH + "/public";
+
+ String JETTY_FACTORY_PID = "org.eclipse.equinox.http.jetty.config";
+ String WHITEBOARD_PATTERN_PROP = "osgi.http.whiteboard.servlet.pattern";
+ // default Jetty server configured via JettyConfigurator
+ String DEFAULT_JETTY_SERVER = "default";
+ String CMS_JETTY_CUSTOMIZER_CLASS = "org.argeo.equinox.jetty.CmsJettyCustomizer";
+
+ // avoid dependencies
+ String CONTEXT_NAME_PROP = "contextName";
+ String JACKRABBIT_REPOSITORY_URI = "org.apache.jackrabbit.repository.uri";
+ String JACKRABBIT_REMOTE_DEFAULT_WORKSPACE = "org.apache.jackrabbit.spi2davex.WorkspaceNameDefault";
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.security.PrivilegedAction;
+import java.security.URIParameter;
+import java.util.Dictionary;
+import java.util.Hashtable;
+import java.util.Properties;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.security.auth.Subject;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+
+import org.apache.commons.logging.Log;
+import org.argeo.api.DataModelNamespace;
+import org.argeo.api.NodeConstants;
+import org.argeo.cms.jcr.internal.osgi.CmsJcrActivator;
+import org.osgi.framework.BundleContext;
+import org.osgi.util.tracker.ServiceTracker;
+
+/** Package utilities */
+class KernelUtils implements KernelConstants {
+ final static String OSGI_INSTANCE_AREA = "osgi.instance.area";
+ final static String OSGI_CONFIGURATION_AREA = "osgi.configuration.area";
+
+ /** Installs the JAAS configuration at this URL as the JVM-wide default. */
+ static void setJaasConfiguration(URL jaasConfigurationUrl) {
+ try {
+ URIParameter uriParameter = new URIParameter(jaasConfigurationUrl.toURI());
+ javax.security.auth.login.Configuration jaasConfiguration = javax.security.auth.login.Configuration
+ .getInstance("JavaLoginConfig", uriParameter);
+ javax.security.auth.login.Configuration.setConfiguration(jaasConfiguration);
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Cannot set configuration " + jaasConfigurationUrl, e);
+ }
+ }
+
+ /** Copies {@link Properties} into a {@link Dictionary} (backed by a {@link Hashtable}). */
+ static Dictionary<String, ?> asDictionary(Properties props) {
+ Hashtable<String, Object> hashtable = new Hashtable<String, Object>();
+ for (Object key : props.keySet()) {
+ hashtable.put(key.toString(), props.get(key));
+ }
+ return hashtable;
+ }
+
+ /** Loads a properties resource from this class loader and wraps it as a dictionary. */
+ static Dictionary<String, ?> asDictionary(ClassLoader cl, String resource) {
+ Properties props = new Properties();
+ try {
+ props.load(cl.getResourceAsStream(resource));
+ } catch (IOException e) {
+ throw new IllegalArgumentException("Cannot load " + resource + " from classpath", e);
+ }
+ return asDictionary(props);
+ }
+
+ /**
+ * Resolves a path relative to the execution directory (the "user.dir"
+ * framework/system property). A null relative path returns the execution
+ * directory itself.
+ */
+ static File getExecutionDir(String relativePath) {
+ File executionDir = new File(getFrameworkProp("user.dir"));
+ if (relativePath == null)
+ return executionDir;
+ try {
+ return new File(executionDir, relativePath).getCanonicalFile();
+ } catch (IOException e) {
+ throw new IllegalArgumentException("Cannot get canonical file", e);
+ }
+ }
+
+ /**
+ * The OSGi instance directory. NOTE(review): assumes the instance area
+ * property starts with exactly "file:" — other URL forms (e.g. "file://")
+ * or a missing property would break here; confirm against the launcher.
+ */
+ static File getOsgiInstanceDir() {
+ return new File(getBundleContext().getProperty(OSGI_INSTANCE_AREA).substring("file:".length()))
+ .getAbsoluteFile();
+ }
+
+ /** Path within the OSGi instance area, via {@link #getOsgiInstanceUri(String)}. */
+ static Path getOsgiInstancePath(String relativePath) {
+ return Paths.get(getOsgiInstanceUri(relativePath));
+ }
+
+ /**
+ * URI within the OSGi instance area, falling back to the current user
+ * directory when no instance area is configured (the relative path is then
+ * ignored).
+ */
+ static URI getOsgiInstanceUri(String relativePath) {
+ String osgiInstanceBaseUri = getFrameworkProp(OSGI_INSTANCE_AREA);
+ if (osgiInstanceBaseUri != null)
+ return safeUri(osgiInstanceBaseUri + (relativePath != null ? relativePath : ""));
+ else
+ return Paths.get(System.getProperty("user.dir")).toUri();
+ }
+
+ /** File within the OSGi configuration area. */
+ static File getOsgiConfigurationFile(String relativePath) {
+ try {
+ return new File(new URI(getBundleContext().getProperty(OSGI_CONFIGURATION_AREA) + relativePath))
+ .getCanonicalFile();
+ } catch (Exception e) {
+ throw new IllegalArgumentException("Cannot get configuration file for " + relativePath, e);
+ }
+ }
+
+ /**
+ * Framework property, read from the bundle context when available, from the
+ * system properties otherwise; returns the provided default when unset.
+ */
+ static String getFrameworkProp(String key, String def) {
+ BundleContext bundleContext = CmsJcrActivator.getBundleContext();
+ String value;
+ if (bundleContext != null)
+ value = bundleContext.getProperty(key);
+ else
+ value = System.getProperty(key);
+ if (value == null)
+ return def;
+ return value;
+ }
+
+ /** Framework property, or null when unset. */
+ static String getFrameworkProp(String key) {
+ return getFrameworkProp(key, null);
+ }
+
+ // Security
+ // static Subject anonymousLogin() {
+ // Subject subject = new Subject();
+ // LoginContext lc;
+ // try {
+ // lc = new LoginContext(NodeConstants.LOGIN_CONTEXT_USER, subject);
+ // lc.login();
+ // return subject;
+ // } catch (LoginException e) {
+ // throw new CmsException("Cannot login as anonymous", e);
+ // }
+ // }
+
+ /**
+ * Debug-logs framework properties. Note that it iterates over the system
+ * property keys but reads each value through the bundle context.
+ */
+ static void logFrameworkProperties(Log log) {
+ BundleContext bc = getBundleContext();
+ for (Object sysProp : new TreeSet<Object>(System.getProperties().keySet())) {
+ log.debug(sysProp + "=" + bc.getProperty(sysProp.toString()));
+ }
+ // String[] keys = { Constants.FRAMEWORK_STORAGE,
+ // Constants.FRAMEWORK_OS_NAME, Constants.FRAMEWORK_OS_VERSION,
+ // Constants.FRAMEWORK_PROCESSOR, Constants.FRAMEWORK_SECURITY,
+ // Constants.FRAMEWORK_TRUST_REPOSITORIES,
+ // Constants.FRAMEWORK_WINDOWSYSTEM, Constants.FRAMEWORK_VENDOR,
+ // Constants.FRAMEWORK_VERSION, Constants.FRAMEWORK_STORAGE_CLEAN,
+ // Constants.FRAMEWORK_LANGUAGE, Constants.FRAMEWORK_UUID };
+ // for (String key : keys)
+ // log.debug(key + "=" + bc.getProperty(key));
+ }
+
+ /** Prints all system properties, sorted by key, to this stream. */
+ static void printSystemProperties(PrintStream out) {
+ TreeMap<String, String> display = new TreeMap<>();
+ for (Object key : System.getProperties().keySet())
+ display.put(key.toString(), System.getProperty(key.toString()));
+ for (String key : display.keySet())
+ out.println(key + "=" + display.get(key));
+ }
+
+ /** Opens an admin session on the default workspace. */
+ static Session openAdminSession(Repository repository) {
+ return openAdminSession(repository, null);
+ }
+
+ /**
+ * Opens a JCR session on this workspace as the data admin. The data admin
+ * login context is logged out once the JCR login has completed (the JCR
+ * session itself stays open and must be logged out by the caller).
+ */
+ static Session openAdminSession(final Repository repository, final String workspaceName) {
+ LoginContext loginContext = loginAsDataAdmin();
+ return Subject.doAs(loginContext.getSubject(), new PrivilegedAction<Session>() {
+
+ @Override
+ public Session run() {
+ try {
+ return repository.login(workspaceName);
+ } catch (RepositoryException e) {
+ throw new IllegalStateException("Cannot open admin session", e);
+ } finally {
+ try {
+ loginContext.logout();
+ } catch (LoginException e) {
+ throw new IllegalStateException(e);
+ }
+ }
+ }
+
+ });
+ }
+
+ /**
+ * Logs in with the data admin login context. The thread context class loader
+ * is temporarily switched to this bundle's class loader so that the JAAS
+ * configuration is resolved from here, and restored afterwards.
+ */
+ static LoginContext loginAsDataAdmin() {
+ ClassLoader currentCl = Thread.currentThread().getContextClassLoader();
+ Thread.currentThread().setContextClassLoader(KernelUtils.class.getClassLoader());
+ LoginContext loginContext;
+ try {
+ loginContext = new LoginContext(NodeConstants.LOGIN_CONTEXT_DATA_ADMIN);
+ loginContext.login();
+ } catch (LoginException e1) {
+ throw new IllegalStateException("Could not login as data admin", e1);
+ } finally {
+ Thread.currentThread().setContextClassLoader(currentCl);
+ }
+ return loginContext;
+ }
+
+ /** Runs this action as the data admin, logging out when it completes. */
+ static void doAsDataAdmin(Runnable action) {
+ LoginContext loginContext = loginAsDataAdmin();
+ Subject.doAs(loginContext.getSubject(), new PrivilegedAction<Void>() {
+
+ @Override
+ public Void run() {
+ try {
+ action.run();
+ return null;
+ } finally {
+ try {
+ loginContext.logout();
+ } catch (LoginException e) {
+ throw new IllegalStateException(e);
+ }
+ }
+ }
+
+ });
+ }
+
+ /** Opens this service tracker asynchronously, in a dedicated thread. */
+ static void asyncOpen(ServiceTracker<?, ?> st) {
+ Runnable run = new Runnable() {
+
+ @Override
+ public void run() {
+ st.open();
+ }
+ };
+// Activator.getInternalExecutorService().execute(run);
+ new Thread(run, "Open service tracker " + st).start();
+ }
+
+ /** The bundle context of this bundle, as held by {@link CmsJcrActivator}. */
+ static BundleContext getBundleContext() {
+ return CmsJcrActivator.getBundleContext();
+ }
+
+ /**
+ * Parses a strict "true"/"false" value (null counts as false). The error
+ * message refers to {@link DataModelNamespace#ABSTRACT}, so this is
+ * apparently used to parse that data model attribute.
+ */
+ static boolean asBoolean(String value) {
+ if (value == null)
+ return false;
+ switch (value) {
+ case "true":
+ return true;
+ case "false":
+ return false;
+ default:
+ throw new IllegalArgumentException(
+ "Unsupported value for attribute " + DataModelNamespace.ABSTRACT + ": " + value);
+ }
+ }
+
+ /** Parses a URI, converting the checked exception into an unchecked one. */
+ private static URI safeUri(String uri) {
+ if (uri == null)
+ throw new IllegalArgumentException("URI cannot be null");
+ try {
+ return new URI(uri);
+ } catch (URISyntaxException e) {
+ throw new IllegalArgumentException("Badly formatted URI " + uri, e);
+ }
+ }
+
+ /** Static utility class: no instances. */
+ private KernelUtils() {
+
+ }
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import javax.jcr.Repository;
+
+import org.argeo.api.NodeConstants;
+import org.argeo.jcr.JcrRepositoryWrapper;
+
+/**
+ * Wraps a local {@link Repository} and publishes its common name (CN) as the
+ * {@link NodeConstants#CN} repository descriptor.
+ */
+class LocalRepository extends JcrRepositoryWrapper {
+ // the common name identifying this repository
+ private final String cn;
+
+ public LocalRepository(Repository repository, String cn) {
+ super(repository);
+ this.cn = cn;
+ // Map<String, Object> attrs = dataModelCapability.getAttributes();
+ // cn = (String) attrs.get(DataModelNamespace.NAME);
+ putDescriptor(NodeConstants.CN, cn);
+ }
+
+ /** The common name of this repository. */
+ String getCn() {
+ return cn;
+ }
+
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import java.util.Dictionary;
+
+import javax.jcr.Repository;
+
+import org.osgi.service.cm.ConfigurationException;
+import org.osgi.service.cm.ManagedService;
+
+/**
+ * A {@link JcrKeyring} registered as a {@link ManagedService}; configuration
+ * updates are currently ignored (no-op {@link #updated(Dictionary)}).
+ */
+class NodeKeyRing extends JcrKeyring implements ManagedService{
+
+ public NodeKeyRing(Repository repository) {
+ super(repository);
+ }
+
+ // No configurable properties at this stage.
+ @Override
+ public void updated(Dictionary<String, ?> properties) throws ConfigurationException {
+ }
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.RepositoryFactory;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.jcr2dav.Jcr2davRepositoryFactory;
+import org.argeo.api.NodeConstants;
+import org.argeo.cms.internal.jcr.RepoConf;
+import org.argeo.cms.jcr.internal.osgi.CmsJcrActivator;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.InvalidSyntaxException;
+import org.osgi.framework.ServiceReference;
+
+/**
+ * OSGi-aware Jackrabbit repository factory which can retrieve/publish
+ * {@link Repository} as OSGi services.
+ */
+public class NodeRepositoryFactory implements RepositoryFactory {
+ private final Log log = LogFactory.getLog(getClass());
+// private final BundleContext bundleContext = FrameworkUtil.getBundle(getClass()).getBundleContext();
+
+ // private Resource fileRepositoryConfiguration = new ClassPathResource(
+ // "/org/argeo/cms/internal/kernel/repository-localfs.xml");
+
+ /**
+ * Looks up a {@link Repository} service whose {@link NodeConstants#CN}
+ * property equals this alias. Fails if zero or more than one match; returns
+ * null when no bundle context is available.
+ */
+ protected Repository getRepositoryByAlias(String alias) {
+ BundleContext bundleContext = CmsJcrActivator.getBundleContext();
+ if (bundleContext != null) {
+ try {
+ Collection<ServiceReference<Repository>> srs = bundleContext.getServiceReferences(Repository.class,
+ "(" + NodeConstants.CN + "=" + alias + ")");
+ if (srs.size() == 0)
+ throw new IllegalArgumentException("No repository with alias " + alias + " found in OSGi registry");
+ else if (srs.size() > 1)
+ throw new IllegalArgumentException(
+ srs.size() + " repositories with alias " + alias + " found in OSGi registry");
+ return bundleContext.getService(srs.iterator().next());
+ } catch (InvalidSyntaxException e) {
+ throw new IllegalArgumentException("Cannot find repository with alias " + alias, e);
+ }
+ } else {
+ // TODO ability to filter static services
+ return null;
+ }
+ }
+
+ // private void publish(String alias, Repository repository, Properties
+ // properties) {
+ // if (bundleContext != null) {
+ // // do not modify reference
+ // Hashtable<String, String> props = new Hashtable<String, String>();
+ // props.putAll(props);
+ // props.put(JCR_REPOSITORY_ALIAS, alias);
+ // bundleContext.registerService(Repository.class.getName(), repository,
+ // props);
+ // }
+ // }
+
+ /**
+ * Resolves a repository from the parameters: a labeled URI (http/https →
+ * remote davex, file → unsupported, vm → alias lookup) or a CN alias.
+ *
+ * @throws IllegalArgumentException if the parameters are insufficient, the
+ *                                  URI scheme is unknown, or nothing is found
+ */
+ @SuppressWarnings({ "rawtypes" })
+ public Repository getRepository(Map parameters) throws RepositoryException {
+ // // check if can be found by alias
+ // Repository repository = super.getRepository(parameters);
+ // if (repository != null)
+ // return repository;
+
+ // check if remote
+ Repository repository;
+ String uri = null;
+ // NOTE(review): the key tested here (RepoConf.labeledUri.name()) differs
+ // textually from the key read below (NodeConstants.LABELED_URI); they are
+ // presumably the same string — verify, otherwise get() returns null.
+ if (parameters.containsKey(RepoConf.labeledUri.name()))
+ uri = parameters.get(NodeConstants.LABELED_URI).toString();
+ else if (parameters.containsKey(KernelConstants.JACKRABBIT_REPOSITORY_URI))
+ uri = parameters.get(KernelConstants.JACKRABBIT_REPOSITORY_URI).toString();
+
+ if (uri != null) {
+ if (uri.startsWith("http")) {// http, https
+ Object defaultWorkspace = parameters.get(RepoConf.defaultWorkspace.name());
+ repository = createRemoteRepository(uri, defaultWorkspace != null ? defaultWorkspace.toString() : null);
+ } else if (uri.startsWith("file"))// http, https
+ repository = createFileRepository(uri, parameters);
+ else if (uri.startsWith("vm")) {
+ // log.warn("URI " + uri + " should have been managed by generic
+ // JCR repository factory");
+ repository = getRepositoryByAlias(getAliasFromURI(uri));
+ } else
+ throw new IllegalArgumentException("Unrecognized URI format " + uri);
+
+ }
+
+ else if (parameters.containsKey(NodeConstants.CN)) {
+ // Properties properties = new Properties();
+ // properties.putAll(parameters);
+ String alias = parameters.get(NodeConstants.CN).toString();
+ // publish(alias, repository, properties);
+ // log.info("Registered JCR repository under alias '" + alias + "'
+ // with properties " + properties);
+ repository = getRepositoryByAlias(alias);
+ } else
+ throw new IllegalArgumentException("Not enough information in " + parameters);
+
+ if (repository == null)
+ throw new IllegalArgumentException("Repository not found " + parameters);
+
+ return repository;
+ }
+
+ /**
+ * Connects to a remote Jackrabbit repository over davex at this URI,
+ * optionally with a default workspace.
+ */
+ protected Repository createRemoteRepository(String uri, String defaultWorkspace) throws RepositoryException {
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(KernelConstants.JACKRABBIT_REPOSITORY_URI, uri);
+ if (defaultWorkspace != null)
+ params.put(KernelConstants.JACKRABBIT_REMOTE_DEFAULT_WORKSPACE, defaultWorkspace);
+ Repository repository = new Jcr2davRepositoryFactory().getRepository(params);
+ if (repository == null)
+ throw new IllegalArgumentException("Remote Davex repository " + uri + " not found");
+ log.info("Initialized remote Jackrabbit repository from uri " + uri);
+ return repository;
+ }
+
+ /** File-based repositories are not supported: always throws. */
+ @SuppressWarnings({ "rawtypes" })
+ protected Repository createFileRepository(final String uri, Map parameters) throws RepositoryException {
+ throw new UnsupportedOperationException();
+ // InputStream configurationIn = null;
+ // try {
+ // Properties vars = new Properties();
+ // vars.putAll(parameters);
+ // String dirPath = uri.substring("file:".length());
+ // File homeDir = new File(dirPath);
+ // if (homeDir.exists() && !homeDir.isDirectory())
+ // throw new ArgeoJcrException("Repository home " + dirPath + " is not a
+ // directory");
+ // if (!homeDir.exists())
+ // homeDir.mkdirs();
+ // configurationIn = fileRepositoryConfiguration.getInputStream();
+ // vars.put(RepositoryConfigurationParser.REPOSITORY_HOME_VARIABLE,
+ // homeDir.getCanonicalPath());
+ // RepositoryConfig repositoryConfig = RepositoryConfig.create(new
+ // InputSource(configurationIn), vars);
+ //
+ // // TransientRepository repository = new
+ // // TransientRepository(repositoryConfig);
+ // final RepositoryImpl repository =
+ // RepositoryImpl.create(repositoryConfig);
+ // Session session = repository.login();
+ // // FIXME make it generic
+ // org.argeo.jcr.JcrUtils.addPrivilege(session, "/", "ROLE_ADMIN",
+ // "jcr:all");
+ // org.argeo.jcr.JcrUtils.logoutQuietly(session);
+ // Runtime.getRuntime().addShutdownHook(new Thread("Clean JCR repository
+ // " + uri) {
+ // public void run() {
+ // repository.shutdown();
+ // log.info("Destroyed repository " + uri);
+ // }
+ // });
+ // log.info("Initialized file Jackrabbit repository from uri " + uri);
+ // return repository;
+ // } catch (Exception e) {
+ // throw new ArgeoJcrException("Cannot create repository " + uri, e);
+ // } finally {
+ // IOUtils.closeQuietly(configurationIn);
+ // }
+ }
+
+ /**
+ * Extracts the alias from a vm-style URI: the path, with leading and trailing
+ * slashes stripped. NOTE(review): an empty path would make charAt(0) throw
+ * StringIndexOutOfBoundsException.
+ */
+ protected String getAliasFromURI(String uri) {
+ try {
+ URI uriObj = new URI(uri);
+ String alias = uriObj.getPath();
+ if (alias.charAt(0) == '/')
+ alias = alias.substring(1);
+ if (alias.charAt(alias.length() - 1) == '/')
+ alias = alias.substring(0, alias.length() - 1);
+ return alias;
+ } catch (URISyntaxException e) {
+ throw new IllegalArgumentException("Cannot interpret URI " + uri, e);
+ }
+ }
+
+ /**
+ * Called after the repository has been initialised. Does nothing by default.
+ */
+ @SuppressWarnings("rawtypes")
+ protected void postInitialization(Repository repository, Map parameters) {
+
+ }
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import java.net.URI;
+import java.util.Dictionary;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.jcr.Repository;
+import javax.jcr.RepositoryFactory;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.core.RepositoryContext;
+import org.argeo.api.NodeConstants;
+import org.argeo.cms.internal.jcr.RepoConf;
+import org.argeo.cms.internal.jcr.RepositoryBuilder;
+import org.argeo.cms.jcr.internal.osgi.CmsJcrActivator;
+import org.argeo.util.LangUtils;
+import org.osgi.framework.Constants;
+import org.osgi.service.cm.ConfigurationException;
+import org.osgi.service.cm.ManagedServiceFactory;
+
+/** A {@link ManagedServiceFactory} creating or referencing JCR repositories. */
+public class RepositoryServiceFactory implements ManagedServiceFactory {
+ private final static Log log = LogFactory.getLog(RepositoryServiceFactory.class);
+// private final BundleContext bc = FrameworkUtil.getBundle(RepositoryServiceFactory.class).getBundleContext();
+
+ private Map<String, RepositoryContext> repositories = new HashMap<String, RepositoryContext>();
+ private Map<String, Object> pidToCn = new HashMap<String, Object>();
+
+ @Override
+ public String getName() {
+ return "Jackrabbit repository service factory";
+ }
+
+ @Override
+ public void updated(String pid, Dictionary<String, ?> properties) throws ConfigurationException {
+ if (repositories.containsKey(pid))
+ throw new IllegalArgumentException("Already a repository registered for " + pid);
+
+ if (properties == null)
+ return;
+
+ if (repositories.containsKey(pid)) {
+ log.warn("Ignore update of Jackrabbit repository " + pid);
+ return;
+ }
+
+ try {
+ Object labeledUri = properties.get(RepoConf.labeledUri.name());
+ if (labeledUri == null) {
+ RepositoryBuilder repositoryBuilder = new RepositoryBuilder();
+ RepositoryContext repositoryContext = repositoryBuilder.createRepositoryContext(properties);
+ repositories.put(pid, repositoryContext);
+ Dictionary<String, Object> props = LangUtils.dict(Constants.SERVICE_PID, pid);
+ // props.put(ArgeoJcrConstants.JCR_REPOSITORY_URI,
+ // properties.get(RepoConf.labeledUri.name()));
+ Object cn = properties.get(NodeConstants.CN);
+ if (cn != null) {
+ props.put(NodeConstants.CN, cn);
+ // props.put(NodeConstants.JCR_REPOSITORY_ALIAS, cn);
+ pidToCn.put(pid, cn);
+ }
+ CmsJcrActivator.registerService(RepositoryContext.class, repositoryContext, props);
+ } else {
+ try {
+ Object cn = properties.get(NodeConstants.CN);
+ Object defaultWorkspace = properties.get(RepoConf.defaultWorkspace.name());
+ if (defaultWorkspace == null)
+ defaultWorkspace = RepoConf.defaultWorkspace.getDefault();
+ URI uri = new URI(labeledUri.toString());
+// RepositoryFactory repositoryFactory = bc
+// .getService(bc.getServiceReference(RepositoryFactory.class));
+ RepositoryFactory repositoryFactory = CmsJcrActivator.getService(RepositoryFactory.class);
+ Map<String, String> parameters = new HashMap<String, String>();
+ parameters.put(RepoConf.labeledUri.name(), uri.toString());
+ parameters.put(RepoConf.defaultWorkspace.name(), defaultWorkspace.toString());
+ Repository repository = repositoryFactory.getRepository(parameters);
+ // Repository repository = NodeUtils.getRepositoryByUri(repositoryFactory,
+ // uri.toString());
+ Dictionary<String, Object> props = LangUtils.dict(Constants.SERVICE_PID, pid);
+ props.put(RepoConf.labeledUri.name(),
+ new URI(uri.getScheme(), null, uri.getHost(), uri.getPort(), uri.getPath(), null, null)
+ .toString());
+ if (cn != null) {
+ props.put(NodeConstants.CN, cn);
+ // props.put(NodeConstants.JCR_REPOSITORY_ALIAS, cn);
+ pidToCn.put(pid, cn);
+ }
+ CmsJcrActivator.registerService(Repository.class, repository, props);
+
+ // home
+ if (cn.equals(NodeConstants.NODE_REPOSITORY)) {
+ Dictionary<String, Object> homeProps = LangUtils.dict(NodeConstants.CN,
+ NodeConstants.EGO_REPOSITORY);
+ EgoRepository homeRepository = new EgoRepository(repository, true);
+ CmsJcrActivator.registerService(Repository.class, homeRepository, homeProps);
+ }
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ }
+ } catch (Exception e) {
+ throw new IllegalStateException("Cannot create Jackrabbit repository " + pid, e);
+ }
+
+ }
+
+ @Override
+ public void deleted(String pid) {
+ RepositoryContext repositoryContext = repositories.remove(pid);
+ repositoryContext.getRepository().shutdown();
+ if (log.isDebugEnabled())
+ log.debug("Deleted repository " + pid);
+ }
+
+ public void shutdown() {
+ for (String pid : repositories.keySet()) {
+ try {
+ repositories.get(pid).getRepository().shutdown();
+ if (log.isDebugEnabled())
+ log.debug("Shut down repository " + pid
+ + (pidToCn.containsKey(pid) ? " (" + pidToCn.get(pid) + ")" : ""));
+ } catch (Exception e) {
+ log.error("Error when shutting down Jackrabbit repository " + pid, e);
+ }
+ }
+ }
+
+}
--- /dev/null
+package org.argeo.cms.jcr.internal;
+
+import java.io.File;
+import java.lang.management.ManagementFactory;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.api.stats.RepositoryStatistics;
+import org.apache.jackrabbit.stats.RepositoryStatisticsImpl;
+
+/**
+ * Background thread started by the kernel, which gathers statistics and
+ * monitors/controls other processes.
+ */
+public class StatisticsThread extends Thread {
+ private final static Log log = LogFactory.getLog(StatisticsThread.class);
+
+ // repository statistics to dump each period; only read when non-null
+ private RepositoryStatisticsImpl repoStats;
+
+ /** The smallest period of operation, in ms */
+ private final long PERIOD = 60 * 1000l;
+ /** One ms in ns */
+ private final static long m = 1000l * 1000l;
+ // bytes per MB, used to scale memory and disk figures
+ private final static long M = 1024l * 1024l;
+
+ // loop flag; cleared by destroyAndJoin()
+ private boolean running = true;
+
+ private Log kernelStatsLog = LogFactory.getLog("argeo.stats.kernel");
+ private Log nodeStatsLog = LogFactory.getLog("argeo.stats.node");
+
+ @SuppressWarnings("unused")
+ private long cycle = 0l;
+
+ public StatisticsThread(String name) {
+ super(name);
+ }
+
+ /**
+ * One periodic pass: debug-logs JVM uptime/load/memory to the kernel stats
+ * log and disk space plus repository statistics to the node stats log
+ * (each only when its log has debug enabled).
+ */
+ private void doSmallestPeriod() {
+ // Clean expired sessions
+ // FIXME re-enable it in CMS
+ //CmsSessionImpl.closeInvalidSessions();
+
+ if (kernelStatsLog.isDebugEnabled()) {
+ StringBuilder line = new StringBuilder(64);
+ line.append("§\t");
+ long freeMem = Runtime.getRuntime().freeMemory() / M;
+ long totalMem = Runtime.getRuntime().totalMemory() / M;
+ long maxMem = Runtime.getRuntime().maxMemory() / M;
+ double loadAvg = ManagementFactory.getOperatingSystemMXBean().getSystemLoadAverage();
+ // in min
+ boolean min = true;
+ long uptime = ManagementFactory.getRuntimeMXBean().getUptime() / (1000 * 60);
+ // switch to hours beyond one day
+ if (uptime > 24 * 60) {
+ min = false;
+ uptime = uptime / 60;
+ }
+ line.append(uptime).append(min ? " min" : " h").append('\t');
+ line.append(loadAvg).append('\t').append(maxMem).append('\t').append(totalMem).append('\t').append(freeMem)
+ .append('\t');
+ kernelStatsLog.debug(line);
+ }
+
+ if (nodeStatsLog.isDebugEnabled()) {
+ File dataDir = KernelUtils.getOsgiInstanceDir();
+ long freeSpace = dataDir.getUsableSpace() / M;
+ // File currentRoot = null;
+ // for (File root : File.listRoots()) {
+ // String rootPath = root.getAbsolutePath();
+ // if (dataDir.getAbsolutePath().startsWith(rootPath)) {
+ // if (currentRoot == null
+ // || (rootPath.length() > currentRoot.getPath()
+ // .length())) {
+ // currentRoot = root;
+ // }
+ // }
+ // }
+ // long totalSpace = currentRoot.getTotalSpace();
+ StringBuilder line = new StringBuilder(128);
+ line.append("§\t").append(freeSpace).append(" MB left in " + dataDir);
+ line.append('\n');
+ if (repoStats != null)
+ for (RepositoryStatistics.Type type : RepositoryStatistics.Type.values()) {
+ long[] vals = repoStats.getTimeSeries(type).getValuePerMinute();
+ long val = vals[vals.length - 1];
+ line.append(type.name()).append('\t').append(val).append('\n');
+ }
+ nodeStatsLog.debug(line);
+ }
+ }
+
+ /** Runs {@link #doSmallestPeriod()} once per PERIOD until stopped. */
+ @Override
+ public void run() {
+ if (log.isTraceEnabled())
+ log.trace("Kernel thread started.");
+ final long periodNs = PERIOD * m;
+ while (running) {
+ long beginNs = System.nanoTime();
+ doSmallestPeriod();
+
+ // sleep only for the remainder of the period, if any
+ long waitNs = periodNs - (System.nanoTime() - beginNs);
+ if (waitNs < 0)
+ continue;
+ // wait
+ try {
+ sleep(waitNs / m, (int) (waitNs % m));
+ } catch (InterruptedException e) {
+ // silent
+ }
+ cycle++;
+ }
+ }
+
+ /**
+ * Requests the loop to stop. NOTE(review): run() uses sleep(), not wait(),
+ * so notifyAll() does not shorten the pause — shutdown can take up to one
+ * PERIOD. The commented-out interrupt()/join() suggests this is deliberate;
+ * confirm.
+ */
+ public synchronized void destroyAndJoin() {
+ running = false;
+ notifyAll();
+// interrupt();
+// try {
+// join(PERIOD * 2);
+// } catch (InterruptedException e) {
+// // throw new CmsException("Kernel thread destruction was interrupted");
+// log.error("Kernel thread destruction was interrupted", e);
+// }
+ }
+}
--- /dev/null
+package org.argeo.cms.jcr.internal.osgi;
+
+import java.nio.file.spi.FileSystemProvider;
+import java.util.ArrayList;
+import java.util.Dictionary;
+import java.util.List;
+
+import javax.jcr.RepositoryFactory;
+
+import org.argeo.api.NodeConstants;
+import org.argeo.cms.jcr.internal.CmsFsProvider;
+import org.argeo.cms.jcr.internal.StatisticsThread;
+import org.argeo.cms.jcr.internal.NodeRepositoryFactory;
+import org.argeo.cms.jcr.internal.RepositoryServiceFactory;
+import org.argeo.util.LangUtils;
+import org.osgi.framework.Bundle;
+import org.osgi.framework.BundleActivator;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.Constants;
+import org.osgi.framework.FrameworkUtil;
+import org.osgi.service.cm.ManagedServiceFactory;
+
+/**
+ * Activator of the CMS JCR bundle: starts the statistics thread and registers
+ * the JCR repository factories and the CMS file system provider.
+ */
+public class CmsJcrActivator implements BundleActivator {
+ // resolved eagerly so the static accessors work before start() is called
+ private static BundleContext bundleContext;
+ static {
+ Bundle bundle = FrameworkUtil.getBundle(CmsJcrActivator.class);
+ if (bundle != null) {
+ bundleContext = bundle.getBundleContext();
+ }
+ }
+
+// private List<Runnable> stopHooks = new ArrayList<>();
+ private StatisticsThread kernelThread;
+
+ private RepositoryServiceFactory repositoryServiceFactory;
+// private JcrDeployment jcrDeployment;
+
+ @Override
+ public void start(BundleContext context) throws Exception {
+ // kernel thread
+ kernelThread = new StatisticsThread("Kernel Thread");
+ kernelThread.setContextClassLoader(getClass().getClassLoader());
+ kernelThread.start();
+
+ // JCR
+ repositoryServiceFactory = new RepositoryServiceFactory();
+// stopHooks.add(() -> repositoryServiceFactory.shutdown());
+ registerService(ManagedServiceFactory.class, repositoryServiceFactory,
+ LangUtils.dict(Constants.SERVICE_PID, NodeConstants.NODE_REPOS_FACTORY_PID));
+
+ NodeRepositoryFactory repositoryFactory = new NodeRepositoryFactory();
+ registerService(RepositoryFactory.class, repositoryFactory, null);
+
+ // File System
+ CmsFsProvider cmsFsProvider = new CmsFsProvider();
+// ServiceLoader<FileSystemProvider> fspSl = ServiceLoader.load(FileSystemProvider.class);
+// for (FileSystemProvider fsp : fspSl) {
+// log.debug("FileSystemProvider " + fsp);
+// if (fsp instanceof CmsFsProvider) {
+// cmsFsProvider = (CmsFsProvider) fsp;
+// }
+// }
+// for (FileSystemProvider fsp : FileSystemProvider.installedProviders()) {
+// log.debug("Installed FileSystemProvider " + fsp);
+// }
+ registerService(FileSystemProvider.class, cmsFsProvider,
+ LangUtils.dict(Constants.SERVICE_PID, NodeConstants.NODE_FS_PROVIDER_PID));
+
+// jcrDeployment = new JcrDeployment();
+// jcrDeployment.init();
+ }
+
+ @Override
+ public void stop(BundleContext context) throws Exception {
+// if (jcrDeployment != null)
+// jcrDeployment.destroy();
+
+ // shut down repositories before stopping the kernel thread
+ if (repositoryServiceFactory != null)
+ repositoryServiceFactory.shutdown();
+
+ if (kernelThread != null)
+ kernelThread.destroyAndJoin();
+
+ }
+
+ /** Registers this service when a bundle context is available; no-op otherwise. */
+ @Deprecated
+ public static <T> void registerService(Class<T> clss, T service, Dictionary<String, ?> properties) {
+ if (bundleContext != null) {
+ bundleContext.registerService(clss, service, properties);
+ }
+
+ }
+
+ @Deprecated
+ public static BundleContext getBundleContext() {
+ return bundleContext;
+ }
+
+ /**
+ * First service of this type, or null without a bundle context.
+ * NOTE(review): getServiceReference() may return null when no such service
+ * is registered, which would make getService(null) throw — confirm callers
+ * only use this for services guaranteed to be present.
+ */
+ @Deprecated
+ public static <T> T getService(Class<T> clss) {
+ if (bundleContext != null) {
+ return bundleContext.getService(bundleContext.getServiceReference(clss));
+ } else {
+ return null;
+ }
+ }
+
+}
--- /dev/null
+package org.argeo.cms.jcr.internal.servlet;
+
+import java.util.Map;
+
+import javax.jcr.Repository;
+
+import org.apache.jackrabbit.server.SessionProvider;
+import org.apache.jackrabbit.server.remoting.davex.JcrRemotingServlet;
+import org.argeo.api.NodeConstants;
+
+/** A {@link JcrRemotingServlet} based on {@link CmsSessionProvider}. */
+public class CmsRemotingServlet extends JcrRemotingServlet {
+ private static final long serialVersionUID = 6459455509684213633L;
+ private Repository repository;
+ private SessionProvider sessionProvider;
+
+ /** Default constructor for DS/whiteboard instantiation; use setRepository(). */
+ public CmsRemotingServlet() {
+ }
+
+ /** Programmatic constructor with an explicit repository alias. */
+ public CmsRemotingServlet(String alias, Repository repository) {
+ this.repository = repository;
+ this.sessionProvider = new CmsSessionProvider(alias);
+ }
+
+ @Override
+ public Repository getRepository() {
+ return repository;
+ }
+
+ /**
+ * Injects the repository; its alias is taken from the {@link NodeConstants#CN}
+ * service property.
+ *
+ * @throws IllegalArgumentException if the properties carry no CN
+ */
+ public void setRepository(Repository repository, Map<String, String> properties) {
+ this.repository = repository;
+ String alias = properties.get(NodeConstants.CN);
+ if (alias != null)
+ sessionProvider = new CmsSessionProvider(alias);
+ else
+ throw new IllegalArgumentException("Only aliased repositories are supported");
+ }
+
+ @Override
+ protected SessionProvider getSessionProvider() {
+ return sessionProvider;
+ }
+
+}
--- /dev/null
+package org.argeo.cms.jcr.internal.servlet;
+
+import java.io.Serializable;
+import java.security.PrivilegedActionException;
+import java.security.PrivilegedExceptionAction;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.security.auth.Subject;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.server.SessionProvider;
+import org.argeo.api.NodeConstants;
+import org.argeo.cms.auth.CmsSession;
+import org.argeo.jcr.JcrUtils;
+
+/**
+ * Implements an open-session-in-view pattern: a new JCR session is created for
+ * each request.
+ */
+public class CmsSessionProvider implements SessionProvider, Serializable {
+ private static final long serialVersionUID = -1358136599534938466L;
+
+ private final static Log log = LogFactory.getLog(CmsSessionProvider.class);
+
+ private final String alias;
+
+ private LinkedHashMap<Session, CmsDataSession> cmsSessions = new LinkedHashMap<>();
+
+ public CmsSessionProvider(String alias) {
+ this.alias = alias;
+ }
+
+ public Session getSession(HttpServletRequest request, Repository rep, String workspace)
+ throws javax.jcr.LoginException, ServletException, RepositoryException {
+
+ // a client is scanning parent URLs.
+// if (workspace == null)
+// return null;
+
+// CmsSessionImpl cmsSession = WebCmsSessionImpl.getCmsSession(request);
+ // FIXME retrieve CMS session
+ CmsSession cmsSession = null;
+ if (log.isTraceEnabled()) {
+ log.trace("Get JCR session from " + cmsSession);
+ }
+ if (cmsSession == null)
+ throw new IllegalStateException("Cannot find a session for request " + request.getRequestURI());
+ CmsDataSession cmsDataSession = new CmsDataSession(cmsSession);
+ Session session = cmsDataSession.getDataSession(alias, workspace, rep);
+ cmsSessions.put(session, cmsDataSession);
+ return session;
+ }
+
+ public void releaseSession(Session session) {
+// JcrUtils.logoutQuietly(session);
+ if (cmsSessions.containsKey(session)) {
+ CmsDataSession cmsDataSession = cmsSessions.get(session);
+ cmsDataSession.releaseDataSession(alias, session);
+ } else {
+ log.warn("JCR session " + session + " not found in CMS session list. Logging it out...");
+ JcrUtils.logoutQuietly(session);
+ }
+ }
+
+ static class CmsDataSession {
+ private CmsSession cmsSession;
+
+ private Map<String, Session> dataSessions = new HashMap<>();
+ private Set<String> dataSessionsInUse = new HashSet<>();
+ private Set<Session> additionalDataSessions = new HashSet<>();
+
+ private CmsDataSession(CmsSession cmsSession) {
+ this.cmsSession = cmsSession;
+ }
+
+ public Session newDataSession(String cn, String workspace, Repository repository) {
+ checkValid();
+ return login(repository, workspace);
+ }
+
+ public synchronized Session getDataSession(String cn, String workspace, Repository repository) {
+ checkValid();
+ // FIXME make it more robust
+ if (workspace == null)
+ workspace = NodeConstants.SYS_WORKSPACE;
+ String path = cn + '/' + workspace;
+ if (dataSessionsInUse.contains(path)) {
+ try {
+ wait(1000);
+ if (dataSessionsInUse.contains(path)) {
+ Session session = login(repository, workspace);
+ additionalDataSessions.add(session);
+ if (log.isTraceEnabled())
+ log.trace("Additional data session " + path + " for " + cmsSession.getUserDn());
+ return session;
+ }
+ } catch (InterruptedException e) {
+ // silent
+ }
+ }
+
+ Session session = null;
+ if (dataSessions.containsKey(path)) {
+ session = dataSessions.get(path);
+ } else {
+ session = login(repository, workspace);
+ dataSessions.put(path, session);
+ if (log.isTraceEnabled())
+ log.trace("New data session " + path + " for " + cmsSession.getUserDn());
+ }
+ dataSessionsInUse.add(path);
+ return session;
+ }
+
+ private Session login(Repository repository, String workspace) {
+ try {
+ return Subject.doAs(cmsSession.getSubject(), new PrivilegedExceptionAction<Session>() {
+ @Override
+ public Session run() throws Exception {
+ return repository.login(workspace);
+ }
+ });
+ } catch (PrivilegedActionException e) {
+ throw new IllegalStateException("Cannot log in " + cmsSession.getUserDn() + " to JCR", e);
+ }
+ }
+
+ /**
+ * Releases a session obtained via getDataSession(): an additional
+ * (overflow) session is logged out immediately, while a pooled session is
+ * only marked as free and threads waiting in getDataSession() are notified.
+ * Inconsistencies are logged as warnings, not failures.
+ */
+ public synchronized void releaseDataSession(String cn, Session session) {
+ if (additionalDataSessions.contains(session)) {
+ // overflow sessions are not pooled: log out and forget
+ JcrUtils.logoutQuietly(session);
+ additionalDataSessions.remove(session);
+ if (log.isTraceEnabled())
+ log.trace("Remove additional data session " + session);
+ return;
+ }
+ String path = cn + '/' + session.getWorkspace().getName();
+ if (!dataSessionsInUse.contains(path))
+ log.warn("Data session " + path + " was not in use for " + cmsSession.getUserDn());
+ dataSessionsInUse.remove(path);
+ Session registeredSession = dataSessions.get(path);
+ if (session != registeredSession)
+ log.warn("Data session " + path + " not consistent for " + cmsSession.getUserDn());
+ if (log.isTraceEnabled())
+ log.trace("Released data session " + session + " for " + path);
+ // wake up threads blocked in getDataSession()'s wait(1000)
+ notifyAll();
+ }
+
+ /** Fails fast when the underlying CMS session has been invalidated. */
+ private void checkValid() {
+ if (cmsSession.isValid())
+ return;
+ throw new IllegalStateException(
+ "CMS session " + cmsSession.getUuid() + " is not valid since " + cmsSession.getEnd());
+ }
+
+ /** Logs out all pooled and additional JCR sessions held by this object. */
+ private void close() {
+ // FIXME call this when CMS session is closed
+ synchronized (this) {
+ // TODO check data session in use ?
+ for (String path : dataSessions.keySet())
+ JcrUtils.logoutQuietly(dataSessions.get(path));
+ for (Session session : additionalDataSessions)
+ JcrUtils.logoutQuietly(session);
+ }
+ }
+ }
+}
--- /dev/null
+package org.argeo.cms.jcr.internal.servlet;
+
+import java.util.Map;
+
+import javax.jcr.Repository;
+
+import org.apache.jackrabbit.webdav.simple.SimpleWebdavServlet;
+import org.argeo.api.NodeConstants;
+
+/** A {@link SimpleWebdavServlet} based on {@link CmsSessionProvider}. */
+public class CmsWebDavServlet extends SimpleWebdavServlet {
+ private static final long serialVersionUID = 7485800288686328063L;
+ private Repository repository;
+
+ /** Default constructor, for use as a declarative service component. */
+ public CmsWebDavServlet() {
+ }
+
+ public CmsWebDavServlet(String alias, Repository repository) {
+ this.repository = repository;
+ setSessionProvider(new CmsSessionProvider(alias));
+ }
+
+ @Override
+ public Repository getRepository() {
+ return repository;
+ }
+
+ /**
+ * Injects the repository (service bind method). The properties must provide
+ * the repository alias under {@link NodeConstants#CN}.
+ *
+ * @throws IllegalArgumentException if no alias is provided; the alias is now
+ * validated before any state is modified, so a failed call no longer
+ * leaves the servlet with a repository but no session provider
+ */
+ public void setRepository(Repository repository, Map<String, String> properties) {
+ String alias = properties.get(NodeConstants.CN);
+ if (alias == null)
+ throw new IllegalArgumentException("Only aliased repositories are supported");
+ this.repository = repository;
+ setSessionProvider(new CmsSessionProvider(alias));
+ }
+
+}
--- /dev/null
+package org.argeo.cms.jcr.internal.servlet;
+
+import java.util.Enumeration;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.logging.Log;
+
+/** Static utilities related to HTTP handling around JCR servlets. */
+public class JcrHttpUtils {
+ public final static String HEADER_AUTHORIZATION = "Authorization";
+ public final static String HEADER_WWW_AUTHENTICATE = "WWW-Authenticate";
+
+ public final static String DEFAULT_PROTECTED_HANDLERS = "/org/argeo/cms/jcr/internal/servlet/protectedHandlers.xml";
+ public final static String WEBDAV_CONFIG = "/org/argeo/cms/jcr/internal/servlet/webdav-config.xml";
+
+ /**
+ * Heuristic detection of interactive browsers from a User-Agent string.
+ * The argument is expected to be already lower-cased by the caller.
+ */
+ static boolean isBrowser(String userAgent) {
+ return userAgent.contains("webkit") || userAgent.contains("gecko") || userAgent.contains("firefox")
+ || userAgent.contains("msie") || userAgent.contains("chrome") || userAgent.contains("chromium")
+ || userAgent.contains("opera") || userAgent.contains("browser");
+ }
+
+ /** Logs all response headers, if debug is enabled on this log. */
+ public static void logResponseHeaders(Log log, HttpServletResponse response) {
+ if (!log.isDebugEnabled())
+ return;
+ for (String headerName : response.getHeaderNames()) {
+ Object headerValue = response.getHeader(headerName);
+ log.debug(headerName + ": " + headerValue);
+ }
+ }
+
+ /** Logs all request headers and the request URI, if debug is enabled. */
+ public static void logRequestHeaders(Log log, HttpServletRequest request) {
+ if (!log.isDebugEnabled())
+ return;
+ for (Enumeration<String> headerNames = request.getHeaderNames(); headerNames.hasMoreElements();) {
+ String headerName = headerNames.nextElement();
+ Object headerValue = request.getHeader(headerName);
+ log.debug(headerName + ": " + headerValue);
+ }
+ log.debug(request.getRequestURI() + "\n");
+ }
+
+ /** Logs request paths, query string, headers and attributes for debugging. */
+ public static void logRequest(Log log, HttpServletRequest request) {
+ // guard added for consistency with the other log* methods: avoids
+ // building the whole dump when debug logging is disabled
+ if (!log.isDebugEnabled())
+ return;
+ log.debug("contextPath=" + request.getContextPath());
+ log.debug("servletPath=" + request.getServletPath());
+ log.debug("requestURI=" + request.getRequestURI());
+ log.debug("queryString=" + request.getQueryString());
+ StringBuilder buf = new StringBuilder();
+ // headers (multi-valued headers are listed once per value)
+ Enumeration<String> en = request.getHeaderNames();
+ while (en.hasMoreElements()) {
+ String header = en.nextElement();
+ Enumeration<String> values = request.getHeaders(header);
+ while (values.hasMoreElements())
+ buf.append(" " + header + ": " + values.nextElement());
+ buf.append('\n');
+ }
+
+ // attributes
+ Enumeration<String> an = request.getAttributeNames();
+ while (an.hasMoreElements()) {
+ String attr = an.nextElement();
+ Object value = request.getAttribute(attr);
+ buf.append(" " + attr + ": " + value);
+ buf.append('\n');
+ }
+ log.debug("\n" + buf);
+ }
+
+ /** Singleton utility class: not meant to be instantiated. */
+ private JcrHttpUtils() {
+
+ }
+}
--- /dev/null
+package org.argeo.cms.jcr.internal.servlet;
+
+import static javax.jcr.Property.JCR_DESCRIPTION;
+import static javax.jcr.Property.JCR_LAST_MODIFIED;
+import static javax.jcr.Property.JCR_TITLE;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.security.PrivilegedExceptionAction;
+import java.util.Calendar;
+import java.util.Collection;
+
+import javax.jcr.Node;
+import javax.jcr.NodeIterator;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.security.auth.Subject;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.argeo.api.NodeConstants;
+import org.argeo.api.NodeUtils;
+import org.argeo.cms.CmsException;
+import org.argeo.jcr.JcrUtils;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.FrameworkUtil;
+import org.osgi.framework.ServiceReference;
+
+public class LinkServlet extends HttpServlet {
+ private final BundleContext bc = FrameworkUtil.getBundle(getClass()).getBundleContext();
+
+ private static final long serialVersionUID = 3749990143146845708L;
+
+ @Override
+ protected void service(HttpServletRequest request, HttpServletResponse response)
+ throws ServletException, IOException {
+ String path = request.getPathInfo();
+ String userAgent = request.getHeader("User-Agent").toLowerCase();
+ boolean isBot = false;
+ // boolean isCompatibleBrowser = false;
+ if (userAgent.contains("bot") || userAgent.contains("facebook") || userAgent.contains("twitter")) {
+ isBot = true;
+ }
+ // else if (userAgent.contains("webkit") ||
+ // userAgent.contains("gecko") || userAgent.contains("firefox")
+ // || userAgent.contains("msie") || userAgent.contains("chrome") ||
+ // userAgent.contains("chromium")
+ // || userAgent.contains("opera") || userAgent.contains("browser"))
+ // {
+ // isCompatibleBrowser = true;
+ // }
+
+ if (isBot) {
+ // log.warn("# BOT " + request.getHeader("User-Agent"));
+ canonicalAnswer(request, response, path);
+ return;
+ }
+
+ // if (isCompatibleBrowser && log.isTraceEnabled())
+ // log.trace("# BWS " + request.getHeader("User-Agent"));
+ redirectTo(response, "/#" + path);
+ }
+
+ private void redirectTo(HttpServletResponse response, String location) {
+ response.setHeader("Location", location);
+ response.setStatus(HttpServletResponse.SC_FOUND);
+ }
+
+ // private boolean canonicalAnswerNeededBy(HttpServletRequest request) {
+ // String userAgent = request.getHeader("User-Agent").toLowerCase();
+ // return userAgent.startsWith("facebookexternalhit/");
+ // }
+
+ /** For bots which don't understand RWT. */
+ private void canonicalAnswer(HttpServletRequest request, HttpServletResponse response, String path) {
+ Session session = null;
+ try {
+ PrintWriter writer = response.getWriter();
+ session = Subject.doAs(anonymousLogin(), new PrivilegedExceptionAction<Session>() {
+
+ @Override
+ public Session run() throws Exception {
+ Collection<ServiceReference<Repository>> srs = bc.getServiceReferences(Repository.class,
+ "(" + NodeConstants.CN + "=" + NodeConstants.EGO_REPOSITORY + ")");
+ Repository repository = bc.getService(srs.iterator().next());
+ return repository.login();
+ }
+
+ });
+ Node node = session.getNode(path);
+ String title = node.hasProperty(JCR_TITLE) ? node.getProperty(JCR_TITLE).getString() : node.getName();
+ String desc = node.hasProperty(JCR_DESCRIPTION) ? node.getProperty(JCR_DESCRIPTION).getString() : null;
+ Calendar lastUpdate = node.hasProperty(JCR_LAST_MODIFIED) ? node.getProperty(JCR_LAST_MODIFIED).getDate()
+ : null;
+ String url = getCanonicalUrl(node, request);
+ String imgUrl = null;
+ // TODO support images
+// loop: for (NodeIterator it = node.getNodes(); it.hasNext();) {
+// // Takes the first found cms:image
+// Node child = it.nextNode();
+// if (child.isNodeType(CMS_IMAGE)) {
+// imgUrl = getDataUrl(child, request);
+// break loop;
+// }
+// }
+ StringBuilder buf = new StringBuilder();
+ buf.append("<html>");
+ buf.append("<head>");
+ writeMeta(buf, "og:title", escapeHTML(title));
+ writeMeta(buf, "og:type", "website");
+ buf.append("<meta name='twitter:card' content='summary' />");
+ buf.append("<meta name='twitter:site' content='@argeo_org' />");
+ writeMeta(buf, "og:url", url);
+ if (desc != null)
+ writeMeta(buf, "og:description", escapeHTML(desc));
+ if (imgUrl != null)
+ writeMeta(buf, "og:image", imgUrl);
+ if (lastUpdate != null)
+ writeMeta(buf, "og:updated_time", Long.toString(lastUpdate.getTime().getTime()));
+ buf.append("</head>");
+ buf.append("<body>");
+ buf.append("<p><b>!! This page is meant for indexing robots, not for real people," + " visit <a href='/#")
+ .append(path).append("'>").append(escapeHTML(title)).append("</a> instead.</b></p>");
+ writeCanonical(buf, node);
+ buf.append("</body>");
+ buf.append("</html>");
+ writer.print(buf.toString());
+
+ response.setHeader("Content-Type", "text/html");
+ writer.flush();
+ } catch (Exception e) {
+ throw new CmsException("Cannot write canonical answer", e);
+ } finally {
+ JcrUtils.logoutQuietly(session);
+ }
+ }
+
+ /**
+ * From http://stackoverflow.com/questions/1265282/recommended-method-for-
+ * escaping-html-in-java (+ escaping '). TODO Use
+ * org.apache.commons.lang.StringEscapeUtils
+ */
+ private String escapeHTML(String s) {
+ StringBuilder out = new StringBuilder(Math.max(16, s.length()));
+ for (int i = 0; i < s.length(); i++) {
+ char c = s.charAt(i);
+ if (c > 127 || c == '\'' || c == '"' || c == '<' || c == '>' || c == '&') {
+ out.append("&#");
+ out.append((int) c);
+ out.append(';');
+ } else {
+ out.append(c);
+ }
+ }
+ return out.toString();
+ }
+
+ private void writeMeta(StringBuilder buf, String tag, String value) {
+ buf.append("<meta property='").append(tag).append("' content='").append(value).append("'/>");
+ }
+
+ private void writeCanonical(StringBuilder buf, Node node) throws RepositoryException {
+ buf.append("<div>");
+ if (node.hasProperty(JCR_TITLE))
+ buf.append("<p>").append(node.getProperty(JCR_TITLE).getString()).append("</p>");
+ if (node.hasProperty(JCR_DESCRIPTION))
+ buf.append("<p>").append(node.getProperty(JCR_DESCRIPTION).getString()).append("</p>");
+ NodeIterator children = node.getNodes();
+ while (children.hasNext()) {
+ writeCanonical(buf, children.nextNode());
+ }
+ buf.append("</div>");
+ }
+
+ // DATA
+ private StringBuilder getServerBaseUrl(HttpServletRequest request) {
+ try {
+ URL url = new URL(request.getRequestURL().toString());
+ StringBuilder buf = new StringBuilder();
+ buf.append(url.getProtocol()).append("://").append(url.getHost());
+ if (url.getPort() != -1)
+ buf.append(':').append(url.getPort());
+ return buf;
+ } catch (MalformedURLException e) {
+ throw new CmsException("Cannot extract server base URL from " + request.getRequestURL(), e);
+ }
+ }
+
+ private String getDataUrl(Node node, HttpServletRequest request) throws RepositoryException {
+ try {
+ StringBuilder buf = getServerBaseUrl(request);
+ buf.append(NodeUtils.getDataPath(NodeConstants.EGO_REPOSITORY, node));
+ return new URL(buf.toString()).toString();
+ } catch (MalformedURLException e) {
+ throw new CmsException("Cannot build data URL for " + node, e);
+ }
+ }
+
+ // public static String getDataPath(Node node) throws
+ // RepositoryException {
+ // assert node != null;
+ // String userId = node.getSession().getUserID();
+ //// if (log.isTraceEnabled())
+ //// log.trace(userId + " : " + node.getPath());
+ // StringBuilder buf = new StringBuilder();
+ // boolean isAnonymous =
+ // userId.equalsIgnoreCase(NodeConstants.ROLE_ANONYMOUS);
+ // if (isAnonymous)
+ // buf.append(WEBDAV_PUBLIC);
+ // else
+ // buf.append(WEBDAV_PRIVATE);
+ // Session session = node.getSession();
+ // Repository repository = session.getRepository();
+ // String cn;
+ // if (repository.isSingleValueDescriptor(NodeConstants.CN)) {
+ // cn = repository.getDescriptor(NodeConstants.CN);
+ // } else {
+ //// log.warn("No cn defined in repository, using " +
+ // NodeConstants.NODE);
+ // cn = NodeConstants.NODE;
+ // }
+ // return
+ // buf.append('/').append(cn).append('/').append(session.getWorkspace().getName()).append(node.getPath())
+ // .toString();
+ // }
+
+ private String getCanonicalUrl(Node node, HttpServletRequest request) throws RepositoryException {
+ try {
+ StringBuilder buf = getServerBaseUrl(request);
+ buf.append('/').append('!').append(node.getPath());
+ return new URL(buf.toString()).toString();
+ } catch (MalformedURLException e) {
+ throw new CmsException("Cannot build data URL for " + node, e);
+ }
+ // return request.getRequestURL().append('!').append(node.getPath())
+ // .toString();
+ }
+
+ private Subject anonymousLogin() {
+ Subject subject = new Subject();
+ LoginContext lc;
+ try {
+ lc = new LoginContext(NodeConstants.LOGIN_CONTEXT_ANONYMOUS, subject);
+ lc.login();
+ return subject;
+ } catch (LoginException e) {
+ throw new CmsException("Cannot login as anonymous", e);
+ }
+ }
+
+}
--- /dev/null
+<!--
+ Jackrabbit DavEx protected item remove handler configuration.
+ AclRemoveHandler handles removal of access control content which is
+ otherwise protected from direct modification.
+-->
+<config>
+ <protecteditemremovehandler>
+ <class name="org.apache.jackrabbit.server.remoting.davex.AclRemoveHandler" />
+ </protecteditemremovehandler>
+</config>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ -->
+<!--
+<!DOCTYPE config [
+ <!ELEMENT config (iomanager , propertymanager, (collection | noncollection)? , filter?, mimetypeproperties?) >
+
+ <!ELEMENT iomanager (class, iohandler*) >
+ <!ELEMENT iohandler (class) >
+
+ <!ELEMENT propertymanager (class, propertyhandler*) >
+ <!ELEMENT propertyhandler (class) >
+
+ <!ELEMENT collection (nodetypes) >
+ <!ELEMENT noncollection (nodetypes) >
+
+ <!ELEMENT filter (class, namespaces?, nodetypes?) >
+
+ <!ELEMENT class >
+ <!ATTLIST class
+ name CDATA #REQUIRED
+ >
+ <!ELEMENT namespaces (prefix | uri)* >
+ <!ELEMENT prefix (CDATA) >
+ <!ELEMENT uri (CDATA) >
+
+ <!ELEMENT nodetypes (nodetype)* >
+ <!ELEMENT nodetype (CDATA) >
+
+ <!ELEMENT mimetypeproperties (mimemapping*, defaultmimetype) >
+
+ <!ELEMENT mimemapping >
+ <!ATTLIST mimemapping
+ extension CDATA #REQUIRED
+ mimetype CDATA #REQUIRED
+ >
+
+ <!ELEMENT defaultmimetype (CDATA) >
+]>
+-->
+
+<config>
+ <!--
+ Defines the IOManager implementation that is responsible for passing
+ import/export request to the individual IO-handlers.
+ -->
+ <iomanager>
+ <!-- class element defines the manager to be used. The specified class
+ must implement the IOManager interface.
+ Note, that the handlers are being added and called in the order
+ they appear in the configuration.
+ -->
+ <class name="org.apache.jackrabbit.server.io.IOManagerImpl" />
+ <iohandler>
+ <class name="org.apache.jackrabbit.server.io.VersionHandler" />
+ </iohandler>
+ <iohandler>
+ <class name="org.apache.jackrabbit.server.io.VersionHistoryHandler" />
+ </iohandler>
+<!-- <iohandler> -->
+<!-- <class name="org.apache.jackrabbit.server.io.ZipHandler" /> -->
+<!-- </iohandler> -->
+<!-- <iohandler> -->
+<!-- <class name="org.apache.jackrabbit.server.io.XmlHandler" /> -->
+<!-- </iohandler> -->
+ <iohandler>
+ <class name="org.apache.jackrabbit.server.io.DirListingExportHandler" />
+ </iohandler>
+ <iohandler>
+ <class name="org.apache.jackrabbit.server.io.DefaultHandler" />
+ </iohandler>
+ </iomanager>
+ <!--
+ Example config for iomanager that populates its list of handlers with
+ default values. Therefore the 'iohandler' elements are omitted.
+ -->
+ <!--
+ <iomanager>
+ <class name="org.apache.jackrabbit.server.io.DefaultIOManager" />
+ </iomanager>
+ -->
+ <!--
+ Defines the PropertyManager implementation that is responsible for export
+ and import of resource properties.
+ -->
+ <propertymanager>
+ <!-- class element defines the manager to be used. The specified class
+ must implement the PropertyManager interface.
+ Note, that the handlers are being added and called in the order
+ they appear in the configuration.
+ -->
+ <class name="org.apache.jackrabbit.server.io.PropertyManagerImpl" />
+ <propertyhandler>
+ <class name="org.apache.jackrabbit.server.io.VersionHandler" />
+ </propertyhandler>
+ <propertyhandler>
+ <class name="org.apache.jackrabbit.server.io.VersionHistoryHandler" />
+ </propertyhandler>
+<!-- <propertyhandler> -->
+<!-- <class name="org.apache.jackrabbit.server.io.ZipHandler" /> -->
+<!-- </propertyhandler> -->
+<!-- <propertyhandler> -->
+<!-- <class name="org.apache.jackrabbit.server.io.XmlHandler" /> -->
+<!-- </propertyhandler> -->
+ <propertyhandler>
+ <class name="org.apache.jackrabbit.server.io.DefaultHandler" />
+ </propertyhandler>
+ </propertymanager>
+ <!--
+ Define nodetypes, that should never by displayed as 'collection'
+ -->
+ <noncollection>
+ <nodetypes>
+ <nodetype>nt:file</nodetype>
+ <nodetype>nt:resource</nodetype>
+ </nodetypes>
+ </noncollection>
+ <!--
+ Example: Defines nodetypes, that should always be displayed as 'collection'.
+ -->
+ <!--
+ <collection>
+ <nodetypes>
+ <nodetype>nt:folder</nodetype>
+ <nodetype>rep:root</nodetype>
+ </nodetypes>
+ </collection>
+ -->
+ <!--
+ Filter that allows to prevent certain items from being displayed.
+ Please note, that this has an effect on PROPFIND calls only and does not
+ provide limited access to those items matching any of the filters.
+
+ However specifying a filter may cause problems with PUT or MKCOL if the
+ resource to be created is being filtered out, thus resulting in inconsistent
+ responses (e.g. PUT followed by PROPFIND on parent).
+ -->
+ <filter>
+ <!-- class element defines the resource filter to be used. The specified class
+ must implement the ItemFilter interface -->
+ <class name="org.apache.jackrabbit.webdav.simple.DefaultItemFilter" />
+ <!--
+ Nodetype names to be used to filter child nodes.
+ A child node can be filtered if the declaring nodetype of its definition
+ is one of the nodetype names specified in the nodetypes Element.
+ E.g. defining 'rep:root' as filtered nodetype would result in jcr:system
+ being hidden but no other child node of the root node, since those
+ are defined by the nodetype nt:unstructured.
+ -->
+ <!--
+ <nodetypes>
+ <nodetype>rep:root</nodetype>
+ </nodetypes>
+ -->
+ <!--
+ Namespace prefixes or uris. Items having a name that matches any of the
+ entries will be filtered.
+ -->
+ <namespaces>
+ <prefix>rep</prefix>
+ <prefix>jcr</prefix>
+ <!-- Argeo namespaces -->
+ <prefix>node</prefix>
+ <prefix>argeo</prefix>
+ <prefix>cms</prefix>
+ <prefix>slc</prefix>
+ <prefix>connect</prefix>
+ <prefix>activities</prefix>
+ <prefix>people</prefix>
+ <prefix>documents</prefix>
+ <prefix>tracker</prefix>
+ <!--
+ <uri>internal</uri>
+ <uri>http://www.jcp.org/jcr/1.0</uri>
+ -->
+ </namespaces>
+ </filter>
+
+ <!--
+ Optional 'mimetypeproperties' element.
+ It defines additional or replaces existing mappings for the MimeResolver
+ instance created by the ResourceConfig.
+ The default mappings are defined in org.apache.jackrabbit.server.io.mimetypes.properties.
+ The default mime type defined by MimeResolver is 'application/octet-stream'.
+ -->
+ <!--
+ <mimetypeproperties>
+ <mimemapping extension="rtf" mimetype="application/rtf" />
+ <mimemapping extension="ott" mimetype="application/vnd.oasis.opendocument.text-template" />
+ <defaultmimetype>text/html</defaultmimetype>
+ </mimetypeproperties>
+ -->
+</config>
--- /dev/null
+<ldap = 'http://www.argeo.org/ns/ldap'>
--- /dev/null
+<node = 'http://www.argeo.org/ns/node'>
+
+[node:userHome]
+mixin
+- ldap:uid (STRING) m
+
+[node:groupHome]
+mixin
+- ldap:cn (STRING) m
--- /dev/null
+package org.argeo.cms.tabular;
+
+import java.io.OutputStream;
+
+import org.argeo.api.tabular.TabularWriter;
+import org.argeo.util.CsvWriter;
+
+/** Writes tabular content to an output stream as CSV, delegating to a {@link CsvWriter}. */
+public class CsvTabularWriter implements TabularWriter {
+ private final CsvWriter csvWriter;
+
+ public CsvTabularWriter(OutputStream out) {
+ csvWriter = new CsvWriter(out);
+ }
+
+ /** Appends one row, serialised by the underlying CSV writer. */
+ public void appendRow(Object[] row) {
+ csvWriter.writeLine(row);
+ }
+
+ /** No-op; presumably the output stream's lifecycle belongs to the caller. */
+ public void close() {
+ }
+
+}
--- /dev/null
+package org.argeo.cms.tabular;
+
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ArrayBlockingQueue;
+
+import javax.jcr.Binary;
+import javax.jcr.Node;
+import javax.jcr.NodeIterator;
+import javax.jcr.Property;
+import javax.jcr.PropertyType;
+import javax.jcr.RepositoryException;
+
+import org.apache.commons.io.IOUtils;
+import org.argeo.api.tabular.ArrayTabularRow;
+import org.argeo.api.tabular.TabularColumn;
+import org.argeo.api.tabular.TabularRow;
+import org.argeo.api.tabular.TabularRowIterator;
+import org.argeo.cms.ArgeoTypes;
+import org.argeo.jcr.JcrException;
+import org.argeo.util.CsvParser;
+
+/** Iterates over the rows of a {@link ArgeoTypes#ARGEO_TABLE} node. */
+public class JcrTabularRowIterator implements TabularRowIterator {
+ // null until the parser thread has produced at least one line
+ private Boolean hasNext = null;
+ // set by the parser thread once the whole CSV stream has been consumed
+ private Boolean parsingCompleted = false;
+
+ private Long currentRowNumber = 0l;
+
+ private List<TabularColumn> header = new ArrayList<TabularColumn>();
+
+ /** referenced so that we can close it */
+ private Binary binary;
+ private InputStream in;
+
+ private CsvParser csvParser;
+ // bounded hand-off buffer between the parser thread and the consumer
+ private ArrayBlockingQueue<List<String>> textLines;
+
+ /**
+ * Reads the column definitions from the ARGEO_COLUMN child nodes, then, for
+ * ARGEO_CSV content, starts a background thread which parses the binary
+ * content into the textLines queue.
+ */
+ public JcrTabularRowIterator(Node tableNode) {
+ try {
+ for (NodeIterator it = tableNode.getNodes(); it.hasNext();) {
+ Node node = it.nextNode();
+ if (node.isNodeType(ArgeoTypes.ARGEO_COLUMN)) {
+ Integer type = PropertyType.valueFromName(node.getProperty(
+ Property.JCR_REQUIRED_TYPE).getString());
+ TabularColumn tc = new TabularColumn(node.getProperty(
+ Property.JCR_TITLE).getString(), type);
+ header.add(tc);
+ }
+ }
+ Node contentNode = tableNode.getNode(Property.JCR_CONTENT);
+ if (contentNode.isNodeType(ArgeoTypes.ARGEO_CSV)) {
+ textLines = new ArrayBlockingQueue<List<String>>(1000);
+ csvParser = new CsvParser() {
+ protected void processLine(Integer lineNumber,
+ List<String> header, List<String> tokens) {
+ try {
+ // blocks when the queue is full (back-pressure)
+ textLines.put(tokens);
+ } catch (InterruptedException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ // textLines.add(tokens);
+ if (hasNext == null) {
+ // first line produced: wake up any waiting consumer
+ hasNext = true;
+ synchronized (JcrTabularRowIterator.this) {
+ JcrTabularRowIterator.this.notifyAll();
+ }
+ }
+ }
+ };
+ csvParser.setNoHeader(true);
+ binary = contentNode.getProperty(Property.JCR_DATA).getBinary();
+ in = binary.getStream();
+ Thread thread = new Thread(contentNode.getPath() + " reader") {
+ public void run() {
+ try {
+ csvParser.parse(in);
+ } finally {
+ parsingCompleted = true;
+ IOUtils.closeQuietly(in);
+ }
+ }
+ };
+ thread.start();
+ }
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot read table " + tableNode, e);
+ }
+ }
+
+ /**
+ * Whether another row is (or may become) available. Polls until either a
+ * line is buffered or the parser thread has flagged completion.
+ */
+ public synchronized boolean hasNext() {
+ // we don't know if there is anything available
+ // while (hasNext == null)
+ // try {
+ // wait();
+ // } catch (InterruptedException e) {
+ // // silent
+ // // FIXME better deal with interruption
+ // Thread.currentThread().interrupt();
+ // break;
+ // }
+
+ // buffer not empty
+ if (!textLines.isEmpty())
+ return true;
+
+ // maybe the parsing is finished but the flag has not been set
+ while (!parsingCompleted && textLines.isEmpty())
+ try {
+ wait(100);
+ } catch (InterruptedException e) {
+ // silent
+ // FIXME better deal with interruption
+ Thread.currentThread().interrupt();
+ break;
+ }
+
+ // buffer not empty
+ if (!textLines.isEmpty())
+ return true;
+
+ // (parsingCompleted && textLines.isEmpty())
+ return false;
+
+ // if (!hasNext && textLines.isEmpty()) {
+ // if (in != null) {
+ // IOUtils.closeQuietly(in);
+ // in = null;
+ // }
+ // if (binary != null) {
+ // JcrUtils.closeQuietly(binary);
+ // binary = null;
+ // }
+ // return false;
+ // } else
+ // return true;
+ }
+
+ /**
+ * Takes the next buffered line and wraps it in a row. NOTE(review): take()
+ * can block indefinitely if called without a prior successful hasNext();
+ * returns null when interrupted.
+ */
+ public synchronized TabularRow next() {
+ try {
+ List<String> tokens = textLines.take();
+ List<Object> objs = new ArrayList<Object>(tokens.size());
+ for (String token : tokens) {
+ // TODO convert to other formats using header
+ objs.add(token);
+ }
+ currentRowNumber++;
+ return new ArrayTabularRow(objs);
+ } catch (InterruptedException e) {
+ // silent
+ // FIXME better deal with interruption
+ }
+ return null;
+ }
+
+ /** Not supported. */
+ public void remove() {
+ throw new UnsupportedOperationException();
+ }
+
+ public Long getCurrentRowNumber() {
+ return currentRowNumber;
+ }
+
+ public List<TabularColumn> getHeader() {
+ return header;
+ }
+
+}
--- /dev/null
+package org.argeo.cms.tabular;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.InputStream;
+import java.util.List;
+
+import javax.jcr.Binary;
+import javax.jcr.Node;
+import javax.jcr.Property;
+import javax.jcr.PropertyType;
+import javax.jcr.RepositoryException;
+
+import org.apache.commons.io.IOUtils;
+import org.argeo.api.tabular.TabularColumn;
+import org.argeo.api.tabular.TabularWriter;
+import org.argeo.cms.ArgeoTypes;
+import org.argeo.jcr.JcrException;
+import org.argeo.jcr.JcrUtils;
+import org.argeo.util.CsvWriter;
+
+/** Write / reference tabular content in a JCR repository. */
+public class JcrTabularWriter implements TabularWriter {
+ private Node contentNode;
+ // in-memory buffer and writer, only initialized for ARGEO_CSV content
+ private ByteArrayOutputStream out;
+ private CsvWriter csvWriter;
+
+ @SuppressWarnings("unused")
+ private final List<TabularColumn> columns;
+
+ /**
+ * Creates a table node: one ARGEO_COLUMN child per column (typed via
+ * jcr:requiredType, defaulting to STRING) and a jcr:content child of the
+ * given node type. CSV buffering is only set up when the content node type
+ * is ARGEO_CSV.
+ */
+ public JcrTabularWriter(Node tableNode, List<TabularColumn> columns,
+ String contentNodeType) {
+ try {
+ this.columns = columns;
+ for (TabularColumn column : columns) {
+ // JCR node names cannot contain arbitrary characters
+ String normalized = JcrUtils.replaceInvalidChars(column
+ .getName());
+ Node columnNode = tableNode.addNode(normalized,
+ ArgeoTypes.ARGEO_COLUMN);
+ columnNode.setProperty(Property.JCR_TITLE, column.getName());
+ if (column.getType() != null)
+ columnNode.setProperty(Property.JCR_REQUIRED_TYPE,
+ PropertyType.nameFromValue(column.getType()));
+ else
+ columnNode.setProperty(Property.JCR_REQUIRED_TYPE,
+ PropertyType.TYPENAME_STRING);
+ }
+ contentNode = tableNode.addNode(Property.JCR_CONTENT,
+ contentNodeType);
+ if (contentNodeType.equals(ArgeoTypes.ARGEO_CSV)) {
+ contentNode.setProperty(Property.JCR_MIMETYPE, "text/csv");
+ contentNode.setProperty(Property.JCR_ENCODING, "UTF-8");
+ out = new ByteArrayOutputStream();
+ csvWriter = new CsvWriter(out);
+ }
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot create table node " + tableNode, e);
+ }
+ }
+
+ /** Appends a row to the CSV buffer. */
+ public void appendRow(Object[] row) {
+ // was a NullPointerException when the content node type is not CSV
+ if (csvWriter == null)
+ throw new IllegalStateException(
+ "Cannot append row: content node type is not " + ArgeoTypes.ARGEO_CSV);
+ csvWriter.writeLine(row);
+ }
+
+ /** Stores the buffered CSV data in the jcr:content node's jcr:data property. */
+ public void close() {
+ // nothing was buffered (non-CSV content type): was an NPE before
+ if (out == null)
+ return;
+ Binary binary = null;
+ InputStream in = null;
+ try {
+ // TODO parallelize with pipes and writing from another thread
+ in = new ByteArrayInputStream(out.toByteArray());
+ binary = contentNode.getSession().getValueFactory()
+ .createBinary(in);
+ contentNode.setProperty(Property.JCR_DATA, binary);
+ } catch (RepositoryException e) {
+ throw new JcrException("Cannot store data in " + contentNode, e);
+ } finally {
+ IOUtils.closeQuietly(in);
+ JcrUtils.closeQuietly(binary);
+ }
+ }
+}
--- /dev/null
+/** Argeo CMS implementation of the Argeo Tabular API (CSV, JCR). */
+package org.argeo.cms.tabular;
\ No newline at end of file
package org.argeo.jackrabbit;
-import java.awt.geom.CubicCurve2D;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
--- /dev/null
+package org.argeo.security.jackrabbit;
+
+import java.security.Principal;
+import java.util.Map;
+import java.util.Set;
+
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+import org.apache.jackrabbit.core.security.authorization.acl.ACLProvider;
+
+/**
+ * Argeo specific access control provider. Forces
+ * {@code PARAM_ALLOW_UNKNOWN_PRINCIPALS} and
+ * {@code PARAM_OMIT_DEFAULT_PERMISSIONS} to "true" unless they are explicitly
+ * set in the provided configuration.
+ */
+public class ArgeoAccessControlProvider extends ACLProvider {
+
+	@SuppressWarnings({ "rawtypes", "unchecked" })
+	@Override
+	public void init(Session systemSession, Map configuration) throws RepositoryException {
+		// default to "true" only when the deployer has not configured it
+		if (!configuration.containsKey(PARAM_ALLOW_UNKNOWN_PRINCIPALS))
+			configuration.put(PARAM_ALLOW_UNKNOWN_PRINCIPALS, "true");
+		if (!configuration.containsKey(PARAM_OMIT_DEFAULT_PERMISSIONS))
+			configuration.put(PARAM_OMIT_DEFAULT_PERMISSIONS, "true");
+		super.init(systemSession, configuration);
+	}
+
+	/** Pure delegation to {@link ACLProvider}; kept as an extension point. */
+	@Override
+	public boolean canAccessRoot(Set<Principal> principals) throws RepositoryException {
+		return super.canAccessRoot(principals);
+	}
+
+}
--- /dev/null
+package org.argeo.security.jackrabbit;
+
+import javax.jcr.PathNotFoundException;
+import javax.jcr.RepositoryException;
+import javax.jcr.security.Privilege;
+
+import org.apache.jackrabbit.core.id.ItemId;
+import org.apache.jackrabbit.core.security.DefaultAccessManager;
+import org.apache.jackrabbit.spi.Path;
+
+/**
+ * Intermediary class providing a consistent naming in configuration files.
+ * All methods currently delegate to {@link DefaultAccessManager}; overrides
+ * are kept as explicit extension points for future customisation.
+ */
+public class ArgeoAccessManager extends DefaultAccessManager {
+
+	@Override
+	public boolean canRead(Path itemPath, ItemId itemId)
+			throws RepositoryException {
+		return super.canRead(itemPath, itemId);
+	}
+
+	@Override
+	public Privilege[] getPrivileges(String absPath)
+			throws PathNotFoundException, RepositoryException {
+		return super.getPrivileges(absPath);
+	}
+
+	@Override
+	public boolean hasPrivileges(String absPath, Privilege[] privileges)
+			throws PathNotFoundException, RepositoryException {
+		return super.hasPrivileges(absPath, privileges);
+	}
+
+}
--- /dev/null
+package org.argeo.security.jackrabbit;
+
+import javax.security.auth.Subject;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+
+import org.apache.jackrabbit.core.security.authentication.AuthContext;
+
+/**
+ * Wraps a regular {@link LoginContext} behind Jackrabbit's
+ * {@link AuthContext} interface.
+ * NOTE(review): the javadoc previously claimed "using the proper class
+ * loader", but no explicit class-loader handling is visible here — confirm
+ * whether a TCCL switch is still required.
+ */
+class ArgeoAuthContext implements AuthContext {
+	private LoginContext lc;
+
+	public ArgeoAuthContext(String appName, Subject subject, CallbackHandler callbackHandler) {
+		try {
+			lc = new LoginContext(appName, subject, callbackHandler);
+		} catch (LoginException e) {
+			// configuration errors are not recoverable at this point
+			throw new IllegalStateException("Cannot configure Jackrabbit login context", e);
+		}
+	}
+
+	/** Delegates to the wrapped {@link LoginContext#login()}. */
+	@Override
+	public void login() throws LoginException {
+		lc.login();
+	}
+
+	@Override
+	public Subject getSubject() {
+		return lc.getSubject();
+	}
+
+	/** Delegates to the wrapped {@link LoginContext#logout()}. */
+	@Override
+	public void logout() throws LoginException {
+		lc.logout();
+	}
+
+}
--- /dev/null
+package org.argeo.security.jackrabbit;
+
+import java.security.Principal;
+import java.util.HashSet;
+import java.util.Properties;
+import java.util.Set;
+
+import javax.jcr.Credentials;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.security.auth.Subject;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.x500.X500Principal;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.api.security.user.UserManager;
+import org.apache.jackrabbit.core.DefaultSecurityManager;
+import org.apache.jackrabbit.core.security.AMContext;
+import org.apache.jackrabbit.core.security.AccessManager;
+import org.apache.jackrabbit.core.security.SecurityConstants;
+import org.apache.jackrabbit.core.security.SystemPrincipal;
+import org.apache.jackrabbit.core.security.authentication.AuthContext;
+import org.apache.jackrabbit.core.security.authentication.CallbackHandlerImpl;
+import org.apache.jackrabbit.core.security.authorization.WorkspaceAccessManager;
+import org.apache.jackrabbit.core.security.principal.AdminPrincipal;
+import org.apache.jackrabbit.core.security.principal.PrincipalProvider;
+import org.argeo.api.NodeConstants;
+import org.argeo.api.security.AnonymousPrincipal;
+import org.argeo.api.security.DataAdminPrincipal;
+import org.argeo.cms.auth.CmsSession;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.FrameworkUtil;
+
+/**
+ * Customises Jackrabbit security: JAAS-based authentication context, a
+ * permissive workspace access manager, and mapping of Argeo principals
+ * (anonymous, data admin, X.500 user) to Jackrabbit user IDs.
+ */
+public class ArgeoSecurityManager extends DefaultSecurityManager {
+	private final static Log log = LogFactory.getLog(ArgeoSecurityManager.class);
+
+	// null when the CMS bundle is not available (e.g. outside OSGi)
+	private BundleContext cmsBundleContext = null;
+
+	public ArgeoSecurityManager() {
+		if (FrameworkUtil.getBundle(CmsSession.class) != null) {
+			cmsBundleContext = FrameworkUtil.getBundle(CmsSession.class).getBundleContext();
+		}
+	}
+
+	/**
+	 * Returns an {@link ArgeoAuthContext} wrapping a plain JAAS login context
+	 * for the hard-coded "Jackrabbit" application name. The workspace name is
+	 * not used.
+	 */
+	public AuthContext getAuthContext(Credentials creds, Subject subject, String workspaceName)
+			throws RepositoryException {
+		checkInitialized();
+
+		CallbackHandler cbHandler = new CallbackHandlerImpl(creds, getSystemSession(), getPrincipalProviderRegistry(),
+				adminId, anonymousId);
+		String appName = "Jackrabbit";
+		return new ArgeoAuthContext(appName, subject, cbHandler);
+	}
+
+	// serialise access manager creation on the system session
+	@Override
+	public AccessManager getAccessManager(Session session, AMContext amContext) throws RepositoryException {
+		synchronized (getSystemSession()) {
+			return super.getAccessManager(session, amContext);
+		}
+	}
+
+	// serialise user manager creation on the system session
+	@Override
+	public UserManager getUserManager(Session session) throws RepositoryException {
+		synchronized (getSystemSession()) {
+			return super.getUserManager(session);
+		}
+	}
+
+	/** Pure delegation; kept as an explicit extension point. */
+	@Override
+	protected PrincipalProvider createDefaultPrincipalProvider(Properties[] moduleConfig) throws RepositoryException {
+		return super.createDefaultPrincipalProvider(moduleConfig);
+	}
+
+	/**
+	 * Called once when the session is created. Maps the subject's principals
+	 * to a Jackrabbit user ID: anonymous role, X.500 user DN, data admin
+	 * role, or the default Jackrabbit system handling. The principal sets are
+	 * expected to be mutually exclusive; inconsistencies fail fast.
+	 */
+	@Override
+	public String getUserID(Subject subject, String workspaceName) throws RepositoryException {
+		boolean isAnonymous = !subject.getPrincipals(AnonymousPrincipal.class).isEmpty();
+		boolean isDataAdmin = !subject.getPrincipals(DataAdminPrincipal.class).isEmpty();
+		boolean isJackrabbitSystem = !subject.getPrincipals(SystemPrincipal.class).isEmpty();
+		Set<X500Principal> userPrincipal = subject.getPrincipals(X500Principal.class);
+		boolean isRegularUser = !userPrincipal.isEmpty();
+		CmsSession cmsSession = null;
+		if (cmsBundleContext != null) {
+			cmsSession = CmsSession.getCmsSession(cmsBundleContext, subject);
+			if (log.isTraceEnabled())
+				log.trace("Opening JCR session for CMS session " + cmsSession);
+		}
+
+		if (isAnonymous) {
+			// defensive consistency checks: principal kinds must not mix
+			if (isDataAdmin || isJackrabbitSystem || isRegularUser)
+				throw new IllegalStateException("Inconsistent " + subject);
+			else
+				return NodeConstants.ROLE_ANONYMOUS;
+		} else if (isRegularUser) {// must be before DataAdmin
+			if (isAnonymous || isJackrabbitSystem)
+				throw new IllegalStateException("Inconsistent " + subject);
+			else {
+				if (userPrincipal.size() > 1) {
+					StringBuilder buf = new StringBuilder();
+					for (X500Principal principal : userPrincipal)
+						buf.append(' ').append('\"').append(principal).append('\"');
+					throw new RuntimeException("Multiple user principals:" + buf);
+				}
+				// the user ID is the X.500 distinguished name
+				return userPrincipal.iterator().next().getName();
+			}
+		} else if (isDataAdmin) {
+			if (isAnonymous || isJackrabbitSystem || isRegularUser)
+				throw new IllegalStateException("Inconsistent " + subject);
+			else {
+				// SystemJackrabbitLoginModule adds the AdminPrincipal
+				assert !subject.getPrincipals(AdminPrincipal.class).isEmpty();
+				return NodeConstants.ROLE_DATA_ADMIN;
+			}
+		} else if (isJackrabbitSystem) {
+			if (isAnonymous || isDataAdmin || isRegularUser)
+				throw new IllegalStateException("Inconsistent " + subject);
+			else
+				return super.getUserID(subject, workspaceName);
+		} else {
+			throw new IllegalStateException("Unrecognized subject type: " + subject);
+		}
+	}
+
+	/** Wraps the default manager in {@link ArgeoWorkspaceAccessManagerImpl}. */
+	@Override
+	protected WorkspaceAccessManager createDefaultWorkspaceAccessManager() {
+		WorkspaceAccessManager wam = super.createDefaultWorkspaceAccessManager();
+		ArgeoWorkspaceAccessManagerImpl workspaceAccessManager = new ArgeoWorkspaceAccessManagerImpl(wam);
+		if (log.isTraceEnabled())
+			log.trace("Created workspace access manager");
+		return workspaceAccessManager;
+	}
+
+	/**
+	 * Delegating workspace access manager that currently grants access to
+	 * every workspace (see {@link #grants(Set, String)}).
+	 */
+	private class ArgeoWorkspaceAccessManagerImpl implements SecurityConstants, WorkspaceAccessManager {
+		private final WorkspaceAccessManager wam;
+
+		public ArgeoWorkspaceAccessManagerImpl(WorkspaceAccessManager wam) {
+			super();
+			this.wam = wam;
+		}
+
+		public void init(Session systemSession) throws RepositoryException {
+			wam.init(systemSession);
+			Repository repository = systemSession.getRepository();
+			if (log.isTraceEnabled())
+				log.trace("Initialised workspace access manager on repository " + repository
+						+ ", systemSession workspace: " + systemSession.getWorkspace().getName());
+		}
+
+		public void close() throws RepositoryException {
+		}
+
+		public boolean grants(Set<Principal> principals, String workspaceName) throws RepositoryException {
+			// TODO: implement finer access to workspaces
+			// NOTE(review): the wrapped manager is bypassed on purpose here;
+			// every authenticated subject is granted every workspace
+			if (log.isTraceEnabled())
+				log.trace("Grants " + new HashSet<>(principals) + " access to workspace '" + workspaceName + "'");
+			return true;
+			// return wam.grants(principals, workspaceName);
+		}
+	}
+
+}
--- /dev/null
+package org.argeo.security.jackrabbit;
+
+import java.util.Map;
+import java.util.Set;
+
+import javax.security.auth.Subject;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.login.LoginException;
+import javax.security.auth.spi.LoginModule;
+import javax.security.auth.x500.X500Principal;
+
+import org.apache.jackrabbit.core.security.AnonymousPrincipal;
+import org.apache.jackrabbit.core.security.SecurityConstants;
+import org.apache.jackrabbit.core.security.principal.AdminPrincipal;
+import org.argeo.api.security.DataAdminPrincipal;
+
+/**
+ * JAAS login module used when initiating a new Jackrabbit session. Translates
+ * Argeo principals already present on the (pre-authenticated) subject into
+ * the Jackrabbit principals Jackrabbit's security layer expects.
+ */
+public class SystemJackrabbitLoginModule implements LoginModule {
+	private Subject subject;
+
+	@Override
+	public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> sharedState,
+			Map<String, ?> options) {
+		this.subject = subject;
+	}
+
+	/** Always succeeds; the subject must already be authenticated. */
+	@Override
+	public boolean login() throws LoginException {
+		return true;
+	}
+
+	/**
+	 * Adds the matching Jackrabbit principal: Argeo anonymous maps to
+	 * Jackrabbit {@link AnonymousPrincipal}, Argeo data admin maps to
+	 * Jackrabbit {@link AdminPrincipal}; otherwise exactly one
+	 * {@link X500Principal} user must already be present.
+	 */
+	@Override
+	public boolean commit() throws LoginException {
+		Set<org.argeo.api.security.AnonymousPrincipal> anonPrincipal = subject
+				.getPrincipals(org.argeo.api.security.AnonymousPrincipal.class);
+		if (!anonPrincipal.isEmpty()) {
+			subject.getPrincipals().add(new AnonymousPrincipal());
+			return true;
+		}
+
+		Set<DataAdminPrincipal> initPrincipal = subject.getPrincipals(DataAdminPrincipal.class);
+		if (!initPrincipal.isEmpty()) {
+			subject.getPrincipals().add(new AdminPrincipal(SecurityConstants.ADMIN_ID));
+			return true;
+		}
+
+		Set<X500Principal> userPrincipal = subject.getPrincipals(X500Principal.class);
+		if (userPrincipal.isEmpty())
+			throw new LoginException("Subject must be pre-authenticated");
+		if (userPrincipal.size() > 1)
+			throw new LoginException("Multiple user principals " + userPrincipal);
+
+		return true;
+	}
+
+	@Override
+	public boolean abort() throws LoginException {
+		return true;
+	}
+
+	/** Removes only the Jackrabbit principals added by {@link #commit()}. */
+	@Override
+	public boolean logout() throws LoginException {
+		subject.getPrincipals().removeAll(subject.getPrincipals(AnonymousPrincipal.class));
+		subject.getPrincipals().removeAll(subject.getPrincipals(AdminPrincipal.class));
+		return true;
+	}
+}
--- /dev/null
+/** Integration of Jackrabbit with Argeo security model. */
+package org.argeo.security.jackrabbit;
\ No newline at end of file
+++ /dev/null
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=disabled
-org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore
-org.eclipse.jdt.core.compiler.annotation.nonnull=org.eclipse.jdt.annotation.NonNull
-org.eclipse.jdt.core.compiler.annotation.nonnull.secondary=
-org.eclipse.jdt.core.compiler.annotation.nonnullbydefault=org.eclipse.jdt.annotation.NonNullByDefault
-org.eclipse.jdt.core.compiler.annotation.nonnullbydefault.secondary=
-org.eclipse.jdt.core.compiler.annotation.nullable=org.eclipse.jdt.annotation.Nullable
-org.eclipse.jdt.core.compiler.annotation.nullable.secondary=
-org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled
-org.eclipse.jdt.core.compiler.problem.APILeak=warning
-org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=warning
-org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
-org.eclipse.jdt.core.compiler.problem.comparingIdentical=warning
-org.eclipse.jdt.core.compiler.problem.deadCode=warning
-org.eclipse.jdt.core.compiler.problem.deprecation=warning
-org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
-org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
-org.eclipse.jdt.core.compiler.problem.discouragedReference=warning
-org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore
-org.eclipse.jdt.core.compiler.problem.explicitlyClosedAutoCloseable=ignore
-org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore
-org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled
-org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore
-org.eclipse.jdt.core.compiler.problem.finalParameterBound=warning
-org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=warning
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=warning
-org.eclipse.jdt.core.compiler.problem.includeNullInfoFromAsserts=disabled
-org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=warning
-org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=warning
-org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore
-org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore
-org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=warning
-org.eclipse.jdt.core.compiler.problem.missingDefaultCase=ignore
-org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingEnumCaseDespiteDefault=disabled
-org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=ignore
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotationForInterfaceMethodImplementation=enabled
-org.eclipse.jdt.core.compiler.problem.missingSerialVersion=warning
-org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignore
-org.eclipse.jdt.core.compiler.problem.noEffectAssignment=warning
-org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=warning
-org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
-org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning
-org.eclipse.jdt.core.compiler.problem.nonnullTypeVariableFromLegacyInvocation=warning
-org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=error
-org.eclipse.jdt.core.compiler.problem.nullReference=warning
-org.eclipse.jdt.core.compiler.problem.nullSpecViolation=error
-org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning
-org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=warning
-org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.pessimisticNullAnalysisForFreeTypeVariables=warning
-org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore
-org.eclipse.jdt.core.compiler.problem.potentiallyUnclosedCloseable=ignore
-org.eclipse.jdt.core.compiler.problem.rawTypeReference=warning
-org.eclipse.jdt.core.compiler.problem.redundantNullAnnotation=warning
-org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore
-org.eclipse.jdt.core.compiler.problem.reportMethodCanBePotentiallyStatic=ignore
-org.eclipse.jdt.core.compiler.problem.reportMethodCanBeStatic=ignore
-org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled
-org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=warning
-org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=disabled
-org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
-org.eclipse.jdt.core.compiler.problem.suppressWarningsNotFullyAnalysed=info
-org.eclipse.jdt.core.compiler.problem.syntacticNullAnalysisForFields=disabled
-org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore
-org.eclipse.jdt.core.compiler.problem.terminalDeprecation=warning
-org.eclipse.jdt.core.compiler.problem.typeParameterHiding=warning
-org.eclipse.jdt.core.compiler.problem.unavoidableGenericTypeProblems=enabled
-org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=warning
-org.eclipse.jdt.core.compiler.problem.unclosedCloseable=warning
-org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=ignore
-org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.unlikelyCollectionMethodArgumentType=warning
-org.eclipse.jdt.core.compiler.problem.unlikelyCollectionMethodArgumentTypeStrict=disabled
-org.eclipse.jdt.core.compiler.problem.unlikelyEqualsArgumentType=info
-org.eclipse.jdt.core.compiler.problem.unnecessaryElse=ignore
-org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=ignore
-org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore
-org.eclipse.jdt.core.compiler.problem.unstableAutoModuleName=warning
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=ignore
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionExemptExceptionAndThrowable=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.unusedExceptionParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedImport=warning
-org.eclipse.jdt.core.compiler.problem.unusedLabel=warning
-org.eclipse.jdt.core.compiler.problem.unusedLocal=warning
-org.eclipse.jdt.core.compiler.problem.unusedObjectAllocation=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled
-org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=warning
-org.eclipse.jdt.core.compiler.problem.unusedTypeParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=warning
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" activate="init" deactivate="destroy" name="org.argeo.cms.dataServletContext">
- <implementation class="org.argeo.cms.servlet.CmsServletContext"/>
- <service>
- <provide interface="org.osgi.service.http.context.ServletContextHelper"/>
- </service>
- <property name="osgi.http.whiteboard.context.name" type="String" value="dataServletContext"/>
- <property name="osgi.http.whiteboard.context.path" type="String" value="/data"/>
-</scr:component>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" name="org.argeo.cms.filesServlet">
- <implementation class="org.argeo.cms.internal.http.CmsWebDavServlet"/>
- <service>
- <provide interface="javax.servlet.Servlet"/>
- </service>
- <property name="osgi.http.whiteboard.servlet.pattern" type="String" value="/*"/>
- <property name="osgi.http.whiteboard.context.select" type="String" value="(osgi.http.whiteboard.context.name=filesServletContext)"/>
- <property name="servlet.init.resource-config" type="String" value="/org/argeo/cms/internal/http/webdav-config.xml"/>
- <property name="servlet.init.resource-path-prefix" type="String" value="/files"/>
- <reference bind="setRepository" cardinality="1..1" interface="javax.jcr.Repository" policy="static" target="(cn=ego)"/>
-</scr:component>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" activate="init" deactivate="destroy" name="org.argeo.cms.filesServletContext">
- <implementation class="org.argeo.cms.servlet.PrivateWwwAuthServletContext"/>
- <service>
- <provide interface="org.osgi.service.http.context.ServletContextHelper"/>
- </service>
- <property name="osgi.http.whiteboard.context.name" type="String" value="filesServletContext"/>
- <property name="osgi.http.whiteboard.context.path" type="String" value="/files"/>
-</scr:component>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" activate="init" deactivate="destroy" name="org.argeo.cms.jcrServletContext">
- <implementation class="org.argeo.cms.servlet.PrivateWwwAuthServletContext"/>
- <service>
- <provide interface="org.osgi.service.http.context.ServletContextHelper"/>
- </service>
- <property name="osgi.http.whiteboard.context.name" type="String" value="jcrServletContext"/>
- <property name="osgi.http.whiteboard.context.path" type="String" value="/jcr"/>
-</scr:component>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" name="CMS Deployment">
- <implementation class="org.argeo.cms.internal.kernel.CmsDeployment"/>
- <service>
- <provide interface="org.argeo.api.NodeDeployment"/>
- </service>
-</scr:component>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" name="CMS Instance">
- <implementation class="org.argeo.cms.internal.kernel.CmsInstance"/>
- <service>
- <provide interface="org.argeo.api.NodeInstance"/>
- </service>
-</scr:component>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<scr:component xmlns:scr="http://www.osgi.org/xmlns/scr/v1.1.0" activate="initServices" name="CMS State">
- <implementation class="org.argeo.cms.internal.kernel.CmsState"/>
- <service>
- <provide interface="org.argeo.api.NodeState"/>
- </service>
-</scr:component>
Bundle-Activator: org.argeo.cms.internal.kernel.Activator
-Import-Package: javax.jcr.security,\
-org.h2;resolution:=optional,\
-org.postgresql;resolution:=optional,\
-org.apache.jackrabbit.webdav.server,\
-org.apache.jackrabbit.webdav.jcr,\
-org.apache.commons.httpclient.cookie;resolution:=optional,\
+Import-Package: org.apache.commons.httpclient.cookie;resolution:=optional,\
!com.sun.security.jgss,\
-org.osgi.framework.namespace;version=0.0.0,\
org.osgi.*;version=0.0.0,\
-org.osgi.service.http.whiteboard,\
+org.osgi.service.http.whiteboard;version=0.0.0,\
+org.osgi.framework.namespace;version=0.0.0,\
*
Service-Component:\
OSGI-INF/cmsUserManager.xml,\
OSGI-INF/pkgServletContext.xml,\
-OSGI-INF/pkgServlet.xml,\
-OSGI-INF/jcrServletContext.xml,\
-OSGI-INF/dataServletContext.xml,\
-OSGI-INF/filesServletContext.xml,\
-OSGI-INF/filesServlet.xml
+OSGI-INF/pkgServlet.xml
-Provide-Capability: cms.datamodel;name=argeo;cnd=/org/argeo/cms/argeo.cnd;abstract=true,\
-osgi.service;objectClass="javax.jcr.Repository"
OSGI-INF/
source.. = src/,\
ext/test/
-additional.bundles = org.apache.jackrabbit.data,\
- org.junit
+additional.bundles = org.junit
+++ /dev/null
-package org.argeo.cms.tabular;
-
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.jcr.Node;
-import javax.jcr.PropertyType;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.commons.cnd.CndImporter;
-import org.argeo.api.tabular.TabularColumn;
-import org.argeo.api.tabular.TabularRow;
-import org.argeo.api.tabular.TabularRowIterator;
-import org.argeo.api.tabular.TabularWriter;
-import org.argeo.cms.ArgeoTypes;
-import org.argeo.jackrabbit.unit.AbstractJackrabbitTestCase;
-
-public class JcrTabularTest extends AbstractJackrabbitTestCase {
- private final static Log log = LogFactory.getLog(JcrTabularTest.class);
-
- public void testWriteReadCsv() throws Exception {
- // session().setNamespacePrefix("argeo", ArgeoNames.ARGEO_NAMESPACE);
- InputStreamReader reader = new InputStreamReader(getClass().getResourceAsStream("/org/argeo/api/ldap.cnd"));
- CndImporter.registerNodeTypes(reader, session());
- reader.close();
- reader = new InputStreamReader(getClass().getResourceAsStream("/org/argeo/cms/argeo.cnd"));
- CndImporter.registerNodeTypes(reader, session());
- reader.close();
-// reader = new InputStreamReader(getClass().getResourceAsStream("/org/argeo/cms/cms.cnd"));
-// CndImporter.registerNodeTypes(reader, session());
-// reader.close();
-
- // write
- Integer columnCount = 15;
- Long rowCount = 1000l;
- String stringValue = "test, \ntest";
-
- List<TabularColumn> header = new ArrayList<TabularColumn>();
- for (int i = 0; i < columnCount; i++) {
- header.add(new TabularColumn("col" + i, PropertyType.STRING));
- }
- Node tableNode = session().getRootNode().addNode("table", ArgeoTypes.ARGEO_TABLE);
- TabularWriter writer = new JcrTabularWriter(tableNode, header, ArgeoTypes.ARGEO_CSV);
- for (int i = 0; i < rowCount; i++) {
- List<Object> objs = new ArrayList<Object>();
- for (int j = 0; j < columnCount; j++) {
- objs.add(stringValue);
- }
- writer.appendRow(objs.toArray());
- }
- writer.close();
- session().save();
-
- if (log.isDebugEnabled())
- log.debug("Wrote tabular content " + rowCount + " rows, " + columnCount + " columns");
- // read
- TabularRowIterator rowIt = new JcrTabularRowIterator(tableNode);
- Long count = 0l;
- while (rowIt.hasNext()) {
- TabularRow tr = rowIt.next();
- assertEquals(header.size(), tr.size());
- count++;
- }
- assertEquals(rowCount, count);
- if (log.isDebugEnabled())
- log.debug("Read tabular content " + rowCount + " rows, " + columnCount + " columns");
- }
-}
<artifactId>org.argeo.api</artifactId>
<version>2.3-SNAPSHOT</version>
</dependency>
- <dependency>
- <groupId>org.argeo.commons</groupId>
- <artifactId>org.argeo.cms.jcr</artifactId>
- <version>2.3-SNAPSHOT</version>
- </dependency>
<dependency>
<groupId>org.argeo.commons</groupId>
<artifactId>org.argeo.enterprise</artifactId>
+++ /dev/null
-<argeo = 'http://www.argeo.org/ns/argeo'>
-
-// GENERIC TYPES
-[argeo:remoteRepository] > nt:unstructured
-- argeo:uri (STRING)
-- argeo:userID (STRING)
-+ argeo:password (argeo:encrypted)
-
-// TABULAR CONTENT
-[argeo:table] > nt:file
-+ * (argeo:column) *
-
-[argeo:column] > mix:title
-- jcr:requiredType (STRING) = 'STRING'
-
-[argeo:csv] > nt:resource
-
-// CRYPTO
-[argeo:encrypted]
-mixin
-// initialization vector used by some algorithms
-- argeo:iv (BINARY)
-
-[argeo:pbeKeySpec]
-mixin
-- argeo:secretKeyFactory (STRING)
-- argeo:salt (BINARY)
-- argeo:iterationCount (LONG)
-- argeo:keyLength (LONG)
-- argeo:secretKeyEncryption (STRING)
-
-[argeo:pbeSpec] > argeo:pbeKeySpec
-mixin
-- argeo:cipher (STRING)
+++ /dev/null
-// DN (see https://tools.ietf.org/html/rfc4514)
-<cn = 'http://www.argeo.org/ns/rfc4514/cn'>
-<l = 'http://www.argeo.org/ns/rfc4514/l'>
-<st = 'http://www.argeo.org/ns/rfc4514/st'>
-<o = 'http://www.argeo.org/ns/rfc4514/o'>
-<ou = 'http://www.argeo.org/ns/rfc4514/ou'>
-<c = 'http://www.argeo.org/ns/rfc4514/c'>
-<street = 'http://www.argeo.org/ns/rfc4514/street'>
-<dc = 'http://www.argeo.org/ns/rfc4514/dc'>
-<uid = 'http://www.argeo.org/ns/rfc4514/uid'>
+++ /dev/null
-package org.argeo.cms.fs;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.file.FileSystem;
-import java.nio.file.Path;
-import java.nio.file.spi.FileSystemProvider;
-
-import javax.jcr.NoSuchWorkspaceException;
-import javax.jcr.Node;
-import javax.jcr.NodeIterator;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.jcr.query.Query;
-import javax.jcr.query.QueryManager;
-
-import org.argeo.api.NodeConstants;
-import org.argeo.jcr.Jcr;
-
-/** Utilities around documents. */
-public class CmsFsUtils {
- // TODO make it more robust and configurable
- private static String baseWorkspaceName = NodeConstants.SYS_WORKSPACE;
-
- public static Node getNode(Repository repository, Path path) {
- String workspaceName = path.getNameCount() == 0 ? baseWorkspaceName : path.getName(0).toString();
- String jcrPath = '/' + path.subpath(1, path.getNameCount()).toString();
- try {
- Session newSession;
- try {
- newSession = repository.login(workspaceName);
- } catch (NoSuchWorkspaceException e) {
- // base workspace
- newSession = repository.login(baseWorkspaceName);
- jcrPath = path.toString();
- }
- return newSession.getNode(jcrPath);
- } catch (RepositoryException e) {
- throw new IllegalStateException("Cannot get node from path " + path, e);
- }
- }
-
- public static NodeIterator getLastUpdatedDocuments(Session session) {
- try {
- String qStr = "//element(*, nt:file)";
- qStr += " order by @jcr:lastModified descending";
- QueryManager queryManager = session.getWorkspace().getQueryManager();
- @SuppressWarnings("deprecation")
- Query xpathQuery = queryManager.createQuery(qStr, Query.XPATH);
- xpathQuery.setLimit(8);
- NodeIterator nit = xpathQuery.execute().getNodes();
- return nit;
- } catch (RepositoryException e) {
- throw new IllegalStateException("Unable to retrieve last updated documents", e);
- }
- }
-
- public static Path getPath(FileSystemProvider nodeFileSystemProvider, URI uri) {
- try {
- FileSystem fileSystem = nodeFileSystemProvider.getFileSystem(uri);
- if (fileSystem == null)
- fileSystem = nodeFileSystemProvider.newFileSystem(uri, null);
- String path = uri.getPath();
- return fileSystem.getPath(path);
- } catch (IOException e) {
- throw new IllegalStateException("Unable to initialise file system for " + uri, e);
- }
- }
-
- public static Path getPath(FileSystemProvider nodeFileSystemProvider, Node node) {
- String workspaceName = Jcr.getWorkspaceName(node);
- String fullPath = baseWorkspaceName.equals(workspaceName) ? Jcr.getPath(node)
- : '/' + workspaceName + Jcr.getPath(node);
- URI uri;
- try {
- uri = new URI(NodeConstants.SCHEME_NODE, null, fullPath, null);
- } catch (URISyntaxException e) {
- throw new IllegalArgumentException("Cannot interpret " + fullPath + " as an URI", e);
- }
- return getPath(nodeFileSystemProvider, uri);
- }
-
- /** Singleton. */
- private CmsFsUtils() {
- }
-}
+++ /dev/null
-package org.argeo.cms.internal.http;
-
-import java.util.Map;
-
-import javax.jcr.Repository;
-
-import org.apache.jackrabbit.server.SessionProvider;
-import org.apache.jackrabbit.server.remoting.davex.JcrRemotingServlet;
-import org.argeo.api.NodeConstants;
-
-/** A {@link JcrRemotingServlet} based on {@link CmsSessionProvider}. */
-public class CmsRemotingServlet extends JcrRemotingServlet {
- private static final long serialVersionUID = 6459455509684213633L;
- private Repository repository;
- private SessionProvider sessionProvider;
-
- public CmsRemotingServlet() {
- }
-
- public CmsRemotingServlet(String alias, Repository repository) {
- this.repository = repository;
- this.sessionProvider = new CmsSessionProvider(alias);
- }
-
- @Override
- public Repository getRepository() {
- return repository;
- }
-
- public void setRepository(Repository repository, Map<String, String> properties) {
- this.repository = repository;
- String alias = properties.get(NodeConstants.CN);
- if (alias != null)
- sessionProvider = new CmsSessionProvider(alias);
- else
- throw new IllegalArgumentException("Only aliased repositories are supported");
- }
-
- @Override
- protected SessionProvider getSessionProvider() {
- return sessionProvider;
- }
-
-}
+++ /dev/null
-package org.argeo.cms.internal.http;
-
-import java.io.Serializable;
-import java.security.PrivilegedActionException;
-import java.security.PrivilegedExceptionAction;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Set;
-
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.security.auth.Subject;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.server.SessionProvider;
-import org.argeo.api.NodeConstants;
-import org.argeo.cms.auth.CmsSession;
-import org.argeo.cms.internal.auth.CmsSessionImpl;
-import org.argeo.jcr.JcrUtils;
-
-/**
- * Implements an open session in view patter: a new JCR session is created for
- * each request
- */
-public class CmsSessionProvider implements SessionProvider, Serializable {
- private static final long serialVersionUID = -1358136599534938466L;
-
- private final static Log log = LogFactory.getLog(CmsSessionProvider.class);
-
- private final String alias;
-
- private LinkedHashMap<Session, CmsDataSession> cmsSessions = new LinkedHashMap<>();
-
- public CmsSessionProvider(String alias) {
- this.alias = alias;
- }
-
- public Session getSession(HttpServletRequest request, Repository rep, String workspace)
- throws javax.jcr.LoginException, ServletException, RepositoryException {
-
- // a client is scanning parent URLs.
-// if (workspace == null)
-// return null;
-
- CmsSessionImpl cmsSession = WebCmsSessionImpl.getCmsSession(request);
- if (log.isTraceEnabled()) {
- log.trace("Get JCR session from " + cmsSession);
- }
- if (cmsSession == null)
- throw new IllegalStateException("Cannot find a session for request " + request.getRequestURI());
- CmsDataSession cmsDataSession = new CmsDataSession(cmsSession);
- Session session = cmsDataSession.getDataSession(alias, workspace, rep);
- cmsSessions.put(session, cmsDataSession);
- return session;
- }
-
- public void releaseSession(Session session) {
-// JcrUtils.logoutQuietly(session);
- if (cmsSessions.containsKey(session)) {
- CmsDataSession cmsDataSession = cmsSessions.get(session);
- cmsDataSession.releaseDataSession(alias, session);
- } else {
- log.warn("JCR session " + session + " not found in CMS session list. Logging it out...");
- JcrUtils.logoutQuietly(session);
- }
- }
-
- static class CmsDataSession {
- private CmsSession cmsSession;
-
- private Map<String, Session> dataSessions = new HashMap<>();
- private Set<String> dataSessionsInUse = new HashSet<>();
- private Set<Session> additionalDataSessions = new HashSet<>();
-
- private CmsDataSession(CmsSession cmsSession) {
- this.cmsSession = cmsSession;
- }
-
- public Session newDataSession(String cn, String workspace, Repository repository) {
- checkValid();
- return login(repository, workspace);
- }
-
- public synchronized Session getDataSession(String cn, String workspace, Repository repository) {
- checkValid();
- // FIXME make it more robust
- if (workspace == null)
- workspace = NodeConstants.SYS_WORKSPACE;
- String path = cn + '/' + workspace;
- if (dataSessionsInUse.contains(path)) {
- try {
- wait(1000);
- if (dataSessionsInUse.contains(path)) {
- Session session = login(repository, workspace);
- additionalDataSessions.add(session);
- if (log.isTraceEnabled())
- log.trace("Additional data session " + path + " for " + cmsSession.getUserDn());
- return session;
- }
- } catch (InterruptedException e) {
- // silent
- }
- }
-
- Session session = null;
- if (dataSessions.containsKey(path)) {
- session = dataSessions.get(path);
- } else {
- session = login(repository, workspace);
- dataSessions.put(path, session);
- if (log.isTraceEnabled())
- log.trace("New data session " + path + " for " + cmsSession.getUserDn());
- }
- dataSessionsInUse.add(path);
- return session;
- }
-
- private Session login(Repository repository, String workspace) {
- try {
- return Subject.doAs(cmsSession.getSubject(), new PrivilegedExceptionAction<Session>() {
- @Override
- public Session run() throws Exception {
- return repository.login(workspace);
- }
- });
- } catch (PrivilegedActionException e) {
- throw new IllegalStateException("Cannot log in " + cmsSession.getUserDn() + " to JCR", e);
- }
- }
-
- public synchronized void releaseDataSession(String cn, Session session) {
- if (additionalDataSessions.contains(session)) {
- JcrUtils.logoutQuietly(session);
- additionalDataSessions.remove(session);
- if (log.isTraceEnabled())
- log.trace("Remove additional data session " + session);
- return;
- }
- String path = cn + '/' + session.getWorkspace().getName();
- if (!dataSessionsInUse.contains(path))
- log.warn("Data session " + path + " was not in use for " + cmsSession.getUserDn());
- dataSessionsInUse.remove(path);
- Session registeredSession = dataSessions.get(path);
- if (session != registeredSession)
- log.warn("Data session " + path + " not consistent for " + cmsSession.getUserDn());
- if (log.isTraceEnabled())
- log.trace("Released data session " + session + " for " + path);
- notifyAll();
- }
-
- private void checkValid() {
- if (!cmsSession.isValid())
- throw new IllegalStateException(
- "CMS session " + cmsSession.getUuid() + " is not valid since " + cmsSession.getEnd());
- }
-
- private void close() {
- // FIXME class this when CMS session is closed
- synchronized (this) {
- // TODO check data session in use ?
- for (String path : dataSessions.keySet())
- JcrUtils.logoutQuietly(dataSessions.get(path));
- for (Session session : additionalDataSessions)
- JcrUtils.logoutQuietly(session);
- }
- }
- }
-}
+++ /dev/null
-package org.argeo.cms.internal.http;
-
-import java.util.Map;
-
-import javax.jcr.Repository;
-
-import org.apache.jackrabbit.webdav.simple.SimpleWebdavServlet;
-import org.argeo.api.NodeConstants;
-
-/** A {@link SimpleWebdavServlet} based on {@link CmsSessionProvider}. */
-public class CmsWebDavServlet extends SimpleWebdavServlet {
- private static final long serialVersionUID = 7485800288686328063L;
- private Repository repository;
-
- public CmsWebDavServlet() {
- }
-
- public CmsWebDavServlet(String alias, Repository repository) {
- this.repository = repository;
- setSessionProvider(new CmsSessionProvider(alias));
- }
-
- @Override
- public Repository getRepository() {
- return repository;
- }
-
- public void setRepository(Repository repository, Map<String, String> properties) {
- this.repository = repository;
- String alias = properties.get(NodeConstants.CN);
- if (alias != null)
- setSessionProvider(new CmsSessionProvider(alias));
- else
- throw new IllegalArgumentException("Only aliased repositories are supported");
- }
-
-}
public final static String HEADER_AUTHORIZATION = "Authorization";
public final static String HEADER_WWW_AUTHENTICATE = "WWW-Authenticate";
- public final static String DEFAULT_PROTECTED_HANDLERS = "/org/argeo/cms/internal/http/protectedHandlers.xml";
- public final static String WEBDAV_CONFIG = "/org/argeo/cms/internal/http/webdav-config.xml";
-
static boolean isBrowser(String userAgent) {
return userAgent.contains("webkit") || userAgent.contains("gecko") || userAgent.contains("firefox")
|| userAgent.contains("msie") || userAgent.contains("chrome") || userAgent.contains("chromium")
+++ /dev/null
-package org.argeo.cms.internal.http;
-
-import static javax.jcr.Property.JCR_DESCRIPTION;
-import static javax.jcr.Property.JCR_LAST_MODIFIED;
-import static javax.jcr.Property.JCR_TITLE;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.security.PrivilegedExceptionAction;
-import java.util.Calendar;
-import java.util.Collection;
-
-import javax.jcr.Node;
-import javax.jcr.NodeIterator;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.security.auth.Subject;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.argeo.api.NodeConstants;
-import org.argeo.api.NodeUtils;
-import org.argeo.cms.CmsException;
-import org.argeo.jcr.JcrUtils;
-import org.osgi.framework.BundleContext;
-import org.osgi.framework.FrameworkUtil;
-import org.osgi.framework.ServiceReference;
-
-public class LinkServlet extends HttpServlet {
- private final BundleContext bc = FrameworkUtil.getBundle(getClass()).getBundleContext();
-
- private static final long serialVersionUID = 3749990143146845708L;
-
- @Override
- protected void service(HttpServletRequest request, HttpServletResponse response)
- throws ServletException, IOException {
- String path = request.getPathInfo();
- String userAgent = request.getHeader("User-Agent").toLowerCase();
- boolean isBot = false;
- // boolean isCompatibleBrowser = false;
- if (userAgent.contains("bot") || userAgent.contains("facebook") || userAgent.contains("twitter")) {
- isBot = true;
- }
- // else if (userAgent.contains("webkit") ||
- // userAgent.contains("gecko") || userAgent.contains("firefox")
- // || userAgent.contains("msie") || userAgent.contains("chrome") ||
- // userAgent.contains("chromium")
- // || userAgent.contains("opera") || userAgent.contains("browser"))
- // {
- // isCompatibleBrowser = true;
- // }
-
- if (isBot) {
- // log.warn("# BOT " + request.getHeader("User-Agent"));
- canonicalAnswer(request, response, path);
- return;
- }
-
- // if (isCompatibleBrowser && log.isTraceEnabled())
- // log.trace("# BWS " + request.getHeader("User-Agent"));
- redirectTo(response, "/#" + path);
- }
-
- private void redirectTo(HttpServletResponse response, String location) {
- response.setHeader("Location", location);
- response.setStatus(HttpServletResponse.SC_FOUND);
- }
-
- // private boolean canonicalAnswerNeededBy(HttpServletRequest request) {
- // String userAgent = request.getHeader("User-Agent").toLowerCase();
- // return userAgent.startsWith("facebookexternalhit/");
- // }
-
- /** For bots which don't understand RWT. */
- private void canonicalAnswer(HttpServletRequest request, HttpServletResponse response, String path) {
- Session session = null;
- try {
- PrintWriter writer = response.getWriter();
- session = Subject.doAs(anonymousLogin(), new PrivilegedExceptionAction<Session>() {
-
- @Override
- public Session run() throws Exception {
- Collection<ServiceReference<Repository>> srs = bc.getServiceReferences(Repository.class,
- "(" + NodeConstants.CN + "=" + NodeConstants.EGO_REPOSITORY + ")");
- Repository repository = bc.getService(srs.iterator().next());
- return repository.login();
- }
-
- });
- Node node = session.getNode(path);
- String title = node.hasProperty(JCR_TITLE) ? node.getProperty(JCR_TITLE).getString() : node.getName();
- String desc = node.hasProperty(JCR_DESCRIPTION) ? node.getProperty(JCR_DESCRIPTION).getString() : null;
- Calendar lastUpdate = node.hasProperty(JCR_LAST_MODIFIED) ? node.getProperty(JCR_LAST_MODIFIED).getDate()
- : null;
- String url = getCanonicalUrl(node, request);
- String imgUrl = null;
- // TODO support images
-// loop: for (NodeIterator it = node.getNodes(); it.hasNext();) {
-// // Takes the first found cms:image
-// Node child = it.nextNode();
-// if (child.isNodeType(CMS_IMAGE)) {
-// imgUrl = getDataUrl(child, request);
-// break loop;
-// }
-// }
- StringBuilder buf = new StringBuilder();
- buf.append("<html>");
- buf.append("<head>");
- writeMeta(buf, "og:title", escapeHTML(title));
- writeMeta(buf, "og:type", "website");
- buf.append("<meta name='twitter:card' content='summary' />");
- buf.append("<meta name='twitter:site' content='@argeo_org' />");
- writeMeta(buf, "og:url", url);
- if (desc != null)
- writeMeta(buf, "og:description", escapeHTML(desc));
- if (imgUrl != null)
- writeMeta(buf, "og:image", imgUrl);
- if (lastUpdate != null)
- writeMeta(buf, "og:updated_time", Long.toString(lastUpdate.getTime().getTime()));
- buf.append("</head>");
- buf.append("<body>");
- buf.append("<p><b>!! This page is meant for indexing robots, not for real people," + " visit <a href='/#")
- .append(path).append("'>").append(escapeHTML(title)).append("</a> instead.</b></p>");
- writeCanonical(buf, node);
- buf.append("</body>");
- buf.append("</html>");
- writer.print(buf.toString());
-
- response.setHeader("Content-Type", "text/html");
- writer.flush();
- } catch (Exception e) {
- throw new CmsException("Cannot write canonical answer", e);
- } finally {
- JcrUtils.logoutQuietly(session);
- }
- }
-
- /**
- * From http://stackoverflow.com/questions/1265282/recommended-method-for-
- * escaping-html-in-java (+ escaping '). TODO Use
- * org.apache.commons.lang.StringEscapeUtils
- */
- private String escapeHTML(String s) {
- StringBuilder out = new StringBuilder(Math.max(16, s.length()));
- for (int i = 0; i < s.length(); i++) {
- char c = s.charAt(i);
- if (c > 127 || c == '\'' || c == '"' || c == '<' || c == '>' || c == '&') {
- out.append("&#");
- out.append((int) c);
- out.append(';');
- } else {
- out.append(c);
- }
- }
- return out.toString();
- }
-
- private void writeMeta(StringBuilder buf, String tag, String value) {
- buf.append("<meta property='").append(tag).append("' content='").append(value).append("'/>");
- }
-
- private void writeCanonical(StringBuilder buf, Node node) throws RepositoryException {
- buf.append("<div>");
- if (node.hasProperty(JCR_TITLE))
- buf.append("<p>").append(node.getProperty(JCR_TITLE).getString()).append("</p>");
- if (node.hasProperty(JCR_DESCRIPTION))
- buf.append("<p>").append(node.getProperty(JCR_DESCRIPTION).getString()).append("</p>");
- NodeIterator children = node.getNodes();
- while (children.hasNext()) {
- writeCanonical(buf, children.nextNode());
- }
- buf.append("</div>");
- }
-
- // DATA
- private StringBuilder getServerBaseUrl(HttpServletRequest request) {
- try {
- URL url = new URL(request.getRequestURL().toString());
- StringBuilder buf = new StringBuilder();
- buf.append(url.getProtocol()).append("://").append(url.getHost());
- if (url.getPort() != -1)
- buf.append(':').append(url.getPort());
- return buf;
- } catch (MalformedURLException e) {
- throw new CmsException("Cannot extract server base URL from " + request.getRequestURL(), e);
- }
- }
-
- private String getDataUrl(Node node, HttpServletRequest request) throws RepositoryException {
- try {
- StringBuilder buf = getServerBaseUrl(request);
- buf.append(NodeUtils.getDataPath(NodeConstants.EGO_REPOSITORY, node));
- return new URL(buf.toString()).toString();
- } catch (MalformedURLException e) {
- throw new CmsException("Cannot build data URL for " + node, e);
- }
- }
-
- // public static String getDataPath(Node node) throws
- // RepositoryException {
- // assert node != null;
- // String userId = node.getSession().getUserID();
- //// if (log.isTraceEnabled())
- //// log.trace(userId + " : " + node.getPath());
- // StringBuilder buf = new StringBuilder();
- // boolean isAnonymous =
- // userId.equalsIgnoreCase(NodeConstants.ROLE_ANONYMOUS);
- // if (isAnonymous)
- // buf.append(WEBDAV_PUBLIC);
- // else
- // buf.append(WEBDAV_PRIVATE);
- // Session session = node.getSession();
- // Repository repository = session.getRepository();
- // String cn;
- // if (repository.isSingleValueDescriptor(NodeConstants.CN)) {
- // cn = repository.getDescriptor(NodeConstants.CN);
- // } else {
- //// log.warn("No cn defined in repository, using " +
- // NodeConstants.NODE);
- // cn = NodeConstants.NODE;
- // }
- // return
- // buf.append('/').append(cn).append('/').append(session.getWorkspace().getName()).append(node.getPath())
- // .toString();
- // }
-
- private String getCanonicalUrl(Node node, HttpServletRequest request) throws RepositoryException {
- try {
- StringBuilder buf = getServerBaseUrl(request);
- buf.append('/').append('!').append(node.getPath());
- return new URL(buf.toString()).toString();
- } catch (MalformedURLException e) {
- throw new CmsException("Cannot build data URL for " + node, e);
- }
- // return request.getRequestURL().append('!').append(node.getPath())
- // .toString();
- }
-
- private Subject anonymousLogin() {
- Subject subject = new Subject();
- LoginContext lc;
- try {
- lc = new LoginContext(NodeConstants.LOGIN_CONTEXT_ANONYMOUS, subject);
- lc.login();
- return subject;
- } catch (LoginException e) {
- throw new CmsException("Cannot login as anonymous", e);
- }
- }
-
-}
+++ /dev/null
-<config>
- <protecteditemremovehandler>
- <class name="org.apache.jackrabbit.server.remoting.davex.AclRemoveHandler" />
- </protecteditemremovehandler>
-</config>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- -->
-<!--
-<!DOCTYPE config [
- <!ELEMENT config (iomanager , propertymanager, (collection | noncollection)? , filter?, mimetypeproperties?) >
-
- <!ELEMENT iomanager (class, iohandler*) >
- <!ELEMENT iohandler (class) >
-
- <!ELEMENT propertymanager (class, propertyhandler*) >
- <!ELEMENT propertyhandler (class) >
-
- <!ELEMENT collection (nodetypes) >
- <!ELEMENT noncollection (nodetypes) >
-
- <!ELEMENT filter (class, namespaces?, nodetypes?) >
-
- <!ELEMENT class >
- <!ATTLIST class
- name CDATA #REQUIRED
- >
- <!ELEMENT namespaces (prefix | uri)* >
- <!ELEMENT prefix (CDATA) >
- <!ELEMENT uri (CDATA) >
-
- <!ELEMENT nodetypes (nodetype)* >
- <!ELEMENT nodetype (CDATA) >
-
- <!ELEMENT mimetypeproperties (mimemapping*, defaultmimetype) >
-
- <!ELEMENT mimemapping >
- <!ATTLIST mimemapping
- extension CDATA #REQUIRED
- mimetype CDATA #REQUIRED
- >
-
- <!ELEMENT defaultmimetype (CDATA) >
-]>
--->
-
-<config>
- <!--
- Defines the IOManager implementation that is responsible for passing
- import/export request to the individual IO-handlers.
- -->
- <iomanager>
- <!-- class element defines the manager to be used. The specified class
- must implement the IOManager interface.
- Note, that the handlers are being added and called in the order
- they appear in the configuration.
- -->
- <class name="org.apache.jackrabbit.server.io.IOManagerImpl" />
- <iohandler>
- <class name="org.apache.jackrabbit.server.io.VersionHandler" />
- </iohandler>
- <iohandler>
- <class name="org.apache.jackrabbit.server.io.VersionHistoryHandler" />
- </iohandler>
-<!-- <iohandler> -->
-<!-- <class name="org.apache.jackrabbit.server.io.ZipHandler" /> -->
-<!-- </iohandler> -->
-<!-- <iohandler> -->
-<!-- <class name="org.apache.jackrabbit.server.io.XmlHandler" /> -->
-<!-- </iohandler> -->
- <iohandler>
- <class name="org.apache.jackrabbit.server.io.DirListingExportHandler" />
- </iohandler>
- <iohandler>
- <class name="org.apache.jackrabbit.server.io.DefaultHandler" />
- </iohandler>
- </iomanager>
- <!--
- Example config for iomanager that populates its list of handlers with
- default values. Therefore the 'iohandler' elements are omited.
- -->
- <!--
- <iomanager>
- <class name="org.apache.jackrabbit.server.io.DefaultIOManager" />
- </iomanager>
- -->
- <!--
- Defines the PropertyManager implementation that is responsible for export
- and import of resource properties.
- -->
- <propertymanager>
- <!-- class element defines the manager to be used. The specified class
- must implement the PropertyManager interface.
- Note, that the handlers are being added and called in the order
- they appear in the configuration.
- -->
- <class name="org.apache.jackrabbit.server.io.PropertyManagerImpl" />
- <propertyhandler>
- <class name="org.apache.jackrabbit.server.io.VersionHandler" />
- </propertyhandler>
- <propertyhandler>
- <class name="org.apache.jackrabbit.server.io.VersionHistoryHandler" />
- </propertyhandler>
-<!-- <propertyhandler> -->
-<!-- <class name="org.apache.jackrabbit.server.io.ZipHandler" /> -->
-<!-- </propertyhandler> -->
-<!-- <propertyhandler> -->
-<!-- <class name="org.apache.jackrabbit.server.io.XmlHandler" /> -->
-<!-- </propertyhandler> -->
- <propertyhandler>
- <class name="org.apache.jackrabbit.server.io.DefaultHandler" />
- </propertyhandler>
- </propertymanager>
- <!--
- Define nodetypes, that should never by displayed as 'collection'
- -->
- <noncollection>
- <nodetypes>
- <nodetype>nt:file</nodetype>
- <nodetype>nt:resource</nodetype>
- </nodetypes>
- </noncollection>
- <!--
- Example: Defines nodetypes, that should always be displayed as 'collection'.
- -->
- <!--
- <collection>
- <nodetypes>
- <nodetype>nt:folder</nodetype>
- <nodetype>rep:root</nodetype>
- </nodetypes>
- </collection>
- -->
- <!--
- Filter that allows to prevent certain items from being displayed.
- Please note, that this has an effect on PROPFIND calls only and does not
- provide limited access to those items matching any of the filters.
-
- However specifying a filter may cause problems with PUT or MKCOL if the
- resource to be created is being filtered out, thus resulting in inconsistent
- responses (e.g. PUT followed by PROPFIND on parent).
- -->
- <filter>
- <!-- class element defines the resource filter to be used. The specified class
- must implement the ItemFilter interface -->
- <class name="org.apache.jackrabbit.webdav.simple.DefaultItemFilter" />
- <!--
- Nodetype names to be used to filter child nodes.
- A child node can be filtered if the declaring nodetype of its definition
- is one of the nodetype names specified in the nodetypes Element.
- E.g. defining 'rep:root' as filtered nodetype whould result in jcr:system
- being hidden but no other child node of the root node, since those
- are defined by the nodetype nt:unstructered.
- -->
- <!--
- <nodetypes>
- <nodetype>rep:root</nodetype>
- </nodetypes>
- -->
- <!--
- Namespace prefixes or uris. Items having a name that matches any of the
- entries will be filtered.
- -->
- <namespaces>
- <prefix>rep</prefix>
- <prefix>jcr</prefix>
- <!-- Argeo namespaces -->
- <prefix>node</prefix>
- <prefix>argeo</prefix>
- <prefix>cms</prefix>
- <prefix>slc</prefix>
- <prefix>connect</prefix>
- <prefix>activities</prefix>
- <prefix>people</prefix>
- <prefix>documents</prefix>
- <prefix>tracker</prefix>
- <!--
- <uri>internal</uri>
- <uri>http://www.jcp.org/jcr/1.0</uri>
- -->
- </namespaces>
- </filter>
-
- <!--
- Optional 'mimetypeproperties' element.
- It defines additional or replaces existing mappings for the MimeResolver
- instance created by the ResourceConfig.
- The default mappings are defined in org.apache.jackrabbit.server.io.mimetypes.properties.
- If the default mime type defined by MimeResolver is 'application/octet-stream'.
- -->
- <!--
- <mimetypeproperties>
- <mimemapping extension="rtf" mimetype="application/rtf" />
- <mimemapping extension="ott" mimetype="application/vnd.oasis.opendocument.text-template" />
- <defaultmimetype>text/html</defaultmimetype>
- </mimetypeproperties>
- -->
-</config>
+++ /dev/null
-package org.argeo.cms.internal.jcr;
-
-import java.util.Properties;
-
-import org.apache.jackrabbit.core.config.BeanConfig;
-import org.apache.jackrabbit.core.config.ConfigurationException;
-import org.apache.jackrabbit.core.config.RepositoryConfigurationParser;
-import org.apache.jackrabbit.core.config.WorkspaceSecurityConfig;
-import org.apache.jackrabbit.core.util.db.ConnectionFactory;
-import org.w3c.dom.Element;
-
-/**
- * A {@link RepositoryConfigurationParser} providing more flexibility with
- * classloaders.
- */
-@SuppressWarnings("restriction")
-class CustomRepositoryConfigurationParser extends RepositoryConfigurationParser {
- private ClassLoader classLoader = null;
-
- public CustomRepositoryConfigurationParser(Properties variables) {
- super(variables);
- }
-
- public CustomRepositoryConfigurationParser(Properties variables, ConnectionFactory connectionFactory) {
- super(variables, connectionFactory);
- }
-
- @Override
- protected RepositoryConfigurationParser createSubParser(Properties variables) {
- Properties props = new Properties(getVariables());
- props.putAll(variables);
- CustomRepositoryConfigurationParser subParser = new CustomRepositoryConfigurationParser(props,
- connectionFactory);
- subParser.setClassLoader(classLoader);
- return subParser;
- }
-
- @Override
- public WorkspaceSecurityConfig parseWorkspaceSecurityConfig(Element parent) throws ConfigurationException {
- WorkspaceSecurityConfig workspaceSecurityConfig = super.parseWorkspaceSecurityConfig(parent);
- workspaceSecurityConfig.getAccessControlProviderConfig().setClassLoader(classLoader);
- return workspaceSecurityConfig;
- }
-
- @Override
- protected BeanConfig parseBeanConfig(Element parent, String name) throws ConfigurationException {
- BeanConfig beanConfig = super.parseBeanConfig(parent, name);
- if (beanConfig.getClassName().startsWith("org.argeo")) {
- beanConfig.setClassLoader(classLoader);
- }
- return beanConfig;
- }
-
- public void setClassLoader(ClassLoader classLoader) {
- this.classLoader = classLoader;
- }
-
-}
+++ /dev/null
-package org.argeo.cms.internal.jcr;
-
-/** Pre-defined Jackrabbit repository configurations. */
-enum JackrabbitType {
- /** Local file system */
- localfs,
- /** Embedded Java H2 database */
- h2,
- /** Embedded Java H2 database in PostgreSQL compatibility mode */
- h2_postgresql,
- /** PostgreSQL */
- postgresql,
- /** PostgreSQL with datastore */
- postgresql_ds,
- /** PostgreSQL with cluster */
- postgresql_cluster,
- /** PostgreSQL with cluster and datastore */
- postgresql_cluster_ds,
- /** Memory */
- memory;
-}
+++ /dev/null
-package org.argeo.cms.internal.jcr;
-
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.Dictionary;
-import java.util.HashMap;
-import java.util.Hashtable;
-import java.util.Map;
-
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.RepositoryFactory;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.argeo.api.NodeConstants;
-import org.argeo.api.NodeDeployment;
-import org.argeo.jackrabbit.client.ClientDavexRepositoryFactory;
-import org.argeo.jcr.JcrException;
-import org.argeo.naming.LdapAttrs;
-import org.osgi.framework.BundleContext;
-import org.osgi.framework.Constants;
-import org.osgi.framework.FrameworkUtil;
-
-/** JCR specific init utilities. */
-public class JcrInitUtils {
- private final static Log log = LogFactory.getLog(JcrInitUtils.class);
- private final static BundleContext bundleContext = FrameworkUtil.getBundle(JcrInitUtils.class).getBundleContext();
-
- public static void addToDeployment(NodeDeployment nodeDeployment) {
- // node repository
-// Dictionary<String, Object> provided = null;
- Dictionary<String, Object> provided = nodeDeployment.getProps(NodeConstants.NODE_REPOS_FACTORY_PID,
- NodeConstants.NODE);
- Dictionary<String, Object> nodeConfig = JcrInitUtils.getNodeRepositoryConfig(provided);
- // node repository is mandatory
- nodeDeployment.addFactoryDeployConfig(NodeConstants.NODE_REPOS_FACTORY_PID, nodeConfig);
-
- // additional repositories
-// dataModels: for (DataModels.DataModel dataModel : dataModels.getNonAbstractDataModels()) {
-// if (NodeConstants.NODE_REPOSITORY.equals(dataModel.getName()))
-// continue dataModels;
-// Dictionary<String, Object> config = JcrInitUtils.getRepositoryConfig(dataModel.getName(),
-// getProps(NodeConstants.NODE_REPOS_FACTORY_PID, dataModel.getName()));
-// if (config.size() != 0)
-// putFactoryDeployConfig(NodeConstants.NODE_REPOS_FACTORY_PID, config);
-// }
-
- }
-
- /** Override the provided config with the framework properties */
- public static Dictionary<String, Object> getNodeRepositoryConfig(Dictionary<String, Object> provided) {
- Dictionary<String, Object> props = provided != null ? provided : new Hashtable<String, Object>();
- for (RepoConf repoConf : RepoConf.values()) {
- Object value = getFrameworkProp(NodeConstants.NODE_REPO_PROP_PREFIX + repoConf.name());
- if (value != null) {
- props.put(repoConf.name(), value);
- if (log.isDebugEnabled())
- log.debug("Set node repo configuration " + repoConf.name() + " to " + value);
- }
- }
- props.put(NodeConstants.CN, NodeConstants.NODE_REPOSITORY);
- return props;
- }
-
- public static Dictionary<String, Object> getRepositoryConfig(String dataModelName,
- Dictionary<String, Object> provided) {
- if (dataModelName.equals(NodeConstants.NODE_REPOSITORY) || dataModelName.equals(NodeConstants.EGO_REPOSITORY))
- throw new IllegalArgumentException("Data model '" + dataModelName + "' is reserved.");
- Dictionary<String, Object> props = provided != null ? provided : new Hashtable<String, Object>();
- for (RepoConf repoConf : RepoConf.values()) {
- Object value = getFrameworkProp(
- NodeConstants.NODE_REPOS_PROP_PREFIX + dataModelName + '.' + repoConf.name());
- if (value != null) {
- props.put(repoConf.name(), value);
- if (log.isDebugEnabled())
- log.debug("Set " + dataModelName + " repo configuration " + repoConf.name() + " to " + value);
- }
- }
- if (props.size() != 0)
- props.put(NodeConstants.CN, dataModelName);
- return props;
- }
-
- private static void registerRemoteInit(String uri) {
- try {
- Repository repository = createRemoteRepository(new URI(uri));
- Hashtable<String, Object> properties = new Hashtable<>();
- properties.put(NodeConstants.CN, NodeConstants.NODE_INIT);
- properties.put(LdapAttrs.labeledURI.name(), uri);
- properties.put(Constants.SERVICE_RANKING, -1000);
- bundleContext.registerService(Repository.class, repository, properties);
- } catch (RepositoryException e) {
- throw new JcrException(e);
- } catch (URISyntaxException e) {
- throw new IllegalArgumentException(e);
- }
- }
-
- private static Repository createRemoteRepository(URI uri) throws RepositoryException {
- RepositoryFactory repositoryFactory = new ClientDavexRepositoryFactory();
- Map<String, String> params = new HashMap<String, String>();
- params.put(ClientDavexRepositoryFactory.JACKRABBIT_DAVEX_URI, uri.toString());
- // TODO make it configurable
- params.put(ClientDavexRepositoryFactory.JACKRABBIT_REMOTE_DEFAULT_WORKSPACE, NodeConstants.SYS_WORKSPACE);
- return repositoryFactory.getRepository(params);
- }
-
- private static String getFrameworkProp(String key, String def) {
- String value;
- if (bundleContext != null)
- value = bundleContext.getProperty(key);
- else
- value = System.getProperty(key);
- if (value == null)
- return def;
- return value;
- }
-
- private static String getFrameworkProp(String key) {
- return getFrameworkProp(key, null);
- }
-
-}
+++ /dev/null
-package org.argeo.cms.internal.jcr;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-
-import org.apache.jackrabbit.core.data.DataIdentifier;
-import org.apache.jackrabbit.core.data.DataRecord;
-import org.apache.jackrabbit.core.data.DataStoreException;
-import org.apache.jackrabbit.core.data.FileDataStore;
-
-/**
- * <b>experimental</b> Duplicate added entries in another directory (typically a
- * remote mount).
- */
-@SuppressWarnings("restriction")
-public class LocalFsDataStore extends FileDataStore {
- String redundantPath;
- FileDataStore redundantStore;
-
- @Override
- public void init(String homeDir) {
- // init primary first
- super.init(homeDir);
-
- if (redundantPath != null) {
- // redundant directory must be created first
- // TODO implement some polling?
- if (Files.exists(Paths.get(redundantPath))) {
- redundantStore = new FileDataStore();
- redundantStore.setPath(redundantPath);
- redundantStore.init(homeDir);
- }
- }
- }
-
- @Override
- public DataRecord addRecord(InputStream input) throws DataStoreException {
- DataRecord dataRecord = super.addRecord(input);
- syncRedundantRecord(dataRecord);
- return dataRecord;
- }
-
- @Override
- public DataRecord getRecord(DataIdentifier identifier) throws DataStoreException {
- DataRecord dataRecord = super.getRecord(identifier);
- syncRedundantRecord(dataRecord);
- return dataRecord;
- }
-
- protected void syncRedundantRecord(DataRecord dataRecord) throws DataStoreException {
- if (redundantStore == null)
- return;
- if (redundantStore.getRecordIfStored(dataRecord.getIdentifier()) == null) {
- try (InputStream redundant = dataRecord.getStream()) {
- redundantStore.addRecord(redundant);
- } catch (IOException e) {
- throw new DataStoreException("Cannot add redundant record.", e);
- }
- }
- }
-
- public void setRedundantPath(String redundantPath) {
- this.redundantPath = redundantPath;
- }
-
-}
+++ /dev/null
-package org.argeo.cms.internal.jcr;
-
-import org.argeo.api.NodeConstants;
-import org.argeo.osgi.metatype.EnumAD;
-import org.argeo.osgi.metatype.EnumOCD;
-
-/** JCR repository configuration */
-public enum RepoConf implements EnumAD {
- /** Repository type */
- type("h2"),
- /** Default workspace */
- defaultWorkspace(NodeConstants.SYS_WORKSPACE),
- /** Database URL */
- dburl(null),
- /** Database user */
- dbuser(null),
- /** Database password */
- dbpassword(null),
-
- /** The identifier (can be an URL locating the repo) */
- labeledUri(null),
- //
- // JACKRABBIT SPECIFIC
- //
- /** Maximum database pool size */
- maxPoolSize(10),
- /** Maximum cache size in MB */
- maxCacheMB(null),
- /** Bundle cache size in MB */
- bundleCacheMB(8),
- /** Extractor pool size */
- extractorPoolSize(0),
- /** Search cache size */
- searchCacheSize(1000),
- /** Max volatile index size */
- maxVolatileIndexSize(1048576),
- /** Cluster id (if appropriate configuration) */
- clusterId("default"),
- /** Indexes base path */
- indexesBase(null);
-
- /** The default value. */
- private Object def;
- private String oid;
-
- RepoConf(String oid, Object def) {
- this.oid = oid;
- this.def = def;
- }
-
- RepoConf(Object def) {
- this.def = def;
- }
-
- public Object getDefault() {
- return def;
- }
-
- @Override
- public String getID() {
- if (oid != null)
- return oid;
- return EnumAD.super.getID();
- }
-
- public static class OCD extends EnumOCD<RepoConf> {
- public OCD(String locale) {
- super(RepoConf.class, locale);
- }
- }
-
-}
+++ /dev/null
-package org.argeo.cms.internal.jcr;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Dictionary;
-import java.util.Enumeration;
-import java.util.Properties;
-import java.util.UUID;
-
-import javax.jcr.RepositoryException;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.core.RepositoryContext;
-import org.apache.jackrabbit.core.RepositoryImpl;
-import org.apache.jackrabbit.core.cache.CacheManager;
-import org.apache.jackrabbit.core.config.RepositoryConfig;
-import org.apache.jackrabbit.core.config.RepositoryConfigurationParser;
-import org.argeo.api.NodeConstants;
-import org.argeo.cms.internal.kernel.CmsPaths;
-import org.xml.sax.InputSource;
-
-/** Can interpret properties in order to create an actual JCR repository. */
-public class RepositoryBuilder {
- private final static Log log = LogFactory.getLog(RepositoryBuilder.class);
-
- public RepositoryContext createRepositoryContext(Dictionary<String, ?> properties)
- throws RepositoryException, IOException {
- RepositoryConfig repositoryConfig = createRepositoryConfig(properties);
- RepositoryContext repositoryContext = createJackrabbitRepository(repositoryConfig);
- RepositoryImpl repository = repositoryContext.getRepository();
-
- // cache
- Object maxCacheMbStr = prop(properties, RepoConf.maxCacheMB);
- if (maxCacheMbStr != null) {
- Integer maxCacheMB = Integer.parseInt(maxCacheMbStr.toString());
- CacheManager cacheManager = repository.getCacheManager();
- cacheManager.setMaxMemory(maxCacheMB * 1024l * 1024l);
- cacheManager.setMaxMemoryPerCache((maxCacheMB / 4) * 1024l * 1024l);
- }
-
- return repositoryContext;
- }
-
- RepositoryConfig createRepositoryConfig(Dictionary<String, ?> properties) throws RepositoryException, IOException {
- JackrabbitType type = JackrabbitType.valueOf(prop(properties, RepoConf.type).toString());
- ClassLoader cl = getClass().getClassLoader();
- final String base = "/org/argeo/cms/internal/jcr";
- try (InputStream in = cl.getResourceAsStream(base + "/repository-" + type.name() + ".xml")) {
- if (in == null)
- throw new IllegalArgumentException("Repository configuration not found");
- InputSource config = new InputSource(in);
- Properties jackrabbitVars = getConfigurationProperties(type, properties);
- // RepositoryConfig repositoryConfig = RepositoryConfig.create(config,
- // jackrabbitVars);
-
- // custom configuration parser
- CustomRepositoryConfigurationParser parser = new CustomRepositoryConfigurationParser(jackrabbitVars);
- parser.setClassLoader(cl);
- RepositoryConfig repositoryConfig = parser.parseRepositoryConfig(config);
- repositoryConfig.init();
-
- // set the proper classloaders
- repositoryConfig.getSecurityConfig().getSecurityManagerConfig().setClassLoader(cl);
- repositoryConfig.getSecurityConfig().getAccessManagerConfig().setClassLoader(cl);
-// for (WorkspaceConfig workspaceConfig : repositoryConfig.getWorkspaceConfigs()) {
-// workspaceConfig.getSecurityConfig().getAccessControlProviderConfig().setClassLoader(cl);
-// }
- return repositoryConfig;
- }
- }
-
- private Properties getConfigurationProperties(JackrabbitType type, Dictionary<String, ?> properties) {
- Properties props = new Properties();
- for (Enumeration<String> keys = properties.keys(); keys.hasMoreElements();) {
- String key = keys.nextElement();
- props.put(key, properties.get(key));
- }
-
- // cluster id
- // cf. https://wiki.apache.org/jackrabbit/Clustering
- // TODO deal with multiple repos
- String clusterId = System.getProperty("org.apache.jackrabbit.core.cluster.node_id");
- String clusterIdProp = props.getProperty(RepoConf.clusterId.name());
- if (clusterId != null) {
- if (clusterIdProp != null)
- throw new IllegalArgumentException("Cluster id defined as System properties and in deploy config");
- props.put(RepoConf.clusterId.name(), clusterId);
- } else {
- clusterId = clusterIdProp;
- }
-
- // home
- String homeUri = props.getProperty(RepoConf.labeledUri.name());
- Path homePath;
- if (homeUri == null) {
- String cn = props.getProperty(NodeConstants.CN);
- assert cn != null;
- if (clusterId != null) {
- homePath = CmsPaths.getRepoDirPath(cn + '/' + clusterId);
- } else {
- homePath = CmsPaths.getRepoDirPath(cn);
- }
- } else {
- try {
- URI uri = new URI(homeUri);
- String host = uri.getHost();
- if (host == null || host.trim().equals("")) {
- homePath = Paths.get(uri).toAbsolutePath();
- } else {
- // TODO remote at this stage?
- throw new IllegalArgumentException("Cannot manage repository path for host " + host);
- }
- } catch (URISyntaxException e) {
- throw new IllegalArgumentException("Invalid repository home URI", e);
- }
- }
- // TODO use Jackrabbit API (?)
- Path rootUuidPath = homePath.resolve("repository/meta/rootUUID");
- try {
- if (!Files.exists(rootUuidPath)) {
- Files.createDirectories(rootUuidPath.getParent());
- Files.write(rootUuidPath, UUID.randomUUID().toString().getBytes());
- }
- // File homeDir = homePath.toFile();
- // homeDir.mkdirs();
- } catch (IOException e) {
- throw new RuntimeException("Cannot set up repository home " + homePath, e);
- }
- // home cannot be overridden
- props.put(RepositoryConfigurationParser.REPOSITORY_HOME_VARIABLE, homePath.toString());
-
- setProp(props, RepoConf.indexesBase, CmsPaths.getRepoIndexesBase().toString());
- // common
- setProp(props, RepoConf.defaultWorkspace);
- setProp(props, RepoConf.maxPoolSize);
- // Jackrabbit defaults
- setProp(props, RepoConf.bundleCacheMB);
- // See http://wiki.apache.org/jackrabbit/Search
- setProp(props, RepoConf.extractorPoolSize);
- setProp(props, RepoConf.searchCacheSize);
- setProp(props, RepoConf.maxVolatileIndexSize);
-
- // specific
- String dburl;
- switch (type) {
- case h2:
- dburl = "jdbc:h2:" + homePath.toAbsolutePath() + "/h2/repository";
- setProp(props, RepoConf.dburl, dburl);
- setProp(props, RepoConf.dbuser, "sa");
- setProp(props, RepoConf.dbpassword, "");
- break;
- case h2_postgresql:
- dburl = "jdbc:h2:" + homePath.toAbsolutePath() + "/h2/repository;MODE=PostgreSQL;DATABASE_TO_LOWER=TRUE";
- setProp(props, RepoConf.dburl, dburl);
- setProp(props, RepoConf.dbuser, "sa");
- setProp(props, RepoConf.dbpassword, "");
- break;
- case postgresql:
- case postgresql_ds:
- case postgresql_cluster:
- case postgresql_cluster_ds:
- dburl = "jdbc:postgresql://localhost/demo";
- setProp(props, RepoConf.dburl, dburl);
- setProp(props, RepoConf.dbuser, "argeo");
- setProp(props, RepoConf.dbpassword, "argeo");
- break;
- case memory:
- break;
- case localfs:
- break;
- default:
- throw new IllegalArgumentException("Unsupported node type " + type);
- }
- return props;
- }
-
- private void setProp(Properties props, RepoConf key, String def) {
- Object value = props.get(key.name());
- if (value == null)
- value = def;
- if (value == null)
- value = key.getDefault();
- if (value != null)
- props.put(key.name(), value.toString());
- }
-
- private void setProp(Properties props, RepoConf key) {
- setProp(props, key, null);
- }
-
- private String prop(Dictionary<String, ?> properties, RepoConf key) {
- Object value = properties.get(key.name());
- if (value == null)
- return key.getDefault() != null ? key.getDefault().toString() : null;
- else
- return value.toString();
- }
-
- private RepositoryContext createJackrabbitRepository(RepositoryConfig repositoryConfig) throws RepositoryException {
- ClassLoader currentContextCl = Thread.currentThread().getContextClassLoader();
- Thread.currentThread().setContextClassLoader(RepositoryBuilder.class.getClassLoader());
- try {
- long begin = System.currentTimeMillis();
- //
- // Actual repository creation
- //
- RepositoryContext repositoryContext = RepositoryContext.create(repositoryConfig);
-
- double duration = ((double) (System.currentTimeMillis() - begin)) / 1000;
- if (log.isDebugEnabled())
- log.debug(
- "Created Jackrabbit repository in " + duration + " s, home: " + repositoryConfig.getHomeDir());
-
- return repositoryContext;
- } finally {
- Thread.currentThread().setContextClassLoader(currentContextCl);
- }
- }
-
-}
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
- <!-- Shared datasource -->
- <DataSources>
- <DataSource name="dataSource">
- <param name="driver" value="org.h2.Driver" />
- <param name="url" value="${dburl}" />
- <param name="user" value="${dbuser}" />
- <param name="password" value="${dbpassword}" />
- <param name="databaseType" value="h2" />
- <param name="maxPoolSize" value="${maxPoolSize}" />
- </DataSource>
- </DataSources>
-
- <!-- File system and datastore -->
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="default" />
- <param name="schemaObjectPrefix" value="fs_" />
- </FileSystem>
- <DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
- <param name="path" value="${rep.home}/datastore" />
- </DataStore>
-
- <!-- Workspace templates -->
- <Workspaces rootPath="${rep.home}/workspaces"
- defaultWorkspace="${defaultWorkspace}" />
- <Workspace name="${wsp.name}">
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="default" />
- <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-<!-- <param name="tikaConfigPath" value="${indexesBase}/${cn}/tika-config.xml" /> -->
- <param name="supportHighlighting" value="true" />
- </SearchIndex>
- <WorkspaceSecurity>
- <AccessControlProvider
- class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
- </WorkspaceSecurity>
- </Workspace>
-
- <!-- Versioning -->
- <Versioning rootPath="${rep.home}/version">
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="default" />
- <param name="schemaObjectPrefix" value="fs_ver_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="pm_ver_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- </Versioning>
-
- <!-- Indexing -->
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-<!-- <param name="tikaConfigPath" value="${indexesBase}/${cn}/tika-config.xml" /> -->
- <param name="supportHighlighting" value="true" />
- </SearchIndex>
-
- <!-- Security -->
- <Security appName="Jackrabbit">
- <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
- workspaceName="security" />
- <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
- </Security>
-</Repository>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
- <!-- Shared datasource -->
- <DataSources>
- <DataSource name="dataSource">
- <param name="driver" value="org.h2.Driver" />
- <param name="url" value="${dburl}" />
- <param name="user" value="${dbuser}" />
- <param name="password" value="${dbpassword}" />
- <param name="databaseType" value="postgresql" />
- <param name="maxPoolSize" value="${maxPoolSize}" />
- </DataSource>
- </DataSources>
-
- <!-- File system and datastore -->
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="default" />
- <param name="schemaObjectPrefix" value="fs_" />
- </FileSystem>
- <DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
- <param name="path" value="${rep.home}/datastore" />
- </DataStore>
-
- <!-- Workspace templates -->
- <Workspaces rootPath="${rep.home}/workspaces"
- defaultWorkspace="${defaultWorkspace}" />
- <Workspace name="${wsp.name}">
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="default" />
- <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-<!-- <param name="tikaConfigPath" value="${indexesBase}/${cn}/tika-config.xml" /> -->
- <param name="supportHighlighting" value="true" />
- </SearchIndex>
- <WorkspaceSecurity>
- <AccessControlProvider
- class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
- </WorkspaceSecurity>
- </Workspace>
-
- <!-- Versioning -->
- <Versioning rootPath="${rep.home}/version">
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="default" />
- <param name="schemaObjectPrefix" value="fs_ver_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="pm_ver_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- </Versioning>
-
- <!-- Indexing -->
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-<!-- <param name="tikaConfigPath" value="${indexesBase}/${cn}/tika-config.xml" /> -->
- <param name="supportHighlighting" value="true" />
- </SearchIndex>
-
- <!-- Security -->
- <Security appName="Jackrabbit">
- <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
- workspaceName="security" />
- <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
- </Security>
-</Repository>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
- <!-- File system and datastore -->
- <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
- <param name="path" value="${rep.home}/repository" />
- </FileSystem>
- <DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
- <param name="path" value="${rep.home}/datastore" />
- </DataStore>
-
- <!-- Workspace templates -->
- <Workspaces rootPath="${rep.home}/workspaces"
- defaultWorkspace="${defaultWorkspace}" />
- <Workspace name="${wsp.name}">
- <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
- <param name="path" value="${wsp.home}" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
- </SearchIndex>
- <WorkspaceSecurity>
- <AccessControlProvider
- class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
- </WorkspaceSecurity>
- </Workspace>
-
- <!-- Versioning -->
- <Versioning rootPath="${rep.home}/version">
- <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
- <param name="path" value="${rep.home}/version" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- </Versioning>
-
- <!-- Indexing -->
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
- </SearchIndex>
-
- <!-- Security -->
- <Security appName="Jackrabbit">
- <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
- workspaceName="security" />
- <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
- </Security>
-</Repository>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
- <!-- File system and datastore -->
- <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-
- <!-- Workspace templates -->
- <Workspaces rootPath="${rep.home}/workspaces"
- defaultWorkspace="${defaultWorkspace}" configRootPath="/workspaces" />
- <Workspace name="${wsp.name}">
- <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
- <param name="blobFSBlockSize" value="1" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
- <param name="directoryManagerClass"
- value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
- <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
- </SearchIndex>
- <WorkspaceSecurity>
- <AccessControlProvider
- class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
- </WorkspaceSecurity>
- </Workspace>
-
- <!-- Versioning -->
- <Versioning rootPath="${rep.home}/version">
- <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
- <param name="blobFSBlockSize" value="1" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- </Versioning>
-
- <!-- Indexing -->
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/index" />
- <param name="directoryManagerClass"
- value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
- <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
- </SearchIndex>
-
- <!-- Security -->
- <Security appName="Jackrabbit">
- <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
- workspaceName="security" />
- <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
- </Security>
-</Repository>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
- <!-- Shared datasource -->
- <DataSources>
- <DataSource name="dataSource">
- <param name="driver" value="org.postgresql.Driver" />
- <param name="url" value="${dburl}" />
- <param name="user" value="${dbuser}" />
- <param name="password" value="${dbpassword}" />
- <param name="databaseType" value="postgresql" />
- <param name="maxPoolSize" value="${maxPoolSize}" />
- </DataSource>
- </DataSources>
-
- <!-- File system and datastore -->
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="fs_" />
- </FileSystem>
-
- <!-- Workspace templates -->
- <Workspaces rootPath="${rep.home}/workspaces"
- defaultWorkspace="${defaultWorkspace}" />
- <Workspace name="${wsp.name}">
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
- </SearchIndex>
- <WorkspaceSecurity>
- <AccessControlProvider
- class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
- </WorkspaceSecurity>
- </Workspace>
-
- <!-- Versioning -->
- <Versioning rootPath="${rep.home}/version">
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="fs_ver_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="pm_ver_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- </Versioning>
-
- <!-- Indexing -->
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
- </SearchIndex>
-
- <!-- Security -->
- <Security appName="Jackrabbit">
- <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
- workspaceName="security" />
- <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
- </Security>
-</Repository>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
- <!-- Shared datasource -->
- <DataSources>
- <DataSource name="dataSource">
- <param name="driver" value="org.postgresql.Driver" />
- <param name="url" value="${dburl}" />
- <param name="user" value="${dbuser}" />
- <param name="password" value="${dbpassword}" />
- <param name="databaseType" value="postgresql" />
- <param name="maxPoolSize" value="${maxPoolSize}" />
- </DataSource>
- </DataSources>
-
- <!-- File system and datastore -->
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="fs_" />
- </FileSystem>
-
- <!-- Workspace templates -->
- <Workspaces rootPath="${rep.home}/workspaces"
- defaultWorkspace="${defaultWorkspace}" />
- <Workspace name="${wsp.name}">
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
- </SearchIndex>
- <WorkspaceSecurity>
- <AccessControlProvider
- class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
- </WorkspaceSecurity>
- </Workspace>
-
- <!-- Versioning -->
- <Versioning rootPath="${rep.home}/version">
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="fs_ver_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="pm_ver_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- </Versioning>
-
- <!-- Indexing -->
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
- </SearchIndex>
-
- <!-- Security -->
- <Security appName="Jackrabbit">
- <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
- workspaceName="security" />
- <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
- </Security>
-
- <!-- Clustering -->
- <Cluster id="${clusterId}">
- <Journal class="org.apache.jackrabbit.core.journal.DatabaseJournal">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="journal_" />
- </Journal>
- </Cluster>
-</Repository>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
- <!-- Shared datasource -->
- <DataSources>
- <DataSource name="dataSource">
- <param name="driver" value="org.postgresql.Driver" />
- <param name="url" value="${dburl}" />
- <param name="user" value="${dbuser}" />
- <param name="password" value="${dbpassword}" />
- <param name="databaseType" value="postgresql" />
- <param name="maxPoolSize" value="${maxPoolSize}" />
- </DataSource>
- </DataSources>
-
- <!-- File system and datastore -->
- <FileSystem
- class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="fs_" />
- </FileSystem>
- <DataStore
- class="org.argeo.cms.internal.jcr.LocalFsDataStore">
- <param name="path" value="${rep.home}/../datastore" />
- <param name="redundantPath" value="${rep.home}/../datastorer" />
- </DataStore>
-
- <!-- Workspace templates -->
- <Workspaces rootPath="${rep.home}/workspaces"
- defaultWorkspace="${defaultWorkspace}" />
- <Workspace name="${wsp.name}">
- <FileSystem
- class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- <SearchIndex
- class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path"
- value="${indexesBase}/${cn}/${wsp.name}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize"
- value="${maxVolatileIndexSize}" />
- </SearchIndex>
- <WorkspaceSecurity>
- <AccessControlProvider
- class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
- </WorkspaceSecurity>
- </Workspace>
-
- <!-- Versioning -->
- <Versioning rootPath="${rep.home}/version">
- <FileSystem
- class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="fs_ver_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="pm_ver_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- </Versioning>
-
- <!-- Indexing -->
- <SearchIndex
- class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize"
- value="${maxVolatileIndexSize}" />
- </SearchIndex>
-
- <!-- Security -->
- <Security appName="Jackrabbit">
- <SecurityManager
- class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
- workspaceName="security" />
- <AccessManager
- class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
- </Security>
-
- <!-- Clustering -->
- <Cluster id="${clusterId}" syncDelay="100">
- <Journal
- class="org.apache.jackrabbit.core.journal.DatabaseJournal">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="journal_" />
- </Journal>
- </Cluster>
-</Repository>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
- <!-- Shared datasource -->
- <DataSources>
- <DataSource name="dataSource">
- <param name="driver" value="org.postgresql.Driver" />
- <param name="url" value="${dburl}" />
- <param name="user" value="${dbuser}" />
- <param name="password" value="${dbpassword}" />
- <param name="databaseType" value="postgresql" />
- <param name="maxPoolSize" value="${maxPoolSize}" />
- </DataSource>
- </DataSources>
-
- <!-- File system and datastore -->
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="fs_" />
- </FileSystem>
- <DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
- <param name="path" value="${rep.home}/datastore" />
- </DataStore>
-
- <!-- Workspace templates -->
- <Workspaces rootPath="${rep.home}/workspaces"
- defaultWorkspace="${defaultWorkspace}" />
- <Workspace name="${wsp.name}">
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/${wsp.name}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
- </SearchIndex>
- <WorkspaceSecurity>
- <AccessControlProvider
- class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
- </WorkspaceSecurity>
- </Workspace>
-
- <!-- Versioning -->
- <Versioning rootPath="${rep.home}/version">
- <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
- <param name="dataSourceName" value="dataSource" />
- <param name="schema" value="postgresql" />
- <param name="schemaObjectPrefix" value="fs_ver_" />
- </FileSystem>
- <PersistenceManager
- class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
- <param name="dataSourceName" value="dataSource" />
- <param name="schemaObjectPrefix" value="pm_ver_" />
- <param name="bundleCacheSize" value="${bundleCacheMB}" />
- </PersistenceManager>
- </Versioning>
-
- <!-- Indexing -->
- <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
- <param name="path" value="${indexesBase}/${cn}/index" />
- <param name="extractorPoolSize" value="${extractorPoolSize}" />
- <param name="cacheSize" value="${searchCacheSize}" />
- <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
- </SearchIndex>
-
- <!-- Security -->
- <Security appName="Jackrabbit">
- <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
- workspaceName="security" />
- <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
- </Security>
-</Repository>
\ No newline at end of file
// Node deployment
nodeDeployment = new CmsDeployment();
- registerService(NodeDeployment.class, nodeDeployment, null);
+// registerService(NodeDeployment.class, nodeDeployment, null);
// Node instance
nodeInstance = new CmsInstance();
package org.argeo.cms.internal.kernel;
-import static org.argeo.api.DataModelNamespace.CMS_DATA_MODEL_NAMESPACE;
-import static org.osgi.service.http.whiteboard.HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX;
-
-import java.io.File;
import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Reader;
import java.lang.management.ManagementFactory;
import java.net.URL;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
import java.util.Dictionary;
-import java.util.HashSet;
-import java.util.Hashtable;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.security.auth.callback.CallbackHandler;
-import javax.servlet.Servlet;
+
import javax.transaction.UserTransaction;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.commons.cnd.CndImporter;
-import org.apache.jackrabbit.core.RepositoryContext;
-import org.apache.jackrabbit.core.RepositoryImpl;
-import org.argeo.api.DataModelNamespace;
import org.argeo.api.NodeConstants;
import org.argeo.api.NodeDeployment;
import org.argeo.api.NodeState;
-import org.argeo.api.NodeUtils;
-import org.argeo.api.security.CryptoKeyring;
-import org.argeo.api.security.Keyring;
-import org.argeo.cms.ArgeoNames;
-import org.argeo.cms.internal.http.CmsRemotingServlet;
-import org.argeo.cms.internal.http.CmsWebDavServlet;
-import org.argeo.cms.internal.http.HttpUtils;
-import org.argeo.cms.internal.jcr.JcrInitUtils;
-import org.argeo.jcr.Jcr;
-import org.argeo.jcr.JcrException;
-import org.argeo.jcr.JcrUtils;
-import org.argeo.maintenance.backup.LogicalRestore;
-import org.argeo.naming.LdapAttrs;
import org.argeo.osgi.useradmin.UserAdminConf;
-import org.argeo.util.LangUtils;
import org.eclipse.equinox.http.jetty.JettyConfigurator;
-import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
-import org.osgi.framework.Constants;
import org.osgi.framework.FrameworkUtil;
-import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceReference;
-import org.osgi.framework.wiring.BundleCapability;
-import org.osgi.framework.wiring.BundleWire;
-import org.osgi.framework.wiring.BundleWiring;
import org.osgi.service.cm.Configuration;
import org.osgi.service.cm.ConfigurationAdmin;
-import org.osgi.service.cm.ManagedService;
import org.osgi.service.http.HttpService;
-import org.osgi.service.http.whiteboard.HttpWhiteboardConstants;
import org.osgi.service.useradmin.Group;
import org.osgi.service.useradmin.Role;
import org.osgi.service.useradmin.UserAdmin;
private final Log log = LogFactory.getLog(getClass());
private final BundleContext bc = FrameworkUtil.getBundle(getClass()).getBundleContext();
- private DataModels dataModels;
private DeployConfig deployConfig;
private Long availableSince;
-// private final boolean cleanState;
-
-// private NodeHttp nodeHttp;
- private String webDavConfig = HttpUtils.WEBDAV_CONFIG;
-
- private boolean argeoDataModelExtensionsAvailable = false;
// Readiness
private boolean nodeAvailable = false;
// cleanState = nodeState.isClean();
// nodeHttp = new NodeHttp();
- dataModels = new DataModels(bc);
initTrackers();
}
// httpSt.open();
KernelUtils.asyncOpen(httpSt);
- ServiceTracker<?, ?> repoContextSt = new RepositoryContextStc();
- // repoContextSt.open();
- KernelUtils.asyncOpen(repoContextSt);
ServiceTracker<?, ?> userAdminSt = new ServiceTracker<UserAdmin, UserAdmin>(bc, UserAdmin.class, null) {
@Override
} catch (Exception e) {
throw new IllegalStateException("Cannot analyse clean state", e);
}
- deployConfig = new DeployConfig(configurationAdmin, dataModels, isClean);
- JcrInitUtils.addToDeployment(CmsDeployment.this);
+ deployConfig = new DeployConfig(configurationAdmin, isClean);
+ Activator.registerService(NodeDeployment.class, CmsDeployment.this, null);
+// JcrInitUtils.addToDeployment(CmsDeployment.this);
httpExpected = deployConfig.getProps(KernelConstants.JETTY_FACTORY_PID, "default") != null;
try {
Configuration[] configs = configurationAdmin
// if (nodeHttp != null)
// nodeHttp.destroy();
- try {
- for (ServiceReference<JackrabbitLocalRepository> sr : bc
- .getServiceReferences(JackrabbitLocalRepository.class, null)) {
- bc.getService(sr).destroy();
- }
- } catch (InvalidSyntaxException e1) {
- log.error("Cannot sclean repsoitories", e1);
- }
try {
JettyConfigurator.stopServer(KernelConstants.DEFAULT_JETTY_SERVER);
}
}
- private void prepareNodeRepository(Repository deployedNodeRepository, List<String> publishAsLocalRepo) {
- if (availableSince != null) {
- throw new IllegalStateException("Deployment is already available");
- }
-
- // home
- prepareDataModel(NodeConstants.NODE_REPOSITORY, deployedNodeRepository, publishAsLocalRepo);
-
- // init from backup
- if (deployConfig.isFirstInit()) {
- Path restorePath = Paths.get(System.getProperty("user.dir"), "restore");
- if (Files.exists(restorePath)) {
- if (log.isDebugEnabled())
- log.debug("Found backup " + restorePath + ", restoring it...");
- LogicalRestore logicalRestore = new LogicalRestore(bc, deployedNodeRepository, restorePath);
- KernelUtils.doAsDataAdmin(logicalRestore);
- log.info("Restored backup from " + restorePath);
- }
- }
-
- // init from repository
- Collection<ServiceReference<Repository>> initRepositorySr;
- try {
- initRepositorySr = bc.getServiceReferences(Repository.class,
- "(" + NodeConstants.CN + "=" + NodeConstants.NODE_INIT + ")");
- } catch (InvalidSyntaxException e1) {
- throw new IllegalArgumentException(e1);
- }
- Iterator<ServiceReference<Repository>> it = initRepositorySr.iterator();
- while (it.hasNext()) {
- ServiceReference<Repository> sr = it.next();
- Object labeledUri = sr.getProperties().get(LdapAttrs.labeledURI.name());
- Repository initRepository = bc.getService(sr);
- if (log.isDebugEnabled())
- log.debug("Found init repository " + labeledUri + ", copying it...");
- initFromRepository(deployedNodeRepository, initRepository);
- log.info("Node repository initialised from " + labeledUri);
- }
- }
-
- /** Init from a (typically remote) repository. */
- private void initFromRepository(Repository deployedNodeRepository, Repository initRepository) {
- Session initSession = null;
- try {
- initSession = initRepository.login();
- workspaces: for (String workspaceName : initSession.getWorkspace().getAccessibleWorkspaceNames()) {
- if ("security".equals(workspaceName))
- continue workspaces;
- if (log.isDebugEnabled())
- log.debug("Copying workspace " + workspaceName + " from init repository...");
- long begin = System.currentTimeMillis();
- Session targetSession = null;
- Session sourceSession = null;
- try {
- try {
- targetSession = NodeUtils.openDataAdminSession(deployedNodeRepository, workspaceName);
- } catch (IllegalArgumentException e) {// no such workspace
- Session adminSession = NodeUtils.openDataAdminSession(deployedNodeRepository, null);
- try {
- adminSession.getWorkspace().createWorkspace(workspaceName);
- } finally {
- Jcr.logout(adminSession);
- }
- targetSession = NodeUtils.openDataAdminSession(deployedNodeRepository, workspaceName);
- }
- sourceSession = initRepository.login(workspaceName);
-// JcrUtils.copyWorkspaceXml(sourceSession, targetSession);
- // TODO deal with referenceable nodes
- JcrUtils.copy(sourceSession.getRootNode(), targetSession.getRootNode());
- targetSession.save();
- long duration = System.currentTimeMillis() - begin;
- if (log.isDebugEnabled())
- log.debug("Copied workspace " + workspaceName + " from init repository in " + (duration / 1000)
- + " s");
- } catch (Exception e) {
- log.error("Cannot copy workspace " + workspaceName + " from init repository.", e);
- } finally {
- Jcr.logout(sourceSession);
- Jcr.logout(targetSession);
- }
- }
- } catch (RepositoryException e) {
- throw new JcrException(e);
- } finally {
- Jcr.logout(initSession);
- }
- }
-
- private void prepareHomeRepository(RepositoryImpl deployedRepository) {
- Session adminSession = KernelUtils.openAdminSession(deployedRepository);
- try {
- argeoDataModelExtensionsAvailable = Arrays
- .asList(adminSession.getWorkspace().getNamespaceRegistry().getURIs())
- .contains(ArgeoNames.ARGEO_NAMESPACE);
- } catch (RepositoryException e) {
- log.warn("Cannot check whether Argeo namespace is registered assuming it isn't.", e);
- argeoDataModelExtensionsAvailable = false;
- } finally {
- JcrUtils.logoutQuietly(adminSession);
- }
-
- // Publish home with the highest service ranking
- Hashtable<String, Object> regProps = new Hashtable<>();
- regProps.put(NodeConstants.CN, NodeConstants.EGO_REPOSITORY);
- regProps.put(Constants.SERVICE_RANKING, Integer.MAX_VALUE);
- Repository egoRepository = new EgoRepository(deployedRepository, false);
- bc.registerService(Repository.class, egoRepository, regProps);
- registerRepositoryServlets(NodeConstants.EGO_REPOSITORY, egoRepository);
-
- // Keyring only if Argeo extensions are available
- if (argeoDataModelExtensionsAvailable) {
- new ServiceTracker<CallbackHandler, CallbackHandler>(bc, CallbackHandler.class, null) {
-
- @Override
- public CallbackHandler addingService(ServiceReference<CallbackHandler> reference) {
- NodeKeyRing nodeKeyring = new NodeKeyRing(egoRepository);
- CallbackHandler callbackHandler = bc.getService(reference);
- nodeKeyring.setDefaultCallbackHandler(callbackHandler);
- bc.registerService(LangUtils.names(Keyring.class, CryptoKeyring.class, ManagedService.class),
- nodeKeyring, LangUtils.dict(Constants.SERVICE_PID, NodeConstants.NODE_KEYRING_PID));
- return callbackHandler;
- }
-
- }.open();
- }
- }
-
- /** Session is logged out. */
- private void prepareDataModel(String cn, Repository repository, List<String> publishAsLocalRepo) {
- Session adminSession = KernelUtils.openAdminSession(repository);
- try {
- Set<String> processed = new HashSet<String>();
- bundles: for (Bundle bundle : bc.getBundles()) {
- BundleWiring wiring = bundle.adapt(BundleWiring.class);
- if (wiring == null)
- continue bundles;
- if (NodeConstants.NODE_REPOSITORY.equals(cn))// process all data models
- processWiring(cn, adminSession, wiring, processed, false, publishAsLocalRepo);
- else {
- List<BundleCapability> capabilities = wiring.getCapabilities(CMS_DATA_MODEL_NAMESPACE);
- for (BundleCapability capability : capabilities) {
- String dataModelName = (String) capability.getAttributes().get(DataModelNamespace.NAME);
- if (dataModelName.equals(cn))// process only own data model
- processWiring(cn, adminSession, wiring, processed, false, publishAsLocalRepo);
- }
- }
- }
- } finally {
- JcrUtils.logoutQuietly(adminSession);
- }
- }
-
- private void processWiring(String cn, Session adminSession, BundleWiring wiring, Set<String> processed,
- boolean importListedAbstractModels, List<String> publishAsLocalRepo) {
- // recursively process requirements first
- List<BundleWire> requiredWires = wiring.getRequiredWires(CMS_DATA_MODEL_NAMESPACE);
- for (BundleWire wire : requiredWires) {
- processWiring(cn, adminSession, wire.getProviderWiring(), processed, true, publishAsLocalRepo);
- }
-
- List<BundleCapability> capabilities = wiring.getCapabilities(CMS_DATA_MODEL_NAMESPACE);
- capabilities: for (BundleCapability capability : capabilities) {
- if (!importListedAbstractModels
- && KernelUtils.asBoolean((String) capability.getAttributes().get(DataModelNamespace.ABSTRACT))) {
- continue capabilities;
- }
- boolean publish = registerDataModelCapability(cn, adminSession, capability, processed);
- if (publish)
- publishAsLocalRepo.add((String) capability.getAttributes().get(DataModelNamespace.NAME));
- }
- }
-
- private boolean registerDataModelCapability(String cn, Session adminSession, BundleCapability capability,
- Set<String> processed) {
- Map<String, Object> attrs = capability.getAttributes();
- String name = (String) attrs.get(DataModelNamespace.NAME);
- if (processed.contains(name)) {
- if (log.isTraceEnabled())
- log.trace("Data model " + name + " has already been processed");
- return false;
- }
-
- // CND
- String path = (String) attrs.get(DataModelNamespace.CND);
- if (path != null) {
- File dataModel = bc.getBundle().getDataFile("dataModels/" + path);
- if (!dataModel.exists()) {
- URL url = capability.getRevision().getBundle().getResource(path);
- if (url == null)
- throw new IllegalArgumentException("No data model '" + name + "' found under path " + path);
- try (Reader reader = new InputStreamReader(url.openStream())) {
- CndImporter.registerNodeTypes(reader, adminSession, true);
- processed.add(name);
- dataModel.getParentFile().mkdirs();
- dataModel.createNewFile();
- if (log.isDebugEnabled())
- log.debug("Registered CND " + url);
- } catch (Exception e) {
- log.error("Cannot import CND " + url, e);
- }
- }
- }
-
- if (KernelUtils.asBoolean((String) attrs.get(DataModelNamespace.ABSTRACT)))
- return false;
- // Non abstract
- boolean isStandalone = deployConfig.isStandalone(name);
- boolean publishLocalRepo;
- if (isStandalone && name.equals(cn))// includes the node itself
- publishLocalRepo = true;
- else if (!isStandalone && cn.equals(NodeConstants.NODE_REPOSITORY))
- publishLocalRepo = true;
- else
- publishLocalRepo = false;
-
- return publishLocalRepo;
- }
-
- private void publishLocalRepo(String dataModelName, Repository repository) {
- Hashtable<String, Object> properties = new Hashtable<>();
- properties.put(NodeConstants.CN, dataModelName);
- LocalRepository localRepository;
- String[] classes;
- if (repository instanceof RepositoryImpl) {
- localRepository = new JackrabbitLocalRepository((RepositoryImpl) repository, dataModelName);
- classes = new String[] { Repository.class.getName(), LocalRepository.class.getName(),
- JackrabbitLocalRepository.class.getName() };
- } else {
- localRepository = new LocalRepository(repository, dataModelName);
- classes = new String[] { Repository.class.getName(), LocalRepository.class.getName() };
- }
- bc.registerService(classes, localRepository, properties);
-
- // TODO make it configurable
- registerRepositoryServlets(dataModelName, localRepository);
- if (log.isTraceEnabled())
- log.trace("Published data model " + dataModelName);
- }
@Override
public synchronized Long getAvailableSince() {
return availableSince != null;
}
- protected void registerRepositoryServlets(String alias, Repository repository) {
- registerRemotingServlet(alias, repository);
- registerWebdavServlet(alias, repository);
- }
-
- protected void registerWebdavServlet(String alias, Repository repository) {
- CmsWebDavServlet webdavServlet = new CmsWebDavServlet(alias, repository);
- Hashtable<String, String> ip = new Hashtable<>();
- ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsWebDavServlet.INIT_PARAM_RESOURCE_CONFIG, webDavConfig);
- ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsWebDavServlet.INIT_PARAM_RESOURCE_PATH_PREFIX,
- "/" + alias);
-
- ip.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/" + alias + "/*");
- ip.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_SELECT,
- "(" + HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_PATH + "=" + NodeConstants.PATH_DATA + ")");
- bc.registerService(Servlet.class, webdavServlet, ip);
- }
-
- protected void registerRemotingServlet(String alias, Repository repository) {
- CmsRemotingServlet remotingServlet = new CmsRemotingServlet(alias, repository);
- Hashtable<String, String> ip = new Hashtable<>();
- ip.put(NodeConstants.CN, alias);
- // Properties ip = new Properties();
- ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_RESOURCE_PATH_PREFIX,
- "/" + alias);
- ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_AUTHENTICATE_HEADER,
- "Negotiate");
-
- // Looks like a bug in Jackrabbit remoting init
- Path tmpDir;
- try {
- tmpDir = Files.createTempDirectory("remoting_" + alias);
- } catch (IOException e) {
- throw new RuntimeException("Cannot create temp directory for remoting servlet", e);
- }
- ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_HOME, tmpDir.toString());
- ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_TMP_DIRECTORY,
- "remoting_" + alias);
- ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_PROTECTED_HANDLERS_CONFIG,
- HttpUtils.DEFAULT_PROTECTED_HANDLERS);
- ip.put(HTTP_WHITEBOARD_SERVLET_INIT_PARAM_PREFIX + CmsRemotingServlet.INIT_PARAM_CREATE_ABSOLUTE_URI, "false");
-
- ip.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_SERVLET_PATTERN, "/" + alias + "/*");
- ip.put(HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_SELECT,
- "(" + HttpWhiteboardConstants.HTTP_WHITEBOARD_CONTEXT_PATH + "=" + NodeConstants.PATH_JCR + ")");
- bc.registerService(Servlet.class, remotingServlet, ip);
- }
-
- private class RepositoryContextStc extends ServiceTracker<RepositoryContext, RepositoryContext> {
-
- public RepositoryContextStc() {
- super(bc, RepositoryContext.class, null);
- }
-
- @Override
- public RepositoryContext addingService(ServiceReference<RepositoryContext> reference) {
- RepositoryContext repoContext = bc.getService(reference);
- String cn = (String) reference.getProperty(NodeConstants.CN);
- if (cn != null) {
- List<String> publishAsLocalRepo = new ArrayList<>();
- if (cn.equals(NodeConstants.NODE_REPOSITORY)) {
-// JackrabbitDataModelMigration.clearRepositoryCaches(repoContext.getRepositoryConfig());
- prepareNodeRepository(repoContext.getRepository(), publishAsLocalRepo);
- // TODO separate home repository
- prepareHomeRepository(repoContext.getRepository());
- registerRepositoryServlets(cn, repoContext.getRepository());
- nodeAvailable = true;
- checkReadiness();
- } else {
- prepareDataModel(cn, repoContext.getRepository(), publishAsLocalRepo);
- }
- // Publish all at once, so that bundles with multiple CNDs are consistent
- for (String dataModelName : publishAsLocalRepo)
- publishLocalRepo(dataModelName, repoContext.getRepository());
- }
- return repoContext;
- }
-
- @Override
- public void modifiedService(ServiceReference<RepositoryContext> reference, RepositoryContext service) {
- }
-
- @Override
- public void removedService(ServiceReference<RepositoryContext> reference, RepositoryContext service) {
- }
-
- }
}
+++ /dev/null
-package org.argeo.cms.internal.kernel;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.file.FileSystem;
-import java.nio.file.FileSystemAlreadyExistsException;
-import java.nio.file.Path;
-import java.nio.file.spi.FileSystemProvider;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.jcr.Node;
-import javax.jcr.Property;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.RepositoryFactory;
-import javax.jcr.Session;
-import javax.jcr.nodetype.NodeType;
-
-import org.argeo.api.NodeConstants;
-import org.argeo.api.NodeUtils;
-import org.argeo.cms.auth.CurrentUser;
-import org.argeo.jackrabbit.fs.AbstractJackrabbitFsProvider;
-import org.argeo.jcr.fs.JcrFileSystem;
-import org.argeo.jcr.fs.JcrFileSystemProvider;
-import org.argeo.jcr.fs.JcrFsException;
-import org.osgi.framework.BundleContext;
-import org.osgi.framework.FrameworkUtil;
-import org.osgi.framework.InvalidSyntaxException;
-
-/** Implementation of an {@link FileSystemProvider} based on Jackrabbit. */
-public class CmsFsProvider extends AbstractJackrabbitFsProvider {
- private Map<String, CmsFileSystem> fileSystems = new HashMap<>();
-
- @Override
- public String getScheme() {
- return NodeConstants.SCHEME_NODE;
- }
-
- @Override
- public FileSystem newFileSystem(URI uri, Map<String, ?> env) throws IOException {
- BundleContext bc = FrameworkUtil.getBundle(CmsFsProvider.class).getBundleContext();
- String username = CurrentUser.getUsername();
- if (username == null) {
- // TODO deal with anonymous
- return null;
- }
- if (fileSystems.containsKey(username))
- throw new FileSystemAlreadyExistsException("CMS file system already exists for user " + username);
-
- try {
- String host = uri.getHost();
- if (host != null && !host.trim().equals("")) {
- URI repoUri = new URI("http", uri.getUserInfo(), uri.getHost(), uri.getPort(), "/jcr/node", null, null);
- RepositoryFactory repositoryFactory = bc.getService(bc.getServiceReference(RepositoryFactory.class));
- Repository repository = NodeUtils.getRepositoryByUri(repositoryFactory, repoUri.toString());
- CmsFileSystem fileSystem = new CmsFileSystem(this, repository);
- fileSystems.put(username, fileSystem);
- return fileSystem;
- } else {
- Repository repository = bc.getService(
- bc.getServiceReferences(Repository.class, "(cn=" + NodeConstants.EGO_REPOSITORY + ")")
- .iterator().next());
-// Session session = repository.login();
- CmsFileSystem fileSystem = new CmsFileSystem(this, repository);
- fileSystems.put(username, fileSystem);
- return fileSystem;
- }
- } catch (InvalidSyntaxException | URISyntaxException e) {
- throw new IllegalArgumentException("Cannot open file system " + uri + " for user " + username, e);
- }
- }
-
- @Override
- public FileSystem getFileSystem(URI uri) {
- return currentUserFileSystem();
- }
-
- @Override
- public Path getPath(URI uri) {
- JcrFileSystem fileSystem = currentUserFileSystem();
- String path = uri.getPath();
- if (fileSystem == null)
- try {
- fileSystem = (JcrFileSystem) newFileSystem(uri, new HashMap<String, Object>());
- } catch (IOException e) {
- throw new JcrFsException("Could not autocreate file system", e);
- }
- return fileSystem.getPath(path);
- }
-
- protected JcrFileSystem currentUserFileSystem() {
- String username = CurrentUser.getUsername();
- return fileSystems.get(username);
- }
-
- public Node getUserHome(Repository repository) {
- try {
- Session session = repository.login(NodeConstants.HOME_WORKSPACE);
- return NodeUtils.getUserHome(session);
- } catch (RepositoryException e) {
- throw new IllegalStateException("Cannot get user home", e);
- }
- }
-
- static class CmsFileSystem extends JcrFileSystem {
- public CmsFileSystem(JcrFileSystemProvider provider, Repository repository) throws IOException {
- super(provider, repository);
- }
-
- public boolean skipNode(Node node) throws RepositoryException {
-// if (node.isNodeType(NodeType.NT_HIERARCHY_NODE) || node.isNodeType(NodeTypes.NODE_USER_HOME)
-// || node.isNodeType(NodeTypes.NODE_GROUP_HOME))
- if (node.isNodeType(NodeType.NT_HIERARCHY_NODE))
- return false;
- // FIXME Better identifies home
- if (node.hasProperty(Property.JCR_ID))
- return false;
- return true;
- }
-
- }
-}
package org.argeo.cms.internal.kernel;
-import javax.jcr.Repository;
import javax.naming.ldap.LdapName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.argeo.api.NodeConstants;
import org.argeo.api.NodeInstance;
-import org.argeo.cms.CmsException;
import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
-import org.osgi.framework.ServiceReference;
-import org.osgi.util.tracker.ServiceTracker;
public class CmsInstance implements NodeInstance {
private final Log log = LogFactory.getLog(getClass());
private final BundleContext bc = FrameworkUtil.getBundle(getClass()).getBundleContext();
- private EgoRepository egoRepository;
+// private EgoRepository egoRepository;
public CmsInstance() {
initTrackers();
private void initTrackers() {
// node repository
- new ServiceTracker<Repository, Repository>(bc, Repository.class, null) {
- @Override
- public Repository addingService(ServiceReference<Repository> reference) {
- Object cn = reference.getProperty(NodeConstants.CN);
- if (cn != null && cn.equals(NodeConstants.EGO_REPOSITORY)) {
-// egoRepository = (EgoRepository) bc.getService(reference);
- if (log.isTraceEnabled())
- log.trace("Home repository is available");
- }
- return super.addingService(reference);
- }
-
- @Override
- public void removedService(ServiceReference<Repository> reference, Repository service) {
- super.removedService(reference, service);
-// egoRepository = null;
- }
-
- }.open();
+// new ServiceTracker<Repository, Repository>(bc, Repository.class, null) {
+// @Override
+// public Repository addingService(ServiceReference<Repository> reference) {
+// Object cn = reference.getProperty(NodeConstants.CN);
+// if (cn != null && cn.equals(NodeConstants.EGO_REPOSITORY)) {
+//// egoRepository = (EgoRepository) bc.getService(reference);
+// if (log.isTraceEnabled())
+// log.trace("Home repository is available");
+// }
+// return super.addingService(reference);
+// }
+//
+// @Override
+// public void removedService(ServiceReference<Repository> reference, Repository service) {
+// super.removedService(reference, service);
+//// egoRepository = null;
+// }
+//
+// }.open();
}
public void shutdown() {
@Override
public void createWorkgroup(LdapName dn) {
- if (egoRepository == null)
- throw new CmsException("Ego repository is not available");
- // TODO add check that the group exists
- egoRepository.createWorkgroup(dn);
+// if (egoRepository == null)
+// throw new CmsException("Ego repository is not available");
+// // TODO add check that the group exists
+// egoRepository.createWorkgroup(dn);
+ throw new UnsupportedOperationException();
}
}
+++ /dev/null
-package org.argeo.cms.internal.kernel;
-
-import java.nio.file.Path;
-
-/** Centralises access to the default node deployment directories. */
-public class CmsPaths {
- public static Path getRepoDirPath(String cn) {
- return KernelUtils.getOsgiInstancePath(KernelConstants.DIR_REPOS + '/' + cn);
- }
-
- public static Path getRepoIndexesBase() {
- return KernelUtils.getOsgiInstancePath(KernelConstants.DIR_INDEXES);
- }
-
- /** Singleton. */
- private CmsPaths() {
- }
-}
import java.net.InetAddress;
import java.net.UnknownHostException;
-import java.nio.file.spi.FileSystemProvider;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
-import javax.jcr.RepositoryFactory;
import javax.transaction.TransactionManager;
import javax.transaction.UserTransaction;
private List<Locale> locales = null;
private ThreadGroup threadGroup = new ThreadGroup("CMS");
- private KernelThread kernelThread;
private List<Runnable> stopHooks = new ArrayList<>();
private final String stateUuid;
initI18n();
initServices();
- // kernel thread
- kernelThread = new KernelThread(threadGroup, "Kernel Thread");
- kernelThread.setContextClassLoader(getClass().getClassLoader());
- kernelThread.start();
}
private void initI18n() {
// ocrParser.setLanguage("ara");
// bc.registerService(Parser.class, ocrParser, new Hashtable());
- // JCR
- RepositoryServiceFactory repositoryServiceFactory = new RepositoryServiceFactory();
- stopHooks.add(() -> repositoryServiceFactory.shutdown());
- Activator.registerService(ManagedServiceFactory.class, repositoryServiceFactory,
- LangUtils.dict(Constants.SERVICE_PID, NodeConstants.NODE_REPOS_FACTORY_PID));
-
- NodeRepositoryFactory repositoryFactory = new NodeRepositoryFactory();
- Activator.registerService(RepositoryFactory.class, repositoryFactory, null);
+// // JCR
+// RepositoryServiceFactory repositoryServiceFactory = new RepositoryServiceFactory();
+// stopHooks.add(() -> repositoryServiceFactory.shutdown());
+// Activator.registerService(ManagedServiceFactory.class, repositoryServiceFactory,
+// LangUtils.dict(Constants.SERVICE_PID, NodeConstants.NODE_REPOS_FACTORY_PID));
+//
+// NodeRepositoryFactory repositoryFactory = new NodeRepositoryFactory();
+// Activator.registerService(RepositoryFactory.class, repositoryFactory, null);
// Security
NodeUserAdmin userAdmin = new NodeUserAdmin(NodeConstants.ROLES_BASEDN, NodeConstants.TOKENS_BASEDN);
Activator.registerService(ManagedServiceFactory.class, userAdmin,
LangUtils.dict(Constants.SERVICE_PID, NodeConstants.NODE_USER_ADMIN_PID));
- // File System
- CmsFsProvider cmsFsProvider = new CmsFsProvider();
-// ServiceLoader<FileSystemProvider> fspSl = ServiceLoader.load(FileSystemProvider.class);
-// for (FileSystemProvider fsp : fspSl) {
-// log.debug("FileSystemProvider " + fsp);
-// if (fsp instanceof CmsFsProvider) {
-// cmsFsProvider = (CmsFsProvider) fsp;
-// }
-// }
-// for (FileSystemProvider fsp : FileSystemProvider.installedProviders()) {
-// log.debug("Installed FileSystemProvider " + fsp);
-// }
- Activator.registerService(FileSystemProvider.class, cmsFsProvider,
- LangUtils.dict(Constants.SERVICE_PID, NodeConstants.NODE_FS_PROVIDER_PID));
}
private void initSimpleTransactionManager() {
if (log.isDebugEnabled())
log.debug("CMS stopping... (" + this.stateUuid + ")");
- if (kernelThread != null)
- kernelThread.destroyAndJoin();
// In a different thread in order to avoid interruptions
Thread stopHookThread = new Thread(() -> applyStopHooks(), "Apply Argeo Stop Hooks");
stopHookThread.start();
+++ /dev/null
-package org.argeo.cms.internal.kernel;
-
-import java.util.GregorianCalendar;
-import java.util.concurrent.LinkedBlockingDeque;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import javax.jcr.Node;
-import javax.jcr.Property;
-import javax.jcr.PropertyType;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.jcr.Value;
-import javax.jcr.nodetype.NodeType;
-import javax.jcr.observation.Event;
-import javax.jcr.observation.EventIterator;
-import javax.jcr.observation.EventListener;
-import javax.jcr.version.VersionManager;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.api.JackrabbitValue;
-import org.apache.jackrabbit.core.RepositoryImpl;
-import org.argeo.jcr.JcrUtils;
-
-/** Ensure consistency of files, folder and last modified nodes. */
-class CmsWorkspaceIndexer implements EventListener {
- private final static Log log = LogFactory.getLog(CmsWorkspaceIndexer.class);
-
-// private final static String MIX_ETAG = "mix:etag";
- private final static String JCR_ETAG = "jcr:etag";
-// private final static String JCR_LAST_MODIFIED = "jcr:lastModified";
-// private final static String JCR_LAST_MODIFIED_BY = "jcr:lastModifiedBy";
-// private final static String JCR_MIXIN_TYPES = "jcr:mixinTypes";
- private final static String JCR_DATA = "jcr:data";
- private final static String JCR_CONTENT = "jcr:data";
-
- private String cn;
- private String workspaceName;
- private RepositoryImpl repositoryImpl;
- private Session session;
- private VersionManager versionManager;
-
- private LinkedBlockingDeque<Event> toProcess = new LinkedBlockingDeque<>();
- private IndexingThread indexingThread;
- private AtomicBoolean stopping = new AtomicBoolean(false);
-
- public CmsWorkspaceIndexer(RepositoryImpl repositoryImpl, String cn, String workspaceName)
- throws RepositoryException {
- this.cn = cn;
- this.workspaceName = workspaceName;
- this.repositoryImpl = repositoryImpl;
- }
-
- public void init() {
- session = KernelUtils.openAdminSession(repositoryImpl, workspaceName);
- try {
- String[] nodeTypes = { NodeType.NT_FILE, NodeType.MIX_LAST_MODIFIED };
- session.getWorkspace().getObservationManager().addEventListener(this,
- Event.NODE_ADDED | Event.PROPERTY_CHANGED, "/", true, null, nodeTypes, true);
- versionManager = session.getWorkspace().getVersionManager();
-
- indexingThread = new IndexingThread();
- indexingThread.start();
- } catch (RepositoryException e1) {
- throw new IllegalStateException(e1);
- }
- }
-
- public void destroy() {
- stopping.set(true);
- indexingThread.interrupt();
- // TODO make it configurable
- try {
- indexingThread.join(10 * 60 * 1000);
- } catch (InterruptedException e1) {
- log.warn("Indexing thread interrupted. Will log out session.");
- }
-
- try {
- session.getWorkspace().getObservationManager().removeEventListener(this);
- } catch (RepositoryException e) {
- if (log.isTraceEnabled())
- log.warn("Cannot unregistered JCR event listener", e);
- } finally {
- JcrUtils.logoutQuietly(session);
- }
- }
-
- private synchronized void processEvents(EventIterator events) {
- long begin = System.currentTimeMillis();
- long count = 0;
- while (events.hasNext()) {
- Event event = events.nextEvent();
- try {
- toProcess.put(event);
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
-// processEvent(event);
- count++;
- }
- long duration = System.currentTimeMillis() - begin;
- if (log.isTraceEnabled())
- log.trace("Processed " + count + " events in " + duration + " ms");
- notifyAll();
- }
-
- protected synchronized void processEvent(Event event) {
- try {
- String eventPath = event.getPath();
- if (event.getType() == Event.NODE_ADDED) {
- if (!versionManager.isCheckedOut(eventPath))
- return;// ignore checked-in nodes
- if (log.isTraceEnabled())
- log.trace("NODE_ADDED " + eventPath);
-// session.refresh(true);
- session.refresh(false);
- Node node = session.getNode(eventPath);
- Node parentNode = node.getParent();
- if (parentNode.isNodeType(NodeType.NT_FILE)) {
- if (node.isNodeType(NodeType.NT_UNSTRUCTURED)) {
- if (!node.isNodeType(NodeType.MIX_LAST_MODIFIED))
- node.addMixin(NodeType.MIX_LAST_MODIFIED);
- Property property = node.getProperty(Property.JCR_DATA);
- String etag = toEtag(property.getValue());
- session.save();
- node.setProperty(JCR_ETAG, etag);
- if (log.isTraceEnabled())
- log.trace("ETag and last modified added to new " + node);
- } else if (node.isNodeType(NodeType.NT_RESOURCE)) {
-// if (!node.isNodeType(MIX_ETAG))
-// node.addMixin(MIX_ETAG);
-// session.save();
-// Property property = node.getProperty(Property.JCR_DATA);
-// String etag = toEtag(property.getValue());
-// node.setProperty(JCR_ETAG, etag);
-// session.save();
- }
-// setLastModifiedRecursive(parentNode, event);
-// session.save();
-// if (log.isTraceEnabled())
-// log.trace("ETag and last modified added to new " + node);
- }
-
-// if (node.isNodeType(NodeType.NT_FOLDER)) {
-// setLastModifiedRecursive(node, event);
-// session.save();
-// if (log.isTraceEnabled())
-// log.trace("Last modified added to new " + node);
-// }
- } else if (event.getType() == Event.PROPERTY_CHANGED) {
- String propertyName = extractItemName(eventPath);
- // skip if last modified properties are explicitly set
- if (!propertyName.equals(JCR_DATA))
- return;
-// if (propertyName.equals(JCR_LAST_MODIFIED))
-// return;
-// if (propertyName.equals(JCR_LAST_MODIFIED_BY))
-// return;
-// if (propertyName.equals(JCR_MIXIN_TYPES))
-// return;
-// if (propertyName.equals(JCR_ETAG))
-// return;
-
- if (log.isTraceEnabled())
- log.trace("PROPERTY_CHANGED " + eventPath);
-
- if (!session.propertyExists(eventPath))
- return;
- session.refresh(false);
- Property property = session.getProperty(eventPath);
- Node node = property.getParent();
- if (property.getType() == PropertyType.BINARY && propertyName.equals(JCR_DATA)
- && node.isNodeType(NodeType.NT_UNSTRUCTURED)) {
- String etag = toEtag(property.getValue());
- node.setProperty(JCR_ETAG, etag);
- Node parentNode = node.getParent();
- if (parentNode.isNodeType(NodeType.MIX_LAST_MODIFIED)) {
- setLastModified(parentNode, event);
- }
- if (log.isTraceEnabled())
- log.trace("ETag and last modified updated for " + node);
- }
-// setLastModified(node, event);
-// session.save();
-// if (log.isTraceEnabled())
-// log.trace("ETag and last modified updated for " + node);
- } else if (event.getType() == Event.NODE_REMOVED) {
- String removeNodePath = eventPath;
- String nodeName = extractItemName(eventPath);
- if (JCR_CONTENT.equals(nodeName)) // parent is a file, deleted anyhow
- return;
- if (log.isTraceEnabled())
- log.trace("NODE_REMOVED " + eventPath);
-// String parentPath = JcrUtils.parentPath(removeNodePath);
-// session.refresh(true);
-// setLastModified(parentPath, event);
-// session.save();
- if (log.isTraceEnabled())
- log.trace("Last modified updated for parents of removed " + removeNodePath);
- }
- } catch (Exception e) {
- if (log.isTraceEnabled())
- log.warn("Cannot process event " + event, e);
- } finally {
-// try {
-// session.refresh(true);
-// if (session.hasPendingChanges())
-// session.save();
-//// session.refresh(false);
-// } catch (RepositoryException e) {
-// if (log.isTraceEnabled())
-// log.warn("Cannot refresh JCR session", e);
-// }
- }
-
- }
-
- private String extractItemName(String path) {
- if (path == null || path.length() <= 1)
- return null;
- int lastIndex = path.lastIndexOf('/');
- if (lastIndex >= 0) {
- return path.substring(lastIndex + 1);
- } else {
- return path;
- }
- }
-
- @Override
- public void onEvent(EventIterator events) {
- processEvents(events);
-// Runnable toRun = new Runnable() {
-//
-// @Override
-// public void run() {
-// processEvents(events);
-// }
-// };
-// Future<?> future = Activator.getInternalExecutorService().submit(toRun);
-// try {
-// // make the call synchronous
-// future.get(60, TimeUnit.SECONDS);
-// } catch (TimeoutException | ExecutionException | InterruptedException e) {
-// // silent
-// }
- }
-
- static String toEtag(Value v) {
- if (v instanceof JackrabbitValue) {
- JackrabbitValue value = (JackrabbitValue) v;
- return '\"' + value.getContentIdentity() + '\"';
- } else {
- return null;
- }
-
- }
-
- protected synchronized void setLastModified(Node node, Event event) throws RepositoryException {
- GregorianCalendar calendar = new GregorianCalendar();
- calendar.setTimeInMillis(event.getDate());
- node.setProperty(Property.JCR_LAST_MODIFIED, calendar);
- node.setProperty(Property.JCR_LAST_MODIFIED_BY, event.getUserID());
- if (log.isTraceEnabled())
- log.trace("Last modified set on " + node);
- }
-
- /** Recursively set the last updated time on parents. */
- protected synchronized void setLastModifiedRecursive(Node node, Event event) throws RepositoryException {
- if (versionManager.isCheckedOut(node.getPath())) {
- if (node.isNodeType(NodeType.MIX_LAST_MODIFIED)) {
- setLastModified(node, event);
- }
- if (node.isNodeType(NodeType.NT_FOLDER) && !node.isNodeType(NodeType.MIX_LAST_MODIFIED)) {
- node.addMixin(NodeType.MIX_LAST_MODIFIED);
- if (log.isTraceEnabled())
- log.trace("Last modified mix-in added to " + node);
- }
-
- }
-
- // end condition
- if (node.getDepth() == 0) {
-// try {
-// node.getSession().save();
-// } catch (RepositoryException e) {
-// log.warn("Cannot index workspace", e);
-// }
- return;
- } else {
- Node parent = node.getParent();
- setLastModifiedRecursive(parent, event);
- }
- }
-
- /**
- * Recursively set the last updated time on parents. Useful to use paths when
- * dealing with deletions.
- */
- protected synchronized void setLastModifiedRecursive(String path, Event event) throws RepositoryException {
- // root node will always exist, so end condition is delegated to the other
- // recursive setLastModified method
- if (session.nodeExists(path)) {
- setLastModifiedRecursive(session.getNode(path), event);
- } else {
- setLastModifiedRecursive(JcrUtils.parentPath(path), event);
- }
- }
-
- @Override
- public String toString() {
- return "Indexer for workspace " + workspaceName + " of repository " + cn;
- }
-
- class IndexingThread extends Thread {
-
- public IndexingThread() {
- super(CmsWorkspaceIndexer.this.toString());
- // TODO Auto-generated constructor stub
- }
-
- @Override
- public void run() {
- life: while (session != null && session.isLive()) {
- try {
- Event nextEvent = toProcess.take();
- processEvent(nextEvent);
- } catch (InterruptedException e) {
- // silent
- interrupted();
- }
-
- if (stopping.get() && toProcess.isEmpty()) {
- break life;
- }
- }
- if (log.isDebugEnabled())
- log.debug(CmsWorkspaceIndexer.this.toString() + " has shut down.");
- }
-
- }
-
-}
\ No newline at end of file
+++ /dev/null
-package org.argeo.cms.internal.kernel;
-
-import static org.argeo.api.DataModelNamespace.CMS_DATA_MODEL_NAMESPACE;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.argeo.api.DataModelNamespace;
-import org.argeo.cms.CmsException;
-import org.osgi.framework.Bundle;
-import org.osgi.framework.BundleContext;
-import org.osgi.framework.BundleEvent;
-import org.osgi.framework.BundleListener;
-import org.osgi.framework.wiring.BundleCapability;
-import org.osgi.framework.wiring.BundleWire;
-import org.osgi.framework.wiring.BundleWiring;
-
-class DataModels implements BundleListener {
- private final static Log log = LogFactory.getLog(DataModels.class);
-
- private Map<String, DataModel> dataModels = new TreeMap<>();
-
- public DataModels(BundleContext bc) {
- for (Bundle bundle : bc.getBundles())
- processBundle(bundle, null);
- bc.addBundleListener(this);
- }
-
- public List<DataModel> getNonAbstractDataModels() {
- List<DataModel> res = new ArrayList<>();
- for (String name : dataModels.keySet()) {
- DataModel dataModel = dataModels.get(name);
- if (!dataModel.isAbstract())
- res.add(dataModel);
- }
- // TODO reorder?
- return res;
- }
-
- @Override
- public void bundleChanged(BundleEvent event) {
- if (event.getType() == Bundle.RESOLVED) {
- processBundle(event.getBundle(), null);
- } else if (event.getType() == Bundle.UNINSTALLED) {
- BundleWiring wiring = event.getBundle().adapt(BundleWiring.class);
- List<BundleCapability> providedDataModels = wiring.getCapabilities(CMS_DATA_MODEL_NAMESPACE);
- if (providedDataModels.size() == 0)
- return;
- for (BundleCapability bundleCapability : providedDataModels) {
- dataModels.remove(bundleCapability.getAttributes().get(DataModelNamespace.NAME));
- }
- }
-
- }
-
- protected void processBundle(Bundle bundle, List<Bundle> scannedBundles) {
- if (scannedBundles != null && scannedBundles.contains(bundle))
- throw new IllegalStateException("Cycle in CMS data model requirements for " + bundle);
- BundleWiring wiring = bundle.adapt(BundleWiring.class);
- if (wiring == null) {
- int bundleState = bundle.getState();
- if (bundleState != Bundle.INSTALLED && bundleState != Bundle.UNINSTALLED) {// ignore unresolved bundles
- log.warn("Bundle " + bundle.getSymbolicName() + " #" + bundle.getBundleId() + " ("
- + bundle.getLocation() + ") cannot be adapted to a wiring");
- } else {
- if (log.isTraceEnabled())
- log.warn("Bundle " + bundle.getSymbolicName() + " is not resolved.");
- }
- return;
- }
- List<BundleCapability> providedDataModels = wiring.getCapabilities(CMS_DATA_MODEL_NAMESPACE);
- if (providedDataModels.size() == 0)
- return;
- List<BundleWire> requiredDataModels = wiring.getRequiredWires(CMS_DATA_MODEL_NAMESPACE);
- // process requirements first
- for (BundleWire bundleWire : requiredDataModels) {
- List<Bundle> nextScannedBundles = new ArrayList<>();
- if (scannedBundles != null)
- nextScannedBundles.addAll(scannedBundles);
- nextScannedBundles.add(bundle);
- Bundle providerBundle = bundleWire.getProvider().getBundle();
- processBundle(providerBundle, nextScannedBundles);
- }
- for (BundleCapability bundleCapability : providedDataModels) {
- String name = (String) bundleCapability.getAttributes().get(DataModelNamespace.NAME);
- assert name != null;
- if (!dataModels.containsKey(name)) {
- DataModel dataModel = new DataModel(name, bundleCapability, requiredDataModels);
- dataModels.put(dataModel.getName(), dataModel);
- }
- }
- }
-
- /** Return a negative depth if dataModel is required by ref, 0 otherwise. */
- static int required(DataModel ref, DataModel dataModel, int depth) {
- for (DataModel dm : ref.getRequired()) {
- if (dm.equals(dataModel))// found here
- return depth - 1;
- int d = required(dm, dataModel, depth - 1);
- if (d != 0)// found deeper
- return d;
- }
- return 0;// not found
- }
-
- class DataModel {
- private final String name;
- private final boolean abstrct;
- // private final boolean standalone;
- private final String cnd;
- private final List<DataModel> required;
-
- private DataModel(String name, BundleCapability bundleCapability, List<BundleWire> requiredDataModels) {
- assert CMS_DATA_MODEL_NAMESPACE.equals(bundleCapability.getNamespace());
- this.name = name;
- Map<String, Object> attrs = bundleCapability.getAttributes();
- abstrct = KernelUtils.asBoolean((String) attrs.get(DataModelNamespace.ABSTRACT));
- // standalone = KernelUtils.asBoolean((String)
- // attrs.get(DataModelNamespace.CAPABILITY_STANDALONE_ATTRIBUTE));
- cnd = (String) attrs.get(DataModelNamespace.CND);
- List<DataModel> req = new ArrayList<>();
- for (BundleWire wire : requiredDataModels) {
- String requiredDataModelName = (String) wire.getCapability().getAttributes()
- .get(DataModelNamespace.NAME);
- assert requiredDataModelName != null;
- DataModel requiredDataModel = dataModels.get(requiredDataModelName);
- if (requiredDataModel == null)
- throw new CmsException("No required data model " + requiredDataModelName);
- req.add(requiredDataModel);
- }
- required = Collections.unmodifiableList(req);
- }
-
- public String getName() {
- return name;
- }
-
- public boolean isAbstract() {
- return abstrct;
- }
-
- // public boolean isStandalone() {
- // return !isAbstract();
- // }
-
- public String getCnd() {
- return cnd;
- }
-
- public List<DataModel> getRequired() {
- return required;
- }
-
- // @Override
- // public int compareTo(DataModel o) {
- // if (equals(o))
- // return 0;
- // int res = required(this, o, 0);
- // if (res != 0)
- // return res;
- // // the other way round
- // res = required(o, this, 0);
- // if (res != 0)
- // return -res;
- // return 0;
- // }
-
- @Override
- public int hashCode() {
- return name.hashCode();
- }
-
- @Override
- public boolean equals(Object obj) {
- if (obj instanceof DataModel)
- return ((DataModel) obj).name.equals(name);
- return false;
- }
-
- @Override
- public String toString() {
- return "Data model " + name;
- }
-
- }
-
-}
private static Path deployConfigPath = KernelUtils.getOsgiInstancePath(KernelConstants.DEPLOY_CONFIG_PATH);
private SortedMap<LdapName, Attributes> deployConfigs = new TreeMap<>();
- private final DataModels dataModels;
+// private final DataModels dataModels;
private boolean isFirstInit = false;
private ConfigurationAdmin configurationAdmin;
- public DeployConfig(ConfigurationAdmin configurationAdmin, DataModels dataModels, boolean isClean) {
- this.dataModels = dataModels;
+ public DeployConfig(ConfigurationAdmin configurationAdmin, boolean isClean) {
+// this.dataModels = dataModels;
// ConfigurationAdmin configurationAdmin =
// bc.getService(bc.getServiceReference(ConfigurationAdmin.class));
try {
}
}
- boolean isStandalone(String dataModelName) {
- return getProps(NodeConstants.NODE_REPOS_FACTORY_PID, dataModelName) != null;
- }
-
/*
* UTILITIES
*/
+++ /dev/null
-package org.argeo.cms.internal.kernel;
-
-import java.security.PrivilegedAction;
-import java.text.SimpleDateFormat;
-import java.util.HashSet;
-import java.util.Set;
-
-import javax.jcr.Node;
-import javax.jcr.Property;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.jcr.nodetype.NodeType;
-import javax.jcr.security.Privilege;
-import javax.naming.InvalidNameException;
-import javax.naming.ldap.LdapName;
-import javax.security.auth.Subject;
-import javax.security.auth.login.LoginContext;
-
-import org.argeo.api.NodeConstants;
-import org.argeo.api.NodeUtils;
-import org.argeo.cms.CmsException;
-import org.argeo.jcr.JcrException;
-import org.argeo.jcr.JcrRepositoryWrapper;
-import org.argeo.jcr.JcrUtils;
-
-/**
- * Make sure each user has a home directory available.
- */
-class EgoRepository extends JcrRepositoryWrapper implements KernelConstants {
-
- /** The home base path. */
-// private String homeBasePath = KernelConstants.DEFAULT_HOME_BASE_PATH;
-// private String usersBasePath = KernelConstants.DEFAULT_USERS_BASE_PATH;
-// private String groupsBasePath = KernelConstants.DEFAULT_GROUPS_BASE_PATH;
-
- private Set<String> checkedUsers = new HashSet<String>();
-
- private SimpleDateFormat usersDatePath = new SimpleDateFormat("YYYY/MM");
-
- private String defaultHomeWorkspace = NodeConstants.HOME_WORKSPACE;
- private String defaultGroupsWorkspace = NodeConstants.SRV_WORKSPACE;
-// private String defaultGuestsWorkspace = NodeConstants.GUESTS_WORKSPACE;
- private final boolean remote;
-
- public EgoRepository(Repository repository, boolean remote) {
- super(repository);
- this.remote = remote;
- putDescriptor(NodeConstants.CN, NodeConstants.EGO_REPOSITORY);
- if (!remote) {
- LoginContext lc;
- try {
- lc = new LoginContext(NodeConstants.LOGIN_CONTEXT_DATA_ADMIN);
- lc.login();
- } catch (javax.security.auth.login.LoginException e1) {
- throw new IllegalStateException("Cannot login as system", e1);
- }
- Subject.doAs(lc.getSubject(), new PrivilegedAction<Void>() {
-
- @Override
- public Void run() {
- loginOrCreateWorkspace(defaultHomeWorkspace);
- loginOrCreateWorkspace(defaultGroupsWorkspace);
- return null;
- }
-
- });
- }
- }
-
- private void loginOrCreateWorkspace(String workspace) {
- Session adminSession = null;
- try {
- adminSession = JcrUtils.loginOrCreateWorkspace(getRepository(workspace), workspace);
-// JcrUtils.addPrivilege(adminSession, "/", NodeConstants.ROLE_USER, Privilege.JCR_READ);
-
-// initJcr(adminSession);
- } catch (RepositoryException e) {
- throw new JcrException("Cannot init JCR home", e);
- } finally {
- JcrUtils.logoutQuietly(adminSession);
- }
- }
-
-// @Override
-// public Session login(Credentials credentials, String workspaceName)
-// throws LoginException, NoSuchWorkspaceException, RepositoryException {
-// if (workspaceName == null) {
-// return super.login(credentials, getUserHomeWorkspace());
-// } else {
-// return super.login(credentials, workspaceName);
-// }
-// }
-
- protected String getUserHomeWorkspace() {
- // TODO base on JAAS Subject metadata
- return defaultHomeWorkspace;
- }
-
- protected String getGroupsWorkspace() {
- // TODO base on JAAS Subject metadata
- return defaultGroupsWorkspace;
- }
-
-// protected String getGuestsWorkspace() {
-// // TODO base on JAAS Subject metadata
-// return defaultGuestsWorkspace;
-// }
-
- @Override
- protected void processNewSession(Session session, String workspaceName) {
- String username = session.getUserID();
- if (username == null || username.toString().equals(""))
- return;
- if (session.getUserID().equals(NodeConstants.ROLE_ANONYMOUS))
- return;
-
- String userHomeWorkspace = getUserHomeWorkspace();
- if (workspaceName == null || !workspaceName.equals(userHomeWorkspace))
- return;
-
- if (checkedUsers.contains(username))
- return;
- Session adminSession = KernelUtils.openAdminSession(getRepository(workspaceName), workspaceName);
- try {
- syncJcr(adminSession, username);
- checkedUsers.add(username);
- } finally {
- JcrUtils.logoutQuietly(adminSession);
- }
- }
-
- /*
- * JCR
- */
- /** Session is logged out. */
- private void initJcr(Session adminSession) {
- try {
-// JcrUtils.mkdirs(adminSession, homeBasePath);
-// JcrUtils.mkdirs(adminSession, groupsBasePath);
- adminSession.save();
-
-// JcrUtils.addPrivilege(adminSession, homeBasePath, NodeConstants.ROLE_USER_ADMIN, Privilege.JCR_READ);
-// JcrUtils.addPrivilege(adminSession, groupsBasePath, NodeConstants.ROLE_USER_ADMIN, Privilege.JCR_READ);
- adminSession.save();
- } catch (RepositoryException e) {
- throw new CmsException("Cannot initialize home repository", e);
- } finally {
- JcrUtils.logoutQuietly(adminSession);
- }
- }
-
- protected synchronized void syncJcr(Session adminSession, String username) {
- // only in the default workspace
-// if (workspaceName != null)
-// return;
- // skip system users
- if (username.endsWith(NodeConstants.ROLES_BASEDN))
- return;
-
- try {
- Node userHome = NodeUtils.getUserHome(adminSession, username);
- if (userHome == null) {
-// String homePath = generateUserPath(username);
- String userId = extractUserId(username);
-// if (adminSession.itemExists(homePath))// duplicate user id
-// userHome = adminSession.getNode(homePath).getParent().addNode(JcrUtils.lastPathElement(homePath));
-// else
-// userHome = JcrUtils.mkdirs(adminSession, homePath);
- userHome = adminSession.getRootNode().addNode(userId);
-// userHome.addMixin(NodeTypes.NODE_USER_HOME);
- userHome.addMixin(NodeType.MIX_CREATED);
- userHome.addMixin(NodeType.MIX_TITLE);
- userHome.setProperty(Property.JCR_ID, username);
- // TODO use display name
- userHome.setProperty(Property.JCR_TITLE, userId);
-// userHome.setProperty(NodeNames.LDAP_UID, username);
- adminSession.save();
-
- JcrUtils.clearAccessControList(adminSession, userHome.getPath(), username);
- JcrUtils.addPrivilege(adminSession, userHome.getPath(), username, Privilege.JCR_ALL);
-// JackrabbitSecurityUtils.denyPrivilege(adminSession, userHome.getPath(), NodeConstants.ROLE_USER,
-// Privilege.JCR_READ);
- }
- if (adminSession.hasPendingChanges())
- adminSession.save();
- } catch (RepositoryException e) {
- JcrUtils.discardQuietly(adminSession);
- throw new JcrException("Cannot sync node security model for " + username, e);
- }
- }
-
- /** Generate path for a new user home */
- private String generateUserPath(String username) {
- LdapName dn;
- try {
- dn = new LdapName(username);
- } catch (InvalidNameException e) {
- throw new CmsException("Invalid name " + username, e);
- }
- String userId = dn.getRdn(dn.size() - 1).getValue().toString();
- return '/' + userId;
-// int atIndex = userId.indexOf('@');
-// if (atIndex < 0) {
-// return homeBasePath+'/' + userId;
-// } else {
-// return usersBasePath + '/' + usersDatePath.format(new Date()) + '/' + userId;
-// }
- }
-
- private String extractUserId(String username) {
- LdapName dn;
- try {
- dn = new LdapName(username);
- } catch (InvalidNameException e) {
- throw new CmsException("Invalid name " + username, e);
- }
- String userId = dn.getRdn(dn.size() - 1).getValue().toString();
- return userId;
-// int atIndex = userId.indexOf('@');
-// if (atIndex < 0) {
-// return homeBasePath+'/' + userId;
-// } else {
-// return usersBasePath + '/' + usersDatePath.format(new Date()) + '/' + userId;
-// }
- }
-
- public void createWorkgroup(LdapName dn) {
- String groupsWorkspace = getGroupsWorkspace();
- Session adminSession = KernelUtils.openAdminSession(getRepository(groupsWorkspace), groupsWorkspace);
- String cn = dn.getRdn(dn.size() - 1).getValue().toString();
- Node newWorkgroup = NodeUtils.getGroupHome(adminSession, cn);
- if (newWorkgroup != null) {
- JcrUtils.logoutQuietly(adminSession);
- throw new CmsException("Workgroup " + newWorkgroup + " already exists for " + dn);
- }
- try {
- // TODO enhance transformation of cn to a valid node name
- // String relPath = cn.replaceAll("[^a-zA-Z0-9]", "_");
- String relPath = JcrUtils.replaceInvalidChars(cn);
- newWorkgroup = adminSession.getRootNode().addNode(relPath, NodeType.NT_UNSTRUCTURED);
-// newWorkgroup = JcrUtils.mkdirs(adminSession.getNode(groupsBasePath), relPath, NodeType.NT_UNSTRUCTURED);
-// newWorkgroup.addMixin(NodeTypes.NODE_GROUP_HOME);
- newWorkgroup.addMixin(NodeType.MIX_CREATED);
- newWorkgroup.addMixin(NodeType.MIX_TITLE);
- newWorkgroup.setProperty(Property.JCR_ID, dn.toString());
- newWorkgroup.setProperty(Property.JCR_TITLE, cn);
-// newWorkgroup.setProperty(NodeNames.LDAP_CN, cn);
- adminSession.save();
- JcrUtils.addPrivilege(adminSession, newWorkgroup.getPath(), dn.toString(), Privilege.JCR_ALL);
- adminSession.save();
- } catch (RepositoryException e) {
- throw new CmsException("Cannot create workgroup", e);
- } finally {
- JcrUtils.logoutQuietly(adminSession);
- }
-
- }
-
- public boolean isRemote() {
- return remote;
- }
-
-}
import java.io.Reader;
import java.net.InetAddress;
import java.net.URI;
-import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Dictionary;
-import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
-import java.util.Map;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.RepositoryFactory;
import javax.security.auth.x500.X500Principal;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.LogFactory;
import org.argeo.api.NodeConstants;
import org.argeo.cms.internal.http.InternalHttpConstants;
-import org.argeo.cms.internal.jcr.RepoConf;
-import org.argeo.jackrabbit.client.ClientDavexRepositoryFactory;
-import org.argeo.jcr.JcrException;
-import org.argeo.naming.LdapAttrs;
import org.argeo.osgi.useradmin.UserAdminConf;
-import org.osgi.framework.BundleContext;
-import org.osgi.framework.Constants;
/**
* Interprets framework properties in order to generate the initial deploy
+++ /dev/null
-package org.argeo.cms.internal.kernel;
-
-import java.util.Map;
-import java.util.TreeMap;
-
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.core.RepositoryImpl;
-import org.argeo.api.NodeConstants;
-
-class JackrabbitLocalRepository extends LocalRepository {
- private final static Log log = LogFactory.getLog(JackrabbitLocalRepository.class);
- final String SECURITY_WORKSPACE = "security";
-
- private Map<String, CmsWorkspaceIndexer> workspaceMonitors = new TreeMap<>();
-
- public JackrabbitLocalRepository(RepositoryImpl repository, String cn) {
- super(repository, cn);
-// Session session = KernelUtils.openAdminSession(repository);
-// try {
-// if (NodeConstants.NODE.equals(cn))
-// for (String workspaceName : session.getWorkspace().getAccessibleWorkspaceNames()) {
-// addMonitor(workspaceName);
-// }
-// } catch (RepositoryException e) {
-// throw new IllegalStateException(e);
-// } finally {
-// JcrUtils.logoutQuietly(session);
-// }
- }
-
- protected RepositoryImpl getJackrabbitrepository(String workspaceName) {
- return (RepositoryImpl) getRepository(workspaceName);
- }
-
- @Override
- protected synchronized void processNewSession(Session session, String workspaceName) {
-// String realWorkspaceName = session.getWorkspace().getName();
-// addMonitor(realWorkspaceName);
- }
-
- private void addMonitor(String realWorkspaceName) {
- if (realWorkspaceName.equals(SECURITY_WORKSPACE))
- return;
- if (!NodeConstants.NODE_REPOSITORY.equals(getCn()))
- return;
-
- if (!workspaceMonitors.containsKey(realWorkspaceName)) {
- try {
- CmsWorkspaceIndexer workspaceMonitor = new CmsWorkspaceIndexer(
- getJackrabbitrepository(realWorkspaceName), getCn(), realWorkspaceName);
- workspaceMonitors.put(realWorkspaceName, workspaceMonitor);
- workspaceMonitor.init();
- if (log.isDebugEnabled())
- log.debug("Registered " + workspaceMonitor);
- } catch (RepositoryException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- }
- }
-
- public void destroy() {
- for (String workspaceName : workspaceMonitors.keySet()) {
- workspaceMonitors.get(workspaceName).destroy();
- }
- }
-
-}
+++ /dev/null
-package org.argeo.cms.internal.kernel;
-
-import java.io.File;
-import java.lang.management.ManagementFactory;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.api.stats.RepositoryStatistics;
-import org.apache.jackrabbit.stats.RepositoryStatisticsImpl;
-import org.argeo.cms.internal.auth.CmsSessionImpl;
-
-/**
- * Background thread started by the kernel, which gather statistics and
- * monitor/control other processes.
- */
-class KernelThread extends Thread {
- private final static Log log = LogFactory.getLog(KernelThread.class);
-
- private RepositoryStatisticsImpl repoStats;
-
- /** The smallest period of operation, in ms */
- private final long PERIOD = 60 * 1000l;
- /** One ms in ns */
- private final static long m = 1000l * 1000l;
- private final static long M = 1024l * 1024l;
-
- private boolean running = true;
-
- private Log kernelStatsLog = LogFactory.getLog("argeo.stats.kernel");
- private Log nodeStatsLog = LogFactory.getLog("argeo.stats.node");
-
- @SuppressWarnings("unused")
- private long cycle = 0l;
-
- public KernelThread(ThreadGroup threadGroup, String name) {
- super(threadGroup, name);
- }
-
- private void doSmallestPeriod() {
- // Clean expired sessions
- CmsSessionImpl.closeInvalidSessions();
-
- if (kernelStatsLog.isDebugEnabled()) {
- StringBuilder line = new StringBuilder(64);
- line.append("§\t");
- long freeMem = Runtime.getRuntime().freeMemory() / M;
- long totalMem = Runtime.getRuntime().totalMemory() / M;
- long maxMem = Runtime.getRuntime().maxMemory() / M;
- double loadAvg = ManagementFactory.getOperatingSystemMXBean().getSystemLoadAverage();
- // in min
- boolean min = true;
- long uptime = ManagementFactory.getRuntimeMXBean().getUptime() / (1000 * 60);
- if (uptime > 24 * 60) {
- min = false;
- uptime = uptime / 60;
- }
- line.append(uptime).append(min ? " min" : " h").append('\t');
- line.append(loadAvg).append('\t').append(maxMem).append('\t').append(totalMem).append('\t').append(freeMem)
- .append('\t');
- kernelStatsLog.debug(line);
- }
-
- if (nodeStatsLog.isDebugEnabled()) {
- File dataDir = KernelUtils.getOsgiInstanceDir();
- long freeSpace = dataDir.getUsableSpace() / M;
- // File currentRoot = null;
- // for (File root : File.listRoots()) {
- // String rootPath = root.getAbsolutePath();
- // if (dataDir.getAbsolutePath().startsWith(rootPath)) {
- // if (currentRoot == null
- // || (rootPath.length() > currentRoot.getPath()
- // .length())) {
- // currentRoot = root;
- // }
- // }
- // }
- // long totalSpace = currentRoot.getTotalSpace();
- StringBuilder line = new StringBuilder(128);
- line.append("§\t").append(freeSpace).append(" MB left in " + dataDir);
- line.append('\n');
- if (repoStats != null)
- for (RepositoryStatistics.Type type : RepositoryStatistics.Type.values()) {
- long[] vals = repoStats.getTimeSeries(type).getValuePerMinute();
- long val = vals[vals.length - 1];
- line.append(type.name()).append('\t').append(val).append('\n');
- }
- nodeStatsLog.debug(line);
- }
- }
-
- @Override
- public void run() {
- if (log.isTraceEnabled())
- log.trace("Kernel thread started.");
- final long periodNs = PERIOD * m;
- while (running) {
- long beginNs = System.nanoTime();
- doSmallestPeriod();
-
- long waitNs = periodNs - (System.nanoTime() - beginNs);
- if (waitNs < 0)
- continue;
- // wait
- try {
- sleep(waitNs / m, (int) (waitNs % m));
- } catch (InterruptedException e) {
- // silent
- }
- cycle++;
- }
- }
-
- synchronized void destroyAndJoin() {
- running = false;
- notifyAll();
-// interrupt();
-// try {
-// join(PERIOD * 2);
-// } catch (InterruptedException e) {
-// // throw new CmsException("Kernel thread destruction was interrupted");
-// log.error("Kernel thread destruction was interrupted", e);
-// }
- }
-}
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.security.PrivilegedAction;
import java.security.URIParameter;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.TreeMap;
import java.util.TreeSet;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.security.auth.Subject;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
-
import org.apache.commons.logging.Log;
import org.argeo.api.DataModelNamespace;
-import org.argeo.api.NodeConstants;
import org.osgi.framework.BundleContext;
import org.osgi.util.tracker.ServiceTracker;
out.println(key + "=" + display.get(key));
}
- static Session openAdminSession(Repository repository) {
- return openAdminSession(repository, null);
- }
-
- static Session openAdminSession(final Repository repository, final String workspaceName) {
- LoginContext loginContext = loginAsDataAdmin();
- return Subject.doAs(loginContext.getSubject(), new PrivilegedAction<Session>() {
-
- @Override
- public Session run() {
- try {
- return repository.login(workspaceName);
- } catch (RepositoryException e) {
- throw new IllegalStateException("Cannot open admin session", e);
- } finally {
- try {
- loginContext.logout();
- } catch (LoginException e) {
- throw new IllegalStateException(e);
- }
- }
- }
-
- });
- }
-
- static LoginContext loginAsDataAdmin() {
- ClassLoader currentCl = Thread.currentThread().getContextClassLoader();
- Thread.currentThread().setContextClassLoader(KernelUtils.class.getClassLoader());
- LoginContext loginContext;
- try {
- loginContext = new LoginContext(NodeConstants.LOGIN_CONTEXT_DATA_ADMIN);
- loginContext.login();
- } catch (LoginException e1) {
- throw new IllegalStateException("Could not login as data admin", e1);
- } finally {
- Thread.currentThread().setContextClassLoader(currentCl);
- }
- return loginContext;
- }
-
- static void doAsDataAdmin(Runnable action) {
- LoginContext loginContext = loginAsDataAdmin();
- Subject.doAs(loginContext.getSubject(), new PrivilegedAction<Void>() {
-
- @Override
- public Void run() {
- try {
- action.run();
- return null;
- } finally {
- try {
- loginContext.logout();
- } catch (LoginException e) {
- throw new IllegalStateException(e);
- }
- }
- }
-
- });
- }
+// static Session openAdminSession(Repository repository) {
+// return openAdminSession(repository, null);
+// }
+//
+// static Session openAdminSession(final Repository repository, final String workspaceName) {
+// LoginContext loginContext = loginAsDataAdmin();
+// return Subject.doAs(loginContext.getSubject(), new PrivilegedAction<Session>() {
+//
+// @Override
+// public Session run() {
+// try {
+// return repository.login(workspaceName);
+// } catch (RepositoryException e) {
+// throw new IllegalStateException("Cannot open admin session", e);
+// } finally {
+// try {
+// loginContext.logout();
+// } catch (LoginException e) {
+// throw new IllegalStateException(e);
+// }
+// }
+// }
+//
+// });
+// }
+//
+// static LoginContext loginAsDataAdmin() {
+// ClassLoader currentCl = Thread.currentThread().getContextClassLoader();
+// Thread.currentThread().setContextClassLoader(KernelUtils.class.getClassLoader());
+// LoginContext loginContext;
+// try {
+// loginContext = new LoginContext(NodeConstants.LOGIN_CONTEXT_DATA_ADMIN);
+// loginContext.login();
+// } catch (LoginException e1) {
+// throw new IllegalStateException("Could not login as data admin", e1);
+// } finally {
+// Thread.currentThread().setContextClassLoader(currentCl);
+// }
+// return loginContext;
+// }
+
+// static void doAsDataAdmin(Runnable action) {
+// LoginContext loginContext = loginAsDataAdmin();
+// Subject.doAs(loginContext.getSubject(), new PrivilegedAction<Void>() {
+//
+// @Override
+// public Void run() {
+// try {
+// action.run();
+// return null;
+// } finally {
+// try {
+// loginContext.logout();
+// } catch (LoginException e) {
+// throw new IllegalStateException(e);
+// }
+// }
+// }
+//
+// });
+// }
static void asyncOpen(ServiceTracker<?, ?> st) {
Runnable run = new Runnable() {
+++ /dev/null
-package org.argeo.cms.internal.kernel;
-
-import javax.jcr.Repository;
-
-import org.argeo.api.NodeConstants;
-import org.argeo.jcr.JcrRepositoryWrapper;
-
-class LocalRepository extends JcrRepositoryWrapper {
- private final String cn;
-
- public LocalRepository(Repository repository, String cn) {
- super(repository);
- this.cn = cn;
- // Map<String, Object> attrs = dataModelCapability.getAttributes();
- // cn = (String) attrs.get(DataModelNamespace.NAME);
- putDescriptor(NodeConstants.CN, cn);
- }
-
- String getCn() {
- return cn;
- }
-
-}
+++ /dev/null
-package org.argeo.cms.internal.kernel;
-
-import java.util.Dictionary;
-
-import javax.jcr.Repository;
-
-import org.argeo.cms.security.JcrKeyring;
-import org.osgi.service.cm.ConfigurationException;
-import org.osgi.service.cm.ManagedService;
-
-class NodeKeyRing extends JcrKeyring implements ManagedService{
-
- public NodeKeyRing(Repository repository) {
- super(repository);
- }
-
- @Override
- public void updated(Dictionary<String, ?> properties) throws ConfigurationException {
- }
-}
+++ /dev/null
-package org.argeo.cms.internal.kernel;
-
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.RepositoryFactory;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.jcr2dav.Jcr2davRepositoryFactory;
-import org.argeo.api.NodeConstants;
-import org.argeo.cms.internal.jcr.RepoConf;
-import org.osgi.framework.BundleContext;
-import org.osgi.framework.FrameworkUtil;
-import org.osgi.framework.InvalidSyntaxException;
-import org.osgi.framework.ServiceReference;
-
-/**
- * OSGi-aware Jackrabbit repository factory which can retrieve/publish
- * {@link Repository} as OSGi services.
- */
-class NodeRepositoryFactory implements RepositoryFactory {
- private final Log log = LogFactory.getLog(getClass());
-// private final BundleContext bundleContext = FrameworkUtil.getBundle(getClass()).getBundleContext();
-
- // private Resource fileRepositoryConfiguration = new ClassPathResource(
- // "/org/argeo/cms/internal/kernel/repository-localfs.xml");
-
- protected Repository getRepositoryByAlias(String alias) {
- BundleContext bundleContext = Activator.getBundleContext();
- if (bundleContext != null) {
- try {
- Collection<ServiceReference<Repository>> srs = bundleContext.getServiceReferences(Repository.class,
- "(" + NodeConstants.CN + "=" + alias + ")");
- if (srs.size() == 0)
- throw new IllegalArgumentException("No repository with alias " + alias + " found in OSGi registry");
- else if (srs.size() > 1)
- throw new IllegalArgumentException(
- srs.size() + " repositories with alias " + alias + " found in OSGi registry");
- return bundleContext.getService(srs.iterator().next());
- } catch (InvalidSyntaxException e) {
- throw new IllegalArgumentException("Cannot find repository with alias " + alias, e);
- }
- } else {
- // TODO ability to filter static services
- return null;
- }
- }
-
- // private void publish(String alias, Repository repository, Properties
- // properties) {
- // if (bundleContext != null) {
- // // do not modify reference
- // Hashtable<String, String> props = new Hashtable<String, String>();
- // props.putAll(props);
- // props.put(JCR_REPOSITORY_ALIAS, alias);
- // bundleContext.registerService(Repository.class.getName(), repository,
- // props);
- // }
- // }
-
- @SuppressWarnings({ "rawtypes" })
- public Repository getRepository(Map parameters) throws RepositoryException {
- // // check if can be found by alias
- // Repository repository = super.getRepository(parameters);
- // if (repository != null)
- // return repository;
-
- // check if remote
- Repository repository;
- String uri = null;
- if (parameters.containsKey(RepoConf.labeledUri.name()))
- uri = parameters.get(NodeConstants.LABELED_URI).toString();
- else if (parameters.containsKey(KernelConstants.JACKRABBIT_REPOSITORY_URI))
- uri = parameters.get(KernelConstants.JACKRABBIT_REPOSITORY_URI).toString();
-
- if (uri != null) {
- if (uri.startsWith("http")) {// http, https
- Object defaultWorkspace = parameters.get(RepoConf.defaultWorkspace.name());
- repository = createRemoteRepository(uri, defaultWorkspace != null ? defaultWorkspace.toString() : null);
- } else if (uri.startsWith("file"))// http, https
- repository = createFileRepository(uri, parameters);
- else if (uri.startsWith("vm")) {
- // log.warn("URI " + uri + " should have been managed by generic
- // JCR repository factory");
- repository = getRepositoryByAlias(getAliasFromURI(uri));
- } else
- throw new IllegalArgumentException("Unrecognized URI format " + uri);
-
- }
-
- else if (parameters.containsKey(NodeConstants.CN)) {
- // Properties properties = new Properties();
- // properties.putAll(parameters);
- String alias = parameters.get(NodeConstants.CN).toString();
- // publish(alias, repository, properties);
- // log.info("Registered JCR repository under alias '" + alias + "'
- // with properties " + properties);
- repository = getRepositoryByAlias(alias);
- } else
- throw new IllegalArgumentException("Not enough information in " + parameters);
-
- if (repository == null)
- throw new IllegalArgumentException("Repository not found " + parameters);
-
- return repository;
- }
-
- protected Repository createRemoteRepository(String uri, String defaultWorkspace) throws RepositoryException {
- Map<String, String> params = new HashMap<String, String>();
- params.put(KernelConstants.JACKRABBIT_REPOSITORY_URI, uri);
- if (defaultWorkspace != null)
- params.put(KernelConstants.JACKRABBIT_REMOTE_DEFAULT_WORKSPACE, defaultWorkspace);
- Repository repository = new Jcr2davRepositoryFactory().getRepository(params);
- if (repository == null)
- throw new IllegalArgumentException("Remote Davex repository " + uri + " not found");
- log.info("Initialized remote Jackrabbit repository from uri " + uri);
- return repository;
- }
-
- @SuppressWarnings({ "rawtypes" })
- protected Repository createFileRepository(final String uri, Map parameters) throws RepositoryException {
- throw new UnsupportedOperationException();
- // InputStream configurationIn = null;
- // try {
- // Properties vars = new Properties();
- // vars.putAll(parameters);
- // String dirPath = uri.substring("file:".length());
- // File homeDir = new File(dirPath);
- // if (homeDir.exists() && !homeDir.isDirectory())
- // throw new ArgeoJcrException("Repository home " + dirPath + " is not a
- // directory");
- // if (!homeDir.exists())
- // homeDir.mkdirs();
- // configurationIn = fileRepositoryConfiguration.getInputStream();
- // vars.put(RepositoryConfigurationParser.REPOSITORY_HOME_VARIABLE,
- // homeDir.getCanonicalPath());
- // RepositoryConfig repositoryConfig = RepositoryConfig.create(new
- // InputSource(configurationIn), vars);
- //
- // // TransientRepository repository = new
- // // TransientRepository(repositoryConfig);
- // final RepositoryImpl repository =
- // RepositoryImpl.create(repositoryConfig);
- // Session session = repository.login();
- // // FIXME make it generic
- // org.argeo.jcr.JcrUtils.addPrivilege(session, "/", "ROLE_ADMIN",
- // "jcr:all");
- // org.argeo.jcr.JcrUtils.logoutQuietly(session);
- // Runtime.getRuntime().addShutdownHook(new Thread("Clean JCR repository
- // " + uri) {
- // public void run() {
- // repository.shutdown();
- // log.info("Destroyed repository " + uri);
- // }
- // });
- // log.info("Initialized file Jackrabbit repository from uri " + uri);
- // return repository;
- // } catch (Exception e) {
- // throw new ArgeoJcrException("Cannot create repository " + uri, e);
- // } finally {
- // IOUtils.closeQuietly(configurationIn);
- // }
- }
-
- protected String getAliasFromURI(String uri) {
- try {
- URI uriObj = new URI(uri);
- String alias = uriObj.getPath();
- if (alias.charAt(0) == '/')
- alias = alias.substring(1);
- if (alias.charAt(alias.length() - 1) == '/')
- alias = alias.substring(0, alias.length() - 1);
- return alias;
- } catch (URISyntaxException e) {
- throw new IllegalArgumentException("Cannot interpret URI " + uri, e);
- }
- }
-
- /**
- * Called after the repository has been initialised. Does nothing by default.
- */
- @SuppressWarnings("rawtypes")
- protected void postInitialization(Repository repository, Map parameters) {
-
- }
-}
+++ /dev/null
-package org.argeo.cms.internal.kernel;
-
-import java.net.URI;
-import java.util.Dictionary;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.jcr.Repository;
-import javax.jcr.RepositoryFactory;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.core.RepositoryContext;
-import org.argeo.api.NodeConstants;
-import org.argeo.cms.internal.jcr.RepoConf;
-import org.argeo.cms.internal.jcr.RepositoryBuilder;
-import org.argeo.util.LangUtils;
-import org.osgi.framework.Constants;
-import org.osgi.service.cm.ConfigurationException;
-import org.osgi.service.cm.ManagedServiceFactory;
-
-/** A {@link ManagedServiceFactory} creating or referencing JCR repositories. */
-class RepositoryServiceFactory implements ManagedServiceFactory {
- private final static Log log = LogFactory.getLog(RepositoryServiceFactory.class);
-// private final BundleContext bc = FrameworkUtil.getBundle(RepositoryServiceFactory.class).getBundleContext();
-
- private Map<String, RepositoryContext> repositories = new HashMap<String, RepositoryContext>();
- private Map<String, Object> pidToCn = new HashMap<String, Object>();
-
- @Override
- public String getName() {
- return "Jackrabbit repository service factory";
- }
-
- @Override
- public void updated(String pid, Dictionary<String, ?> properties) throws ConfigurationException {
- if (repositories.containsKey(pid))
- throw new IllegalArgumentException("Already a repository registered for " + pid);
-
- if (properties == null)
- return;
-
- if (repositories.containsKey(pid)) {
- log.warn("Ignore update of Jackrabbit repository " + pid);
- return;
- }
-
- try {
- Object labeledUri = properties.get(RepoConf.labeledUri.name());
- if (labeledUri == null) {
- RepositoryBuilder repositoryBuilder = new RepositoryBuilder();
- RepositoryContext repositoryContext = repositoryBuilder.createRepositoryContext(properties);
- repositories.put(pid, repositoryContext);
- Dictionary<String, Object> props = LangUtils.dict(Constants.SERVICE_PID, pid);
- // props.put(ArgeoJcrConstants.JCR_REPOSITORY_URI,
- // properties.get(RepoConf.labeledUri.name()));
- Object cn = properties.get(NodeConstants.CN);
- if (cn != null) {
- props.put(NodeConstants.CN, cn);
- // props.put(NodeConstants.JCR_REPOSITORY_ALIAS, cn);
- pidToCn.put(pid, cn);
- }
- Activator.registerService(RepositoryContext.class, repositoryContext, props);
- } else {
- try {
- Object cn = properties.get(NodeConstants.CN);
- Object defaultWorkspace = properties.get(RepoConf.defaultWorkspace.name());
- if (defaultWorkspace == null)
- defaultWorkspace = RepoConf.defaultWorkspace.getDefault();
- URI uri = new URI(labeledUri.toString());
-// RepositoryFactory repositoryFactory = bc
-// .getService(bc.getServiceReference(RepositoryFactory.class));
- RepositoryFactory repositoryFactory = Activator.getService(RepositoryFactory.class);
- Map<String, String> parameters = new HashMap<String, String>();
- parameters.put(RepoConf.labeledUri.name(), uri.toString());
- parameters.put(RepoConf.defaultWorkspace.name(), defaultWorkspace.toString());
- Repository repository = repositoryFactory.getRepository(parameters);
- // Repository repository = NodeUtils.getRepositoryByUri(repositoryFactory,
- // uri.toString());
- Dictionary<String, Object> props = LangUtils.dict(Constants.SERVICE_PID, pid);
- props.put(RepoConf.labeledUri.name(),
- new URI(uri.getScheme(), null, uri.getHost(), uri.getPort(), uri.getPath(), null, null)
- .toString());
- if (cn != null) {
- props.put(NodeConstants.CN, cn);
- // props.put(NodeConstants.JCR_REPOSITORY_ALIAS, cn);
- pidToCn.put(pid, cn);
- }
- Activator.registerService(Repository.class, repository, props);
-
- // home
- if (cn.equals(NodeConstants.NODE_REPOSITORY)) {
- Dictionary<String, Object> homeProps = LangUtils.dict(NodeConstants.CN,
- NodeConstants.EGO_REPOSITORY);
- EgoRepository homeRepository = new EgoRepository(repository, true);
- Activator.registerService(Repository.class, homeRepository, homeProps);
- }
- } catch (Exception e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- }
- } catch (Exception e) {
- throw new IllegalStateException("Cannot create Jackrabbit repository " + pid, e);
- }
-
- }
-
- @Override
- public void deleted(String pid) {
- RepositoryContext repositoryContext = repositories.remove(pid);
- repositoryContext.getRepository().shutdown();
- if (log.isDebugEnabled())
- log.debug("Deleted repository " + pid);
- }
-
- public void shutdown() {
- for (String pid : repositories.keySet()) {
- try {
- repositories.get(pid).getRepository().shutdown();
- if (log.isDebugEnabled())
- log.debug("Shut down repository " + pid
- + (pidToCn.containsKey(pid) ? " (" + pidToCn.get(pid) + ")" : ""));
- } catch (Exception e) {
- log.error("Error when shutting down Jackrabbit repository " + pid, e);
- }
- }
- }
-
-}
+++ /dev/null
-package org.argeo.cms.security;
-
-import java.io.ByteArrayInputStream;
-import java.io.CharArrayReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.Reader;
-import java.nio.charset.StandardCharsets;
-import java.security.GeneralSecurityException;
-import java.security.NoSuchAlgorithmException;
-import java.security.Provider;
-import java.security.SecureRandom;
-
-import javax.crypto.Cipher;
-import javax.crypto.CipherInputStream;
-import javax.crypto.NoSuchPaddingException;
-import javax.crypto.SecretKey;
-import javax.crypto.spec.IvParameterSpec;
-import javax.jcr.Binary;
-import javax.jcr.Node;
-import javax.jcr.NodeIterator;
-import javax.jcr.Property;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.jcr.query.Query;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.argeo.api.NodeConstants;
-import org.argeo.api.NodeUtils;
-import org.argeo.api.security.PBEKeySpecCallback;
-import org.argeo.cms.ArgeoNames;
-import org.argeo.cms.ArgeoTypes;
-import org.argeo.jcr.JcrException;
-import org.argeo.jcr.JcrUtils;
-
-/** JCR based implementation of a keyring */
-public class JcrKeyring extends AbstractKeyring implements ArgeoNames {
- private final static Log log = LogFactory.getLog(JcrKeyring.class);
- /**
- * Stronger with 256, but causes problem with Oracle JVM, force 128 in this case
- */
- public final static Long DEFAULT_SECRETE_KEY_LENGTH = 256l;
- public final static String DEFAULT_SECRETE_KEY_FACTORY = "PBKDF2WithHmacSHA1";
- public final static String DEFAULT_SECRETE_KEY_ENCRYPTION = "AES";
- public final static String DEFAULT_CIPHER_NAME = "AES/CBC/PKCS5Padding";
-
- private Integer iterationCountFactor = 200;
- private Long secretKeyLength = DEFAULT_SECRETE_KEY_LENGTH;
- private String secretKeyFactoryName = DEFAULT_SECRETE_KEY_FACTORY;
- private String secretKeyEncryption = DEFAULT_SECRETE_KEY_ENCRYPTION;
- private String cipherName = DEFAULT_CIPHER_NAME;
-
- private final Repository repository;
- // TODO remove thread local session ; open a session each time
- private ThreadLocal<Session> sessionThreadLocal = new ThreadLocal<Session>() {
-
- @Override
- protected Session initialValue() {
- return login();
- }
-
- };
-
- // FIXME is it really still needed?
- /**
- * When setup is called the session has not yet been saved and we don't want to
- * save it since there maybe other data which would be inconsistent. So we keep
- * a reference to this node which will then be used (an reset to null) when
- * handling the PBE callback. We keep one per thread in case multiple users are
- * accessing the same instance of a keyring.
- */
- // private ThreadLocal<Node> notYetSavedKeyring = new ThreadLocal<Node>() {
- //
- // @Override
- // protected Node initialValue() {
- // return null;
- // }
- // };
-
- public JcrKeyring(Repository repository) {
- this.repository = repository;
- }
-
- private Session session() {
- Session session = this.sessionThreadLocal.get();
- if (!session.isLive()) {
- session = login();
- sessionThreadLocal.set(session);
- }
- return session;
- }
-
- private Session login() {
- try {
- return repository.login(NodeConstants.HOME_WORKSPACE);
- } catch (RepositoryException e) {
- throw new JcrException("Cannot login key ring session", e);
- }
- }
-
- @Override
- protected synchronized Boolean isSetup() {
- Session session = null;
- try {
- // if (notYetSavedKeyring.get() != null)
- // return true;
- session = session();
- session.refresh(true);
- Node userHome = NodeUtils.getUserHome(session);
- return userHome.hasNode(ARGEO_KEYRING);
- } catch (RepositoryException e) {
- throw new JcrException("Cannot check whether keyring is setup", e);
- } finally {
- JcrUtils.logoutQuietly(session);
- }
- }
-
- @Override
- protected synchronized void setup(char[] password) {
- Binary binary = null;
- // InputStream in = null;
- try {
- session().refresh(true);
- Node userHome = NodeUtils.getUserHome(session());
- Node keyring;
- if (userHome.hasNode(ARGEO_KEYRING)) {
- throw new IllegalArgumentException("Keyring already set up");
- } else {
- keyring = userHome.addNode(ARGEO_KEYRING);
- }
- keyring.addMixin(ArgeoTypes.ARGEO_PBE_SPEC);
-
- // deterministic salt and iteration count based on username
- String username = session().getUserID();
- byte[] salt = new byte[8];
- byte[] usernameBytes = username.getBytes(StandardCharsets.UTF_8);
- for (int i = 0; i < salt.length; i++) {
- if (i < usernameBytes.length)
- salt[i] = usernameBytes[i];
- else
- salt[i] = 0;
- }
- try (InputStream in = new ByteArrayInputStream(salt);) {
- binary = session().getValueFactory().createBinary(in);
- keyring.setProperty(ARGEO_SALT, binary);
- } catch (IOException e) {
- throw new RuntimeException("Cannot set keyring salt", e);
- }
-
- Integer iterationCount = username.length() * iterationCountFactor;
- keyring.setProperty(ARGEO_ITERATION_COUNT, iterationCount);
-
- // default algo
- // TODO check if algo and key length are available, use DES if not
- keyring.setProperty(ARGEO_SECRET_KEY_FACTORY, secretKeyFactoryName);
- keyring.setProperty(ARGEO_KEY_LENGTH, secretKeyLength);
- keyring.setProperty(ARGEO_SECRET_KEY_ENCRYPTION, secretKeyEncryption);
- keyring.setProperty(ARGEO_CIPHER, cipherName);
-
- keyring.getSession().save();
-
- // encrypted password hash
- // IOUtils.closeQuietly(in);
- // JcrUtils.closeQuietly(binary);
- // byte[] btPass = hash(password, salt, iterationCount);
- // in = new ByteArrayInputStream(btPass);
- // binary = session().getValueFactory().createBinary(in);
- // keyring.setProperty(ARGEO_PASSWORD, binary);
-
- // notYetSavedKeyring.set(keyring);
- } catch (RepositoryException e) {
- throw new JcrException("Cannot setup keyring", e);
- } finally {
- JcrUtils.closeQuietly(binary);
- // IOUtils.closeQuietly(in);
- // JcrUtils.discardQuietly(session());
- }
- }
-
- @Override
- protected synchronized void handleKeySpecCallback(PBEKeySpecCallback pbeCallback) {
- Session session = null;
- try {
- session = session();
- session.refresh(true);
- Node userHome = NodeUtils.getUserHome(session);
- Node keyring;
- if (userHome.hasNode(ARGEO_KEYRING))
- keyring = userHome.getNode(ARGEO_KEYRING);
- // else if (notYetSavedKeyring.get() != null)
- // keyring = notYetSavedKeyring.get();
- else
- throw new IllegalStateException("Keyring not setup");
-
- pbeCallback.set(keyring.getProperty(ARGEO_SECRET_KEY_FACTORY).getString(),
- JcrUtils.getBinaryAsBytes(keyring.getProperty(ARGEO_SALT)),
- (int) keyring.getProperty(ARGEO_ITERATION_COUNT).getLong(),
- (int) keyring.getProperty(ARGEO_KEY_LENGTH).getLong(),
- keyring.getProperty(ARGEO_SECRET_KEY_ENCRYPTION).getString());
-
- // if (notYetSavedKeyring.get() != null)
- // notYetSavedKeyring.remove();
- } catch (RepositoryException e) {
- throw new JcrException("Cannot handle key spec callback", e);
- } finally {
- JcrUtils.logoutQuietly(session);
- }
- }
-
- /** The parent node must already exist at this path. */
- @Override
- protected synchronized void encrypt(String path, InputStream unencrypted) {
- // should be called first for lazy initialization
- SecretKey secretKey = getSecretKey(null);
- Cipher cipher = createCipher();
-
- // Binary binary = null;
- // InputStream in = null;
- try {
- session().refresh(true);
- Node node;
- if (!session().nodeExists(path)) {
- String parentPath = JcrUtils.parentPath(path);
- if (!session().nodeExists(parentPath))
- throw new IllegalStateException("No parent node of " + path);
- Node parentNode = session().getNode(parentPath);
- node = parentNode.addNode(JcrUtils.nodeNameFromPath(path));
- } else {
- node = session().getNode(path);
- }
- encrypt(secretKey, cipher, node, unencrypted);
- // node.addMixin(ArgeoTypes.ARGEO_ENCRYPTED);
- // SecureRandom random = new SecureRandom();
- // byte[] iv = new byte[16];
- // random.nextBytes(iv);
- // cipher.init(Cipher.ENCRYPT_MODE, secretKey, new IvParameterSpec(iv));
- // JcrUtils.setBinaryAsBytes(node, ARGEO_IV, iv);
- //
- // try (InputStream in = new CipherInputStream(unencrypted, cipher);) {
- // binary = session().getValueFactory().createBinary(in);
- // node.setProperty(Property.JCR_DATA, binary);
- // session().save();
- // }
- } catch (RepositoryException e) {
- throw new JcrException("Cannot encrypt", e);
- } finally {
- try {
- unencrypted.close();
- } catch (IOException e) {
- // silent
- }
- // IOUtils.closeQuietly(unencrypted);
- // IOUtils.closeQuietly(in);
- // JcrUtils.closeQuietly(binary);
- JcrUtils.logoutQuietly(session());
- }
- }
-
- protected synchronized void encrypt(SecretKey secretKey, Cipher cipher, Node node, InputStream unencrypted) {
- try {
- node.addMixin(ArgeoTypes.ARGEO_ENCRYPTED);
- SecureRandom random = new SecureRandom();
- byte[] iv = new byte[16];
- random.nextBytes(iv);
- cipher.init(Cipher.ENCRYPT_MODE, secretKey, new IvParameterSpec(iv));
- JcrUtils.setBinaryAsBytes(node, ARGEO_IV, iv);
-
- Binary binary = null;
- try (InputStream in = new CipherInputStream(unencrypted, cipher);) {
- binary = session().getValueFactory().createBinary(in);
- node.setProperty(Property.JCR_DATA, binary);
- session().save();
- } finally {
- JcrUtils.closeQuietly(binary);
- }
- } catch (RepositoryException e) {
- throw new JcrException("Cannot encrypt", e);
- } catch (GeneralSecurityException | IOException e) {
- throw new RuntimeException("Cannot encrypt", e);
- }
- }
-
- @Override
- protected synchronized InputStream decrypt(String path) {
- Binary binary = null;
- try {
- session().refresh(true);
- if (!session().nodeExists(path)) {
- char[] password = ask();
- Reader reader = new CharArrayReader(password);
- return new ByteArrayInputStream(IOUtils.toByteArray(reader, StandardCharsets.UTF_8));
- } else {
- // should be called first for lazy initialisation
- SecretKey secretKey = getSecretKey(null);
- Cipher cipher = createCipher();
- Node node = session().getNode(path);
- return decrypt(secretKey, cipher, node);
- }
- } catch (RepositoryException e) {
- throw new JcrException("Cannot decrypt", e);
- } catch (GeneralSecurityException | IOException e) {
- throw new RuntimeException("Cannot decrypt", e);
- } finally {
- JcrUtils.closeQuietly(binary);
- JcrUtils.logoutQuietly(session());
- }
- }
-
- protected synchronized InputStream decrypt(SecretKey secretKey, Cipher cipher, Node node)
- throws RepositoryException, GeneralSecurityException {
- if (node.hasProperty(ARGEO_IV)) {
- byte[] iv = JcrUtils.getBinaryAsBytes(node.getProperty(ARGEO_IV));
- cipher.init(Cipher.DECRYPT_MODE, secretKey, new IvParameterSpec(iv));
- } else {
- cipher.init(Cipher.DECRYPT_MODE, secretKey);
- }
-
- Binary binary = node.getProperty(Property.JCR_DATA).getBinary();
- InputStream encrypted = binary.getStream();
- return new CipherInputStream(encrypted, cipher);
- }
-
- protected Cipher createCipher() {
- try {
- Node userHome = NodeUtils.getUserHome(session());
- if (!userHome.hasNode(ARGEO_KEYRING))
- throw new IllegalArgumentException("Keyring not setup");
- Node keyring = userHome.getNode(ARGEO_KEYRING);
- String cipherName = keyring.getProperty(ARGEO_CIPHER).getString();
- Provider securityProvider = getSecurityProvider();
- Cipher cipher;
- if (securityProvider == null)// TODO use BC?
- cipher = Cipher.getInstance(cipherName);
- else
- cipher = Cipher.getInstance(cipherName, securityProvider);
- return cipher;
- } catch (NoSuchAlgorithmException | NoSuchPaddingException e) {
- throw new IllegalArgumentException("Cannot get cipher", e);
- } catch (RepositoryException e) {
- throw new JcrException("Cannot get cipher", e);
- } finally {
-
- }
- }
-
- public synchronized void changePassword(char[] oldPassword, char[] newPassword) {
- // TODO make it XA compatible
- SecretKey oldSecretKey = getSecretKey(oldPassword);
- SecretKey newSecretKey = getSecretKey(newPassword);
- Session session = session();
- try {
- NodeIterator encryptedNodes = session.getWorkspace().getQueryManager()
- .createQuery("select * from [argeo:encrypted]", Query.JCR_SQL2).execute().getNodes();
- while (encryptedNodes.hasNext()) {
- Node node = encryptedNodes.nextNode();
- InputStream in = decrypt(oldSecretKey, createCipher(), node);
- encrypt(newSecretKey, createCipher(), node, in);
- if (log.isDebugEnabled())
- log.debug("Converted keyring encrypted value of " + node.getPath());
- }
- } catch (GeneralSecurityException e) {
- throw new RuntimeException("Cannot change JCR keyring password", e);
- } catch (RepositoryException e) {
- throw new JcrException("Cannot change JCR keyring password", e);
- } finally {
- JcrUtils.logoutQuietly(session);
- }
- }
-
- // public synchronized void setSession(Session session) {
- // this.session = session;
- // }
-
- public void setIterationCountFactor(Integer iterationCountFactor) {
- this.iterationCountFactor = iterationCountFactor;
- }
-
- public void setSecretKeyLength(Long keyLength) {
- this.secretKeyLength = keyLength;
- }
-
- public void setSecretKeyFactoryName(String secreteKeyFactoryName) {
- this.secretKeyFactoryName = secreteKeyFactoryName;
- }
-
- public void setSecretKeyEncryption(String secreteKeyEncryption) {
- this.secretKeyEncryption = secreteKeyEncryption;
- }
-
- public void setCipherName(String cipherName) {
- this.cipherName = cipherName;
- }
-
-}
\ No newline at end of file
+++ /dev/null
-package org.argeo.cms.tabular;
-
-import java.io.OutputStream;
-
-import org.argeo.api.tabular.TabularWriter;
-import org.argeo.util.CsvWriter;
-
-/** Write tabular content in a stream as CSV. Wraps a {@link CsvWriter}. */
-public class CsvTabularWriter implements TabularWriter {
- private CsvWriter csvWriter;
-
- public CsvTabularWriter(OutputStream out) {
- this.csvWriter = new CsvWriter(out);
- }
-
- public void appendRow(Object[] row) {
- csvWriter.writeLine(row);
- }
-
- public void close() {
- }
-
-}
+++ /dev/null
-package org.argeo.cms.tabular;
-
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.ArrayBlockingQueue;
-
-import javax.jcr.Binary;
-import javax.jcr.Node;
-import javax.jcr.NodeIterator;
-import javax.jcr.Property;
-import javax.jcr.PropertyType;
-import javax.jcr.RepositoryException;
-
-import org.apache.commons.io.IOUtils;
-import org.argeo.api.tabular.ArrayTabularRow;
-import org.argeo.api.tabular.TabularColumn;
-import org.argeo.api.tabular.TabularRow;
-import org.argeo.api.tabular.TabularRowIterator;
-import org.argeo.cms.ArgeoTypes;
-import org.argeo.jcr.JcrException;
-import org.argeo.util.CsvParser;
-
-/** Iterates over the rows of a {@link ArgeoTypes#ARGEO_TABLE} node. */
-public class JcrTabularRowIterator implements TabularRowIterator {
- private Boolean hasNext = null;
- private Boolean parsingCompleted = false;
-
- private Long currentRowNumber = 0l;
-
- private List<TabularColumn> header = new ArrayList<TabularColumn>();
-
- /** referenced so that we can close it */
- private Binary binary;
- private InputStream in;
-
- private CsvParser csvParser;
- private ArrayBlockingQueue<List<String>> textLines;
-
- public JcrTabularRowIterator(Node tableNode) {
- try {
- for (NodeIterator it = tableNode.getNodes(); it.hasNext();) {
- Node node = it.nextNode();
- if (node.isNodeType(ArgeoTypes.ARGEO_COLUMN)) {
- Integer type = PropertyType.valueFromName(node.getProperty(
- Property.JCR_REQUIRED_TYPE).getString());
- TabularColumn tc = new TabularColumn(node.getProperty(
- Property.JCR_TITLE).getString(), type);
- header.add(tc);
- }
- }
- Node contentNode = tableNode.getNode(Property.JCR_CONTENT);
- if (contentNode.isNodeType(ArgeoTypes.ARGEO_CSV)) {
- textLines = new ArrayBlockingQueue<List<String>>(1000);
- csvParser = new CsvParser() {
- protected void processLine(Integer lineNumber,
- List<String> header, List<String> tokens) {
- try {
- textLines.put(tokens);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- // textLines.add(tokens);
- if (hasNext == null) {
- hasNext = true;
- synchronized (JcrTabularRowIterator.this) {
- JcrTabularRowIterator.this.notifyAll();
- }
- }
- }
- };
- csvParser.setNoHeader(true);
- binary = contentNode.getProperty(Property.JCR_DATA).getBinary();
- in = binary.getStream();
- Thread thread = new Thread(contentNode.getPath() + " reader") {
- public void run() {
- try {
- csvParser.parse(in);
- } finally {
- parsingCompleted = true;
- IOUtils.closeQuietly(in);
- }
- }
- };
- thread.start();
- }
- } catch (RepositoryException e) {
- throw new JcrException("Cannot read table " + tableNode, e);
- }
- }
-
- public synchronized boolean hasNext() {
- // we don't know if there is anything available
- // while (hasNext == null)
- // try {
- // wait();
- // } catch (InterruptedException e) {
- // // silent
- // // FIXME better deal with interruption
- // Thread.currentThread().interrupt();
- // break;
- // }
-
- // buffer not empty
- if (!textLines.isEmpty())
- return true;
-
- // maybe the parsing is finished but the flag has not been set
- while (!parsingCompleted && textLines.isEmpty())
- try {
- wait(100);
- } catch (InterruptedException e) {
- // silent
- // FIXME better deal with interruption
- Thread.currentThread().interrupt();
- break;
- }
-
- // buffer not empty
- if (!textLines.isEmpty())
- return true;
-
- // (parsingCompleted && textLines.isEmpty())
- return false;
-
- // if (!hasNext && textLines.isEmpty()) {
- // if (in != null) {
- // IOUtils.closeQuietly(in);
- // in = null;
- // }
- // if (binary != null) {
- // JcrUtils.closeQuietly(binary);
- // binary = null;
- // }
- // return false;
- // } else
- // return true;
- }
-
- public synchronized TabularRow next() {
- try {
- List<String> tokens = textLines.take();
- List<Object> objs = new ArrayList<Object>(tokens.size());
- for (String token : tokens) {
- // TODO convert to other formats using header
- objs.add(token);
- }
- currentRowNumber++;
- return new ArrayTabularRow(objs);
- } catch (InterruptedException e) {
- // silent
- // FIXME better deal with interruption
- }
- return null;
- }
-
- public void remove() {
- throw new UnsupportedOperationException();
- }
-
- public Long getCurrentRowNumber() {
- return currentRowNumber;
- }
-
- public List<TabularColumn> getHeader() {
- return header;
- }
-
-}
+++ /dev/null
-package org.argeo.cms.tabular;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.InputStream;
-import java.util.List;
-
-import javax.jcr.Binary;
-import javax.jcr.Node;
-import javax.jcr.Property;
-import javax.jcr.PropertyType;
-import javax.jcr.RepositoryException;
-
-import org.apache.commons.io.IOUtils;
-import org.argeo.api.tabular.TabularColumn;
-import org.argeo.api.tabular.TabularWriter;
-import org.argeo.cms.ArgeoTypes;
-import org.argeo.jcr.JcrException;
-import org.argeo.jcr.JcrUtils;
-import org.argeo.util.CsvWriter;
-
-/** Write / reference tabular content in a JCR repository. */
-public class JcrTabularWriter implements TabularWriter {
- private Node contentNode;
- private ByteArrayOutputStream out;
- private CsvWriter csvWriter;
-
- @SuppressWarnings("unused")
- private final List<TabularColumn> columns;
-
- /** Creates a table node */
- public JcrTabularWriter(Node tableNode, List<TabularColumn> columns,
- String contentNodeType) {
- try {
- this.columns = columns;
- for (TabularColumn column : columns) {
- String normalized = JcrUtils.replaceInvalidChars(column
- .getName());
- Node columnNode = tableNode.addNode(normalized,
- ArgeoTypes.ARGEO_COLUMN);
- columnNode.setProperty(Property.JCR_TITLE, column.getName());
- if (column.getType() != null)
- columnNode.setProperty(Property.JCR_REQUIRED_TYPE,
- PropertyType.nameFromValue(column.getType()));
- else
- columnNode.setProperty(Property.JCR_REQUIRED_TYPE,
- PropertyType.TYPENAME_STRING);
- }
- contentNode = tableNode.addNode(Property.JCR_CONTENT,
- contentNodeType);
- if (contentNodeType.equals(ArgeoTypes.ARGEO_CSV)) {
- contentNode.setProperty(Property.JCR_MIMETYPE, "text/csv");
- contentNode.setProperty(Property.JCR_ENCODING, "UTF-8");
- out = new ByteArrayOutputStream();
- csvWriter = new CsvWriter(out);
- }
- } catch (RepositoryException e) {
- throw new JcrException("Cannot create table node " + tableNode, e);
- }
- }
-
- public void appendRow(Object[] row) {
- csvWriter.writeLine(row);
- }
-
- public void close() {
- Binary binary = null;
- InputStream in = null;
- try {
- // TODO parallelize with pipes and writing from another thread
- in = new ByteArrayInputStream(out.toByteArray());
- binary = contentNode.getSession().getValueFactory()
- .createBinary(in);
- contentNode.setProperty(Property.JCR_DATA, binary);
- } catch (RepositoryException e) {
- throw new JcrException("Cannot store data in " + contentNode, e);
- } finally {
- IOUtils.closeQuietly(in);
- JcrUtils.closeQuietly(binary);
- }
- }
-}
+++ /dev/null
-/** Argeo CMS implementation of the Argeo Tabular API (CSV, JCR). */
-package org.argeo.cms.tabular;
\ No newline at end of file
+++ /dev/null
-package org.argeo.security.jackrabbit;
-
-import java.security.Principal;
-import java.util.Map;
-import java.util.Set;
-
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-
-import org.apache.jackrabbit.core.security.authorization.acl.ACLProvider;
-
-/** Argeo specific access control provider */
-public class ArgeoAccessControlProvider extends ACLProvider {
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- @Override
- public void init(Session systemSession, Map configuration) throws RepositoryException {
- if (!configuration.containsKey(PARAM_ALLOW_UNKNOWN_PRINCIPALS))
- configuration.put(PARAM_ALLOW_UNKNOWN_PRINCIPALS, "true");
- if (!configuration.containsKey(PARAM_OMIT_DEFAULT_PERMISSIONS))
- configuration.put(PARAM_OMIT_DEFAULT_PERMISSIONS, "true");
- super.init(systemSession, configuration);
- }
-
- @Override
- public boolean canAccessRoot(Set<Principal> principals) throws RepositoryException {
- return super.canAccessRoot(principals);
- }
-
-}
+++ /dev/null
-package org.argeo.security.jackrabbit;
-
-import javax.jcr.PathNotFoundException;
-import javax.jcr.RepositoryException;
-import javax.jcr.security.Privilege;
-
-import org.apache.jackrabbit.core.id.ItemId;
-import org.apache.jackrabbit.core.security.DefaultAccessManager;
-import org.apache.jackrabbit.spi.Path;
-
-/**
- * Intermediary class in order to have a consistent naming in config files. Does
- * nothing for the time being, but may in the future.
- */
-public class ArgeoAccessManager extends DefaultAccessManager {
-
- @Override
- public boolean canRead(Path itemPath, ItemId itemId)
- throws RepositoryException {
- return super.canRead(itemPath, itemId);
- }
-
- @Override
- public Privilege[] getPrivileges(String absPath)
- throws PathNotFoundException, RepositoryException {
- return super.getPrivileges(absPath);
- }
-
- @Override
- public boolean hasPrivileges(String absPath, Privilege[] privileges)
- throws PathNotFoundException, RepositoryException {
- return super.hasPrivileges(absPath, privileges);
- }
-
-}
+++ /dev/null
-package org.argeo.security.jackrabbit;
-
-import javax.security.auth.Subject;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
-
-import org.apache.jackrabbit.core.security.authentication.AuthContext;
-
-/** Wraps a regular {@link LoginContext}, using the proper class loader. */
-class ArgeoAuthContext implements AuthContext {
- private LoginContext lc;
-
- public ArgeoAuthContext(String appName, Subject subject, CallbackHandler callbackHandler) {
- try {
- lc = new LoginContext(appName, subject, callbackHandler);
- } catch (LoginException e) {
- throw new IllegalStateException("Cannot configure Jackrabbit login context", e);
- }
- }
-
- @Override
- public void login() throws LoginException {
- lc.login();
- }
-
- @Override
- public Subject getSubject() {
- return lc.getSubject();
- }
-
- @Override
- public void logout() throws LoginException {
- lc.logout();
- }
-
-}
+++ /dev/null
-package org.argeo.security.jackrabbit;
-
-import java.security.Principal;
-import java.util.HashSet;
-import java.util.Properties;
-import java.util.Set;
-
-import javax.jcr.Credentials;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.security.auth.Subject;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.x500.X500Principal;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.api.security.user.UserManager;
-import org.apache.jackrabbit.core.DefaultSecurityManager;
-import org.apache.jackrabbit.core.security.AMContext;
-import org.apache.jackrabbit.core.security.AccessManager;
-import org.apache.jackrabbit.core.security.SecurityConstants;
-import org.apache.jackrabbit.core.security.SystemPrincipal;
-import org.apache.jackrabbit.core.security.authentication.AuthContext;
-import org.apache.jackrabbit.core.security.authentication.CallbackHandlerImpl;
-import org.apache.jackrabbit.core.security.authorization.WorkspaceAccessManager;
-import org.apache.jackrabbit.core.security.principal.AdminPrincipal;
-import org.apache.jackrabbit.core.security.principal.PrincipalProvider;
-import org.argeo.api.NodeConstants;
-import org.argeo.api.security.AnonymousPrincipal;
-import org.argeo.api.security.DataAdminPrincipal;
-import org.argeo.cms.auth.CmsSession;
-import org.osgi.framework.BundleContext;
-import org.osgi.framework.FrameworkUtil;
-
-/** Customises Jackrabbit security. */
-public class ArgeoSecurityManager extends DefaultSecurityManager {
- private final static Log log = LogFactory.getLog(ArgeoSecurityManager.class);
-
- private BundleContext cmsBundleContext = null;
-
- public ArgeoSecurityManager() {
- if (FrameworkUtil.getBundle(CmsSession.class) != null) {
- cmsBundleContext = FrameworkUtil.getBundle(CmsSession.class).getBundleContext();
- }
- }
-
- public AuthContext getAuthContext(Credentials creds, Subject subject, String workspaceName)
- throws RepositoryException {
- checkInitialized();
-
- CallbackHandler cbHandler = new CallbackHandlerImpl(creds, getSystemSession(), getPrincipalProviderRegistry(),
- adminId, anonymousId);
- String appName = "Jackrabbit";
- return new ArgeoAuthContext(appName, subject, cbHandler);
- }
-
- @Override
- public AccessManager getAccessManager(Session session, AMContext amContext) throws RepositoryException {
- synchronized (getSystemSession()) {
- return super.getAccessManager(session, amContext);
- }
- }
-
- @Override
- public UserManager getUserManager(Session session) throws RepositoryException {
- synchronized (getSystemSession()) {
- return super.getUserManager(session);
- }
- }
-
- @Override
- protected PrincipalProvider createDefaultPrincipalProvider(Properties[] moduleConfig) throws RepositoryException {
- return super.createDefaultPrincipalProvider(moduleConfig);
- }
-
- /** Called once when the session is created */
- @Override
- public String getUserID(Subject subject, String workspaceName) throws RepositoryException {
- boolean isAnonymous = !subject.getPrincipals(AnonymousPrincipal.class).isEmpty();
- boolean isDataAdmin = !subject.getPrincipals(DataAdminPrincipal.class).isEmpty();
- boolean isJackrabbitSystem = !subject.getPrincipals(SystemPrincipal.class).isEmpty();
- Set<X500Principal> userPrincipal = subject.getPrincipals(X500Principal.class);
- boolean isRegularUser = !userPrincipal.isEmpty();
- CmsSession cmsSession = null;
- if (cmsBundleContext != null) {
- cmsSession = CmsSession.getCmsSession(cmsBundleContext, subject);
- if (log.isTraceEnabled())
- log.trace("Opening JCR session for CMS session " + cmsSession);
- }
-
- if (isAnonymous) {
- if (isDataAdmin || isJackrabbitSystem || isRegularUser)
- throw new IllegalStateException("Inconsistent " + subject);
- else
- return NodeConstants.ROLE_ANONYMOUS;
- } else if (isRegularUser) {// must be before DataAdmin
- if (isAnonymous || isJackrabbitSystem)
- throw new IllegalStateException("Inconsistent " + subject);
- else {
- if (userPrincipal.size() > 1) {
- StringBuilder buf = new StringBuilder();
- for (X500Principal principal : userPrincipal)
- buf.append(' ').append('\"').append(principal).append('\"');
- throw new RuntimeException("Multiple user principals:" + buf);
- }
- return userPrincipal.iterator().next().getName();
- }
- } else if (isDataAdmin) {
- if (isAnonymous || isJackrabbitSystem || isRegularUser)
- throw new IllegalStateException("Inconsistent " + subject);
- else {
- assert !subject.getPrincipals(AdminPrincipal.class).isEmpty();
- return NodeConstants.ROLE_DATA_ADMIN;
- }
- } else if (isJackrabbitSystem) {
- if (isAnonymous || isDataAdmin || isRegularUser)
- throw new IllegalStateException("Inconsistent " + subject);
- else
- return super.getUserID(subject, workspaceName);
- } else {
- throw new IllegalStateException("Unrecognized subject type: " + subject);
- }
- }
-
- @Override
- protected WorkspaceAccessManager createDefaultWorkspaceAccessManager() {
- WorkspaceAccessManager wam = super.createDefaultWorkspaceAccessManager();
- ArgeoWorkspaceAccessManagerImpl workspaceAccessManager = new ArgeoWorkspaceAccessManagerImpl(wam);
- if (log.isTraceEnabled())
- log.trace("Created workspace access manager");
- return workspaceAccessManager;
- }
-
- private class ArgeoWorkspaceAccessManagerImpl implements SecurityConstants, WorkspaceAccessManager {
- private final WorkspaceAccessManager wam;
-
- public ArgeoWorkspaceAccessManagerImpl(WorkspaceAccessManager wam) {
- super();
- this.wam = wam;
- }
-
- public void init(Session systemSession) throws RepositoryException {
- wam.init(systemSession);
- Repository repository = systemSession.getRepository();
- if (log.isTraceEnabled())
- log.trace("Initialised workspace access manager on repository " + repository
- + ", systemSession workspace: " + systemSession.getWorkspace().getName());
- }
-
- public void close() throws RepositoryException {
- }
-
- public boolean grants(Set<Principal> principals, String workspaceName) throws RepositoryException {
- // TODO: implements finer access to workspaces
- if (log.isTraceEnabled())
- log.trace("Grants " + new HashSet<>(principals) + " access to workspace '" + workspaceName + "'");
- return true;
- // return wam.grants(principals, workspaceName);
- }
- }
-
-}
+++ /dev/null
-package org.argeo.security.jackrabbit;
-
-import java.util.Map;
-import java.util.Set;
-
-import javax.security.auth.Subject;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.login.LoginException;
-import javax.security.auth.spi.LoginModule;
-import javax.security.auth.x500.X500Principal;
-
-import org.apache.jackrabbit.core.security.AnonymousPrincipal;
-import org.apache.jackrabbit.core.security.SecurityConstants;
-import org.apache.jackrabbit.core.security.principal.AdminPrincipal;
-import org.argeo.api.security.DataAdminPrincipal;
-
-/** JAAS login module used when initiating a new Jackrabbit session. */
-public class SystemJackrabbitLoginModule implements LoginModule {
- private Subject subject;
-
- @Override
- public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> sharedState,
- Map<String, ?> options) {
- this.subject = subject;
- }
-
- @Override
- public boolean login() throws LoginException {
- return true;
- }
-
- @Override
- public boolean commit() throws LoginException {
- Set<org.argeo.api.security.AnonymousPrincipal> anonPrincipal = subject
- .getPrincipals(org.argeo.api.security.AnonymousPrincipal.class);
- if (!anonPrincipal.isEmpty()) {
- subject.getPrincipals().add(new AnonymousPrincipal());
- return true;
- }
-
- Set<DataAdminPrincipal> initPrincipal = subject.getPrincipals(DataAdminPrincipal.class);
- if (!initPrincipal.isEmpty()) {
- subject.getPrincipals().add(new AdminPrincipal(SecurityConstants.ADMIN_ID));
- return true;
- }
-
- Set<X500Principal> userPrincipal = subject.getPrincipals(X500Principal.class);
- if (userPrincipal.isEmpty())
- throw new LoginException("Subject must be pre-authenticated");
- if (userPrincipal.size() > 1)
- throw new LoginException("Multiple user principals " + userPrincipal);
-
- return true;
- }
-
- @Override
- public boolean abort() throws LoginException {
- return true;
- }
-
- @Override
- public boolean logout() throws LoginException {
- subject.getPrincipals().removeAll(subject.getPrincipals(AnonymousPrincipal.class));
- subject.getPrincipals().removeAll(subject.getPrincipals(AdminPrincipal.class));
- return true;
- }
-}
+++ /dev/null
-/** Integration of Jackrabbit with Argeo security model. */
-package org.argeo.security.jackrabbit;
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" path="src"/>
- <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11"/>
- <classpathentry kind="output" path="bin"/>
-</classpath>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>org.argeo.jcr</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.jdt.core.javabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.ManifestBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.SchemaBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.eclipse.pde.PluginNature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- </natures>
-</projectDescription>
+++ /dev/null
-/MANIFEST.MF
+++ /dev/null
-Provide-Capability:\
-cms.datamodel; name=jcrx; cnd=/org/argeo/jcr/jcrx.cnd; abstract=true
-
-Import-Package:\
-*
+++ /dev/null
-source.. = src/,\
- ext/test/
-output.. = bin/
-bin.includes = META-INF/,\
- .
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8" standalone="no"?><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.argeo.commons</groupId>
- <artifactId>argeo-commons</artifactId>
- <version>2.3-SNAPSHOT</version>
- <relativePath>..</relativePath>
- </parent>
- <artifactId>org.argeo.jcr</artifactId>
- <name>Commons JCR</name>
-</project>
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11"/>
- <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
- <classpathentry kind="src" path="src"/>
- <classpathentry kind="output" path="bin"/>
-</classpath>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>org.argeo.maintenance</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.jdt.core.javabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.ManifestBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.SchemaBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.eclipse.pde.PluginNature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- </natures>
-</projectDescription>
+++ /dev/null
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=disabled
-org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore
-org.eclipse.jdt.core.compiler.annotation.nonnull=org.eclipse.jdt.annotation.NonNull
-org.eclipse.jdt.core.compiler.annotation.nonnull.secondary=
-org.eclipse.jdt.core.compiler.annotation.nonnullbydefault=org.eclipse.jdt.annotation.NonNullByDefault
-org.eclipse.jdt.core.compiler.annotation.nonnullbydefault.secondary=
-org.eclipse.jdt.core.compiler.annotation.nullable=org.eclipse.jdt.annotation.Nullable
-org.eclipse.jdt.core.compiler.annotation.nullable.secondary=
-org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled
-org.eclipse.jdt.core.compiler.problem.APILeak=warning
-org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=warning
-org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
-org.eclipse.jdt.core.compiler.problem.comparingIdentical=warning
-org.eclipse.jdt.core.compiler.problem.deadCode=warning
-org.eclipse.jdt.core.compiler.problem.deprecation=warning
-org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
-org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
-org.eclipse.jdt.core.compiler.problem.discouragedReference=warning
-org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore
-org.eclipse.jdt.core.compiler.problem.explicitlyClosedAutoCloseable=ignore
-org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore
-org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled
-org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore
-org.eclipse.jdt.core.compiler.problem.finalParameterBound=warning
-org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=warning
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=warning
-org.eclipse.jdt.core.compiler.problem.includeNullInfoFromAsserts=disabled
-org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=warning
-org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=warning
-org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore
-org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore
-org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=warning
-org.eclipse.jdt.core.compiler.problem.missingDefaultCase=ignore
-org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingEnumCaseDespiteDefault=disabled
-org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=ignore
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotationForInterfaceMethodImplementation=enabled
-org.eclipse.jdt.core.compiler.problem.missingSerialVersion=warning
-org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignore
-org.eclipse.jdt.core.compiler.problem.noEffectAssignment=warning
-org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=warning
-org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
-org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning
-org.eclipse.jdt.core.compiler.problem.nonnullTypeVariableFromLegacyInvocation=warning
-org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=error
-org.eclipse.jdt.core.compiler.problem.nullReference=warning
-org.eclipse.jdt.core.compiler.problem.nullSpecViolation=error
-org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning
-org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=warning
-org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.pessimisticNullAnalysisForFreeTypeVariables=warning
-org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore
-org.eclipse.jdt.core.compiler.problem.potentiallyUnclosedCloseable=ignore
-org.eclipse.jdt.core.compiler.problem.rawTypeReference=warning
-org.eclipse.jdt.core.compiler.problem.redundantNullAnnotation=warning
-org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore
-org.eclipse.jdt.core.compiler.problem.reportMethodCanBePotentiallyStatic=ignore
-org.eclipse.jdt.core.compiler.problem.reportMethodCanBeStatic=ignore
-org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled
-org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=warning
-org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=disabled
-org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
-org.eclipse.jdt.core.compiler.problem.syntacticNullAnalysisForFields=disabled
-org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore
-org.eclipse.jdt.core.compiler.problem.terminalDeprecation=warning
-org.eclipse.jdt.core.compiler.problem.typeParameterHiding=warning
-org.eclipse.jdt.core.compiler.problem.unavoidableGenericTypeProblems=enabled
-org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=warning
-org.eclipse.jdt.core.compiler.problem.unclosedCloseable=warning
-org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=ignore
-org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.unlikelyCollectionMethodArgumentType=warning
-org.eclipse.jdt.core.compiler.problem.unlikelyCollectionMethodArgumentTypeStrict=disabled
-org.eclipse.jdt.core.compiler.problem.unlikelyEqualsArgumentType=info
-org.eclipse.jdt.core.compiler.problem.unnecessaryElse=ignore
-org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=ignore
-org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore
-org.eclipse.jdt.core.compiler.problem.unstableAutoModuleName=warning
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=ignore
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionExemptExceptionAndThrowable=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.unusedExceptionParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedImport=warning
-org.eclipse.jdt.core.compiler.problem.unusedLabel=warning
-org.eclipse.jdt.core.compiler.problem.unusedLocal=warning
-org.eclipse.jdt.core.compiler.problem.unusedObjectAllocation=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled
-org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=warning
-org.eclipse.jdt.core.compiler.problem.unusedTypeParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=warning
+++ /dev/null
-/MANIFEST.MF
+++ /dev/null
-Bundle-Activator: org.argeo.maintenance.internal.Activator
+++ /dev/null
-source.. = src/
-output.. = bin/
-bin.includes = META-INF/,\
- .
-additional.bundles = org.slf4j.log4j12,\
- org.slf4j.api
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.argeo.commons</groupId>
- <artifactId>argeo-commons</artifactId>
- <version>2.3-SNAPSHOT</version>
- <relativePath>..</relativePath>
- </parent>
- <artifactId>org.argeo.maintenance</artifactId>
- <name>Maintenance</name>
- <packaging>jar</packaging>
- <dependencies>
- <dependency>
- <groupId>org.argeo.commons</groupId>
- <artifactId>org.argeo.jcr</artifactId>
- <version>2.3-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.argeo.commons</groupId>
- <artifactId>org.argeo.enterprise</artifactId>
- <version>2.3-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.argeo.commons</groupId>
- <artifactId>org.argeo.core</artifactId>
- <version>2.3-SNAPSHOT</version>
- </dependency>
- <dependency>
- <groupId>org.argeo.commons</groupId>
- <artifactId>org.argeo.api</artifactId>
- <version>2.3-SNAPSHOT</version>
- </dependency>
- </dependencies>
-</project>
\ No newline at end of file