Merge Argeo Util and parts of Argeo JCR into Argeo Core.
author Mathieu Baudier <mbaudier@argeo.org>
Mon, 9 Nov 2020 10:09:43 +0000 (11:09 +0100)
committer Mathieu Baudier <mbaudier@argeo.org>
Mon, 9 Nov 2020 10:09:43 +0000 (11:09 +0100)
153 files changed:
dep/org.argeo.dep.cms.client/pom.xml
org.argeo.core/bnd.bnd
org.argeo.core/build.properties
org.argeo.core/ext/test/log4j.properties [new file with mode: 0644]
org.argeo.core/ext/test/org/argeo/fs/FsUtilsTest.java [new file with mode: 0644]
org.argeo.core/ext/test/org/argeo/jcr/fs/JcrFileSystemTest.java [new file with mode: 0644]
org.argeo.core/ext/test/org/argeo/util/CsvParserEncodingTest.java [new file with mode: 0644]
org.argeo.core/ext/test/org/argeo/util/CsvParserParseFileTest.java [new file with mode: 0644]
org.argeo.core/ext/test/org/argeo/util/CsvParserTest.java [new file with mode: 0644]
org.argeo.core/ext/test/org/argeo/util/CsvParserWithQuotedSeparatorTest.java [new file with mode: 0644]
org.argeo.core/ext/test/org/argeo/util/CsvWriterTest.java [new file with mode: 0644]
org.argeo.core/ext/test/org/argeo/util/ReferenceFile.csv [new file with mode: 0644]
org.argeo.core/ext/test/org/argeo/util/TestParse-ISO.csv [new file with mode: 0644]
org.argeo.core/ext/test/org/argeo/util/TestParse-UTF-8.csv [new file with mode: 0644]
org.argeo.core/ext/test/org/argeo/util/ThroughputTest.java [new file with mode: 0644]
org.argeo.core/pom.xml
org.argeo.core/src/org/argeo/fs/BasicSyncFileVisitor.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/fs/FsUtils.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/fs/package-info.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/JackrabbitAdminLoginModule.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/JackrabbitDataModelMigration.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/client/ClientDavexRepositoryFactory.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/client/ClientDavexRepositoryService.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/client/ClientDavexRepositoryServiceFactory.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/client/JackrabbitClient.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/client/NonSerialBasicAuthCache.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/fs/AbstractJackrabbitFsProvider.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/fs/DavexFsProvider.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/fs/JackrabbitMemoryFsProvider.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/fs/fs-memory.xml [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/fs/package-info.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/package-info.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/repository-h2.xml [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/repository-localfs.xml [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/repository-memory.xml [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/repository-postgresql-ds.xml [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/repository-postgresql.xml [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/security/JackrabbitSecurityUtils.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/security/package-info.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/unit/AbstractJackrabbitTestCase.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/unit/jaas.config [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/unit/package-info.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/unit/repository-h2.xml [new file with mode: 0644]
org.argeo.core/src/org/argeo/jackrabbit/unit/repository-memory.xml [new file with mode: 0644]
org.argeo.core/src/org/argeo/jcr/proxy/AbstractUrlProxy.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jcr/proxy/ResourceProxy.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jcr/proxy/ResourceProxyServlet.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jcr/proxy/package-info.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jcr/unit/AbstractJcrTestCase.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/jcr/unit/package-info.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/sync/SyncException.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/sync/SyncResult.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/sync/package-info.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/CsvParser.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/CsvParserWithLinesAsMap.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/CsvWriter.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/DictionaryKeys.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/DigestUtils.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/DirH.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/LangUtils.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/OS.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/PasswordEncryption.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/ServiceChannel.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/StreamUtils.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/Tester.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/TesterStatus.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/Throughput.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/UuidUtils.java [new file with mode: 0644]
org.argeo.core/src/org/argeo/util/package-info.java [new file with mode: 0644]
org.argeo.eclipse.ui.rap/pom.xml
org.argeo.enterprise/build.properties
org.argeo.enterprise/pom.xml
org.argeo.jcr/bnd.bnd
org.argeo.jcr/build.properties
org.argeo.jcr/ext/test/log4j.properties [deleted file]
org.argeo.jcr/ext/test/org/argeo/fs/FsUtilsTest.java [deleted file]
org.argeo.jcr/ext/test/org/argeo/jcr/fs/JcrFileSystemTest.java [deleted file]
org.argeo.jcr/pom.xml
org.argeo.jcr/repository.xml [deleted file]
org.argeo.jcr/repository/repository/meta/rootUUID [deleted file]
org.argeo.jcr/repository/repository/namespaces/ns_idx.properties [deleted file]
org.argeo.jcr/repository/repository/namespaces/ns_reg.properties [deleted file]
org.argeo.jcr/repository/workspaces/default/workspace.xml [deleted file]
org.argeo.jcr/src/org/argeo/fs/BasicSyncFileVisitor.java [deleted file]
org.argeo.jcr/src/org/argeo/fs/FsUtils.java [deleted file]
org.argeo.jcr/src/org/argeo/fs/package-info.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/JackrabbitAdminLoginModule.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/JackrabbitDataModelMigration.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/client/ClientDavexRepositoryFactory.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/client/ClientDavexRepositoryService.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/client/ClientDavexRepositoryServiceFactory.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/client/JackrabbitClient.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/client/NonSerialBasicAuthCache.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/fs/AbstractJackrabbitFsProvider.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/fs/DavexFsProvider.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/fs/JackrabbitMemoryFsProvider.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/fs/fs-memory.xml [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/fs/package-info.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/package-info.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/repository-h2.xml [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/repository-localfs.xml [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/repository-memory.xml [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/repository-postgresql-ds.xml [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/repository-postgresql.xml [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/security/JackrabbitSecurityUtils.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/security/package-info.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/unit/AbstractJackrabbitTestCase.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/unit/jaas.config [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/unit/package-info.java [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/unit/repository-h2.xml [deleted file]
org.argeo.jcr/src/org/argeo/jackrabbit/unit/repository-memory.xml [deleted file]
org.argeo.jcr/src/org/argeo/jcr/proxy/AbstractUrlProxy.java [deleted file]
org.argeo.jcr/src/org/argeo/jcr/proxy/ResourceProxy.java [deleted file]
org.argeo.jcr/src/org/argeo/jcr/proxy/ResourceProxyServlet.java [deleted file]
org.argeo.jcr/src/org/argeo/jcr/proxy/package-info.java [deleted file]
org.argeo.jcr/src/org/argeo/jcr/unit/AbstractJcrTestCase.java [deleted file]
org.argeo.jcr/src/org/argeo/jcr/unit/package-info.java [deleted file]
org.argeo.jcr/src/org/argeo/sync/SyncException.java [deleted file]
org.argeo.jcr/src/org/argeo/sync/SyncResult.java [deleted file]
org.argeo.jcr/src/org/argeo/sync/package-info.java [deleted file]
org.argeo.util/.classpath [deleted file]
org.argeo.util/.gitignore [deleted file]
org.argeo.util/.project [deleted file]
org.argeo.util/META-INF/.gitignore [deleted file]
org.argeo.util/bnd.bnd [deleted file]
org.argeo.util/build.properties [deleted file]
org.argeo.util/ext/test/org/argeo/util/CsvParserEncodingTest.java [deleted file]
org.argeo.util/ext/test/org/argeo/util/CsvParserParseFileTest.java [deleted file]
org.argeo.util/ext/test/org/argeo/util/CsvParserTest.java [deleted file]
org.argeo.util/ext/test/org/argeo/util/CsvParserWithQuotedSeparatorTest.java [deleted file]
org.argeo.util/ext/test/org/argeo/util/CsvWriterTest.java [deleted file]
org.argeo.util/ext/test/org/argeo/util/ReferenceFile.csv [deleted file]
org.argeo.util/ext/test/org/argeo/util/TestParse-ISO.csv [deleted file]
org.argeo.util/ext/test/org/argeo/util/TestParse-UTF-8.csv [deleted file]
org.argeo.util/ext/test/org/argeo/util/ThroughputTest.java [deleted file]
org.argeo.util/pom.xml [deleted file]
org.argeo.util/src/org/argeo/util/CsvParser.java [deleted file]
org.argeo.util/src/org/argeo/util/CsvParserWithLinesAsMap.java [deleted file]
org.argeo.util/src/org/argeo/util/CsvWriter.java [deleted file]
org.argeo.util/src/org/argeo/util/DictionaryKeys.java [deleted file]
org.argeo.util/src/org/argeo/util/DigestUtils.java [deleted file]
org.argeo.util/src/org/argeo/util/DirH.java [deleted file]
org.argeo.util/src/org/argeo/util/LangUtils.java [deleted file]
org.argeo.util/src/org/argeo/util/OS.java [deleted file]
org.argeo.util/src/org/argeo/util/PasswordEncryption.java [deleted file]
org.argeo.util/src/org/argeo/util/ServiceChannel.java [deleted file]
org.argeo.util/src/org/argeo/util/StreamUtils.java [deleted file]
org.argeo.util/src/org/argeo/util/Tester.java [deleted file]
org.argeo.util/src/org/argeo/util/TesterStatus.java [deleted file]
org.argeo.util/src/org/argeo/util/Throughput.java [deleted file]
org.argeo.util/src/org/argeo/util/UuidUtils.java [deleted file]
org.argeo.util/src/org/argeo/util/package-info.java [deleted file]
pom.xml

diff --git a/dep/org.argeo.dep.cms.client/pom.xml b/dep/org.argeo.dep.cms.client/pom.xml
index 836577bb6ea4613c7aa159235ed1768b239fe725..e2492c3a03f976bd6afe0b994ae8a5c2ba13541e 100644 (file)
        <dependencies>
 
                <!-- Argeo Commons -->
-               <dependency>
-                       <groupId>org.argeo.commons</groupId>
-                       <artifactId>org.argeo.util</artifactId>
-                       <version>2.1.89-SNAPSHOT</version>
-               </dependency>
                <dependency>
                        <groupId>org.argeo.commons</groupId>
                        <artifactId>org.argeo.enterprise</artifactId>
diff --git a/org.argeo.core/bnd.bnd b/org.argeo.core/bnd.bnd
index bc810ff41cba7d4a16b756f7af291f8872ab4a17..469d8e28610bc6cf388c557347ae724a2a6c99e9 100644 (file)
@@ -1,4 +1,8 @@
 Import-Package:\
 org.apache.jackrabbit.api,\
 org.apache.jackrabbit.commons,\
+org.apache.jackrabbit.spi,\
+org.apache.jackrabbit.spi2dav,\
+org.apache.jackrabbit.spi2davex,\
+org.apache.jackrabbit.webdav,\
 *
\ No newline at end of file
diff --git a/org.argeo.core/build.properties b/org.argeo.core/build.properties
index 84397864d5971c1e4fab21aed2d65a3e0c6f0e4b..49a93ba4a0bf7ecffb4898671d84c604be9105fc 100644 (file)
@@ -2,9 +2,28 @@ source.. = src/
 output.. = bin/
 bin.includes = META-INF/,\
                .
-additional.bundles = org.apache.jackrabbit.data,\
-                     org.slf4j.log4j12,\
+additional.bundles = org.junit,\
+                     org.hamcrest,\
+                     org.apache.jackrabbit.core,\
+                     javax.jcr,\
+                     org.apache.jackrabbit.api,\
+                     org.apache.jackrabbit.data,\
+                     org.apache.jackrabbit.jcr.commons,\
+                     org.apache.jackrabbit.spi,\
+                     org.apache.jackrabbit.spi.commons,\
                      org.slf4j.api,\
-                     bcprov,\
-                     org.junit,\
-                     org.hamcrest
+                     org.slf4j.log4j12,\
+                     org.apache.log4j,\
+                     org.apache.commons.collections,\
+                     EDU.oswego.cs.dl.util.concurrent,\
+                     org.apache.lucene,\
+                     org.apache.tika.core,\
+                     org.apache.commons.dbcp,\
+                     org.apache.commons.pool,\
+                     com.google.guava,\
+                     org.apache.jackrabbit.jcr2spi,\
+                     org.apache.jackrabbit.spi2dav,\
+                     org.apache.httpcomponents.httpclient,\
+                     org.apache.httpcomponents.httpcore,\
+                     org.apache.tika.parsers
+                     
\ No newline at end of file
diff --git a/org.argeo.core/ext/test/log4j.properties b/org.argeo.core/ext/test/log4j.properties
new file mode 100644 (file)
index 0000000..3d75289
--- /dev/null
@@ -0,0 +1,14 @@
+log4j.rootLogger=WARN, console
+
+## Levels
+log4j.logger.org.argeo=DEBUG
+log4j.logger.org.apache.jackrabbit=OFF
+
+## Appenders
+# console is set to be a ConsoleAppender.
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+
+# console uses PatternLayout.
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+#log4j.appender.console.layout.ConversionPattern= %-5p %d{ISO8601} %m - %c%n
+log4j.appender.console.layout.ConversionPattern=%m%n
diff --git a/org.argeo.core/ext/test/org/argeo/fs/FsUtilsTest.java b/org.argeo.core/ext/test/org/argeo/fs/FsUtilsTest.java
new file mode 100644 (file)
index 0000000..793216b
--- /dev/null
@@ -0,0 +1,49 @@
+package org.argeo.fs;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+/** {@link FsUtils} tests. */
+public class FsUtilsTest {
+       final static String FILE00 = "file00";
+       final static String FILE01 = "file01";
+       final static String SUB_DIR = "subDir";
+
+       public void testDelete() throws IOException {
+               Path dir = createDir00();
+               assert Files.exists(dir);
+               FsUtils.delete(dir);
+               assert !Files.exists(dir);
+       }
+
+       public void testSync() throws IOException {
+               Path source = createDir00();
+               Path target = Files.createTempDirectory(getClass().getName());
+               FsUtils.sync(source, target);
+               assert Files.exists(target.resolve(FILE00));
+               assert Files.exists(target.resolve(SUB_DIR));
+               assert Files.exists(target.resolve(SUB_DIR + File.separator + FILE01));
+               FsUtils.delete(source.resolve(SUB_DIR));
+               FsUtils.sync(source, target, true);
+               assert Files.exists(target.resolve(FILE00));
+               assert !Files.exists(target.resolve(SUB_DIR));
+               assert !Files.exists(target.resolve(SUB_DIR + File.separator + FILE01));
+
+               // clean up
+               FsUtils.delete(source);
+               FsUtils.delete(target);
+
+       }
+
+       Path createDir00() throws IOException {
+               Path base = Files.createTempDirectory(getClass().getName());
+               base.toFile().deleteOnExit();
+               Files.createFile(base.resolve(FILE00)).toFile().deleteOnExit();
+               Path subDir = Files.createDirectories(base.resolve(SUB_DIR));
+               subDir.toFile().deleteOnExit();
+               Files.createFile(subDir.resolve(FILE01)).toFile().deleteOnExit();
+               return base;
+       }
+}
diff --git a/org.argeo.core/ext/test/org/argeo/jcr/fs/JcrFileSystemTest.java b/org.argeo.core/ext/test/org/argeo/jcr/fs/JcrFileSystemTest.java
new file mode 100644 (file)
index 0000000..2d03b8f
--- /dev/null
@@ -0,0 +1,191 @@
+package org.argeo.jcr.fs;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.nio.file.attribute.FileTime;
+import java.nio.file.spi.FileSystemProvider;
+import java.util.Arrays;
+import java.util.Map;
+
+import javax.jcr.Property;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.nodetype.NodeType;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.core.RepositoryImpl;
+import org.argeo.jackrabbit.fs.JackrabbitMemoryFsProvider;
+
+import junit.framework.TestCase;
+
+public class JcrFileSystemTest extends TestCase {
+       private final static Log log = LogFactory.getLog(JcrFileSystemTest.class);
+
+       public void testMounts() throws Exception {
+               JackrabbitMemoryFsProvider fsProvider = new JackrabbitMemoryFsProvider() {
+
+                       @Override
+                       protected void postRepositoryCreation(RepositoryImpl repositoryImpl) throws RepositoryException {
+                               // create workspace
+                               Session session = login();
+                               session.getWorkspace().createWorkspace("test");
+                       }
+
+               };
+
+               Path rootPath = fsProvider.getPath(new URI("jcr+memory:/"));
+               log.debug("Got root " + rootPath);
+               Path testDir = rootPath.resolve("testDir");
+               Files.createDirectory(testDir);
+
+               Path testMount = fsProvider.getPath(new URI("jcr+memory:/test"));
+               log.debug("Test path");
+               assertEquals(rootPath, testMount.getParent());
+               assertEquals(testMount.getFileName(), rootPath.relativize(testMount));
+
+               Path testPath = testMount.resolve("test.txt");
+               log.debug("Create file " + testPath);
+               Files.createFile(testPath);
+               BasicFileAttributes bfa = Files.readAttributes(testPath, BasicFileAttributes.class);
+               FileTime ft = bfa.creationTime();
+               assertNotNull(ft);
+               assertTrue(bfa.isRegularFile());
+               log.debug("Created " + testPath + " (" + ft + ")");
+               Files.delete(testPath);
+               log.debug("Deleted " + testPath);
+
+               // Browse directories from root
+               DirectoryStream<Path> files = Files.newDirectoryStream(rootPath);
+               int directoryCount = 0;
+               for (Path file : files) {
+                       if (Files.isDirectory(file)) {
+                               directoryCount++;
+                       }
+               }
+               assertEquals(2, directoryCount);
+
+               // Browse directories from mount
+               Path mountSubDir = testMount.resolve("mountSubDir");
+               Files.createDirectory(mountSubDir);
+               Path otherSubDir = testMount.resolve("otherSubDir");
+               Files.createDirectory(otherSubDir);
+               testPath = testMount.resolve("test.txt");
+               Files.createFile(testPath);
+               files = Files.newDirectoryStream(testMount);
+               int fileCount = 0;
+               for (Path file : files) {
+                       fileCount++;
+               }
+               assertEquals(3, fileCount);
+
+       }
+
+       public void testSimple() throws Exception {
+               FileSystemProvider fsProvider = new JackrabbitMemoryFsProvider();
+
+               // Simple file
+               Path rootPath = fsProvider.getPath(new URI("jcr+memory:/"));
+               log.debug("Got root " + rootPath);
+               Path testPath = fsProvider.getPath(new URI("jcr+memory:/test.txt"));
+               log.debug("Test path");
+               assertEquals("test.txt", testPath.getFileName().toString());
+               assertEquals(rootPath, testPath.getParent());
+               assertEquals(testPath.getFileName(), rootPath.relativize(testPath));
+               // relativize self should be empty path
+               Path selfRelative = testPath.relativize(testPath);
+               assertEquals("", selfRelative.toString());
+
+               log.debug("Create file " + testPath);
+               Files.createFile(testPath);
+               BasicFileAttributes bfa = Files.readAttributes(testPath, BasicFileAttributes.class);
+               FileTime ft = bfa.creationTime();
+               assertNotNull(ft);
+               assertTrue(bfa.isRegularFile());
+               log.debug("Created " + testPath + " (" + ft + ")");
+               Files.delete(testPath);
+               log.debug("Deleted " + testPath);
+               String txt = "TEST\nTEST2\n";
+               byte[] arr = txt.getBytes();
+               Files.write(testPath, arr);
+               log.debug("Wrote " + testPath);
+               byte[] read = Files.readAllBytes(testPath);
+               assertTrue(Arrays.equals(arr, read));
+               assertEquals(txt, new String(read));
+               log.debug("Read " + testPath);
+               Path testDir = rootPath.resolve("testDir");
+               log.debug("Resolved " + testDir);
+               // Copy
+               Files.createDirectory(testDir);
+               log.debug("Created directory " + testDir);
+               Path subsubdir = Files.createDirectories(testDir.resolve("subdir/subsubdir"));
+               log.debug("Created sub directories " + subsubdir);
+               Path copiedFile = testDir.resolve("copiedFile.txt");
+               log.debug("Resolved " + copiedFile);
+               Path relativeCopiedFile = testDir.relativize(copiedFile);
+               assertEquals(copiedFile.getFileName().toString(), relativeCopiedFile.toString());
+               log.debug("Relative copied file " + relativeCopiedFile);
+               try (OutputStream out = Files.newOutputStream(copiedFile); InputStream in = Files.newInputStream(testPath)) {
+                       IOUtils.copy(in, out);
+               }
+               log.debug("Copied " + testPath + " to " + copiedFile);
+               Files.delete(testPath);
+               log.debug("Deleted " + testPath);
+               byte[] copiedRead = Files.readAllBytes(copiedFile);
+               assertTrue(Arrays.equals(copiedRead, read));
+               log.debug("Read " + copiedFile);
+               // Browse directories
+               DirectoryStream<Path> files = Files.newDirectoryStream(testDir);
+               int fileCount = 0;
+               Path listedFile = null;
+               for (Path file : files) {
+                       fileCount++;
+                       if (!Files.isDirectory(file))
+                               listedFile = file;
+               }
+               assertEquals(2, fileCount);
+               assertEquals(copiedFile, listedFile);
+               assertEquals(copiedFile.toString(), listedFile.toString());
+               log.debug("Listed " + testDir);
+               // Generic attributes
+               Map<String, Object> attrs = Files.readAttributes(copiedFile, "*");
+               assertEquals(3, attrs.size());
+               log.debug("Read attributes of " + copiedFile + ": " + attrs.keySet());
+               // Direct node access
+               NodeFileAttributes nfa = Files.readAttributes(copiedFile, NodeFileAttributes.class);
+               nfa.getNode().addMixin(NodeType.MIX_LANGUAGE);
+               nfa.getNode().getSession().save();
+               log.debug("Add mix:language");
+               Files.setAttribute(copiedFile, Property.JCR_LANGUAGE, "fr");
+               log.debug("Set language");
+               attrs = Files.readAttributes(copiedFile, "*");
+               assertEquals(4, attrs.size());
+               log.debug("Read attributes of " + copiedFile + ": " + attrs.keySet());
+       }
+
+       public void testIllegalCharacters() throws Exception {
+               FileSystemProvider fsProvider = new JackrabbitMemoryFsProvider();
+               String fileName = "tüßçt[1].txt";
+               String pathStr = "/testDir/" + fileName;
+               Path testDir = fsProvider.getPath(new URI("jcr+memory:/testDir"));
+               Files.createDirectory(testDir);
+               Path testPath = testDir.resolve(fileName);
+               assertEquals(pathStr, testPath.toString());
+               Files.createFile(testPath);
+               DirectoryStream<Path> files = Files.newDirectoryStream(testDir);
+               Path listedPath = files.iterator().next();
+               assertEquals(pathStr, listedPath.toString());
+
+               String dirName = "*[~WeirdDir~]*";
+               Path subDir = testDir.resolve(dirName);
+               Files.createDirectory(subDir);
+               subDir = testDir.resolve(dirName);
+               assertEquals(dirName, subDir.getFileName().toString());
+       }
+}
diff --git a/org.argeo.core/ext/test/org/argeo/util/CsvParserEncodingTest.java b/org.argeo.core/ext/test/org/argeo/util/CsvParserEncodingTest.java
new file mode 100644 (file)
index 0000000..09443c2
--- /dev/null
@@ -0,0 +1,36 @@
+package org.argeo.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.util.List;
+
+/** Tests that {@link CsvParser} can deal properly with encodings. */
+public class CsvParserEncodingTest {
+
+       private String iso = "ISO-8859-1";
+       private String utf8 = "UTF-8";
+
+       public void testParse() throws Exception {
+
+               String xml = new String("áéíóúñ,éééé");
+               byte[] utfBytes = xml.getBytes(utf8);
+               byte[] isoBytes = xml.getBytes(iso);
+
+               InputStream inUtf = new ByteArrayInputStream(utfBytes);
+               InputStream inIso = new ByteArrayInputStream(isoBytes);
+
+               CsvParser csvParser = new CsvParser() {
+                       protected void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
+                               assert header.size() == tokens.size();
+                               assert 2 == tokens.size();
+                               assert "áéíóúñ".equals(tokens.get(0));
+                               assert "éééé".equals(tokens.get(1));
+                       }
+               };
+
+               csvParser.parse(inUtf, utf8);
+               inUtf.close();
+               csvParser.parse(inIso, iso);
+               inIso.close();
+       }
+}
diff --git a/org.argeo.core/ext/test/org/argeo/util/CsvParserParseFileTest.java b/org.argeo.core/ext/test/org/argeo/util/CsvParserParseFileTest.java
new file mode 100644 (file)
index 0000000..5a92c68
--- /dev/null
@@ -0,0 +1,25 @@
+package org.argeo.util;
+
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+/** Test that {@link CsvParser} can properly parse a CSV file. */
+public class CsvParserParseFileTest {
+       public void testParse() throws Exception {
+
+               final Map<Integer, Map<String, String>> lines = new HashMap<Integer, Map<String, String>>();
+               InputStream in = getClass().getResourceAsStream("/org/argeo/util/ReferenceFile.csv");
+               CsvParserWithLinesAsMap parser = new CsvParserWithLinesAsMap() {
+                       protected void processLine(Integer lineNumber, Map<String, String> line) {
+                               lines.put(lineNumber, line);
+                       }
+               };
+
+               parser.parse(in);
+               in.close();
+
+               assert 5 == lines.size();
+       }
+
+}
diff --git a/org.argeo.core/ext/test/org/argeo/util/CsvParserTest.java b/org.argeo.core/ext/test/org/argeo/util/CsvParserTest.java
new file mode 100644 (file)
index 0000000..e59dbd1
--- /dev/null
@@ -0,0 +1,30 @@
+package org.argeo.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.util.List;
+
+/** {@link CsvParser} tests. */
+public class CsvParserTest {
+       public void testParse() throws Exception {
+               String toParse = "Header1,\"Header\n2\",Header3,\"Header4\"\n" + "Col1,\"Col\n2\",Col3,\"\"\"Col4\"\"\"\n"
+                               + "Col1,\"Col\n2\",Col3,\"\"\"Col4\"\"\"\n" + "Col1,\"Col\n2\",Col3,\"\"\"Col4\"\"\"\n";
+
+               InputStream in = new ByteArrayInputStream(toParse.getBytes());
+
+               CsvParser csvParser = new CsvParser() {
+                       protected void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
+                               assert header.size() == tokens.size();
+                               assert 4 == tokens.size();
+                               assert "Col1".equals(tokens.get(0));
+                               assert "Col\n2".equals(tokens.get(1));
+                               assert "Col3".equals(tokens.get(2));
+                               assert "\"Col4\"".equals(tokens.get(3));
+                       }
+               };
+
+               csvParser.parse(in);
+               in.close();
+       }
+
+}
diff --git a/org.argeo.core/ext/test/org/argeo/util/CsvParserWithQuotedSeparatorTest.java b/org.argeo.core/ext/test/org/argeo/util/CsvParserWithQuotedSeparatorTest.java
new file mode 100644 (file)
index 0000000..67ba346
--- /dev/null
@@ -0,0 +1,58 @@
+package org.argeo.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/** Test that {@link CsvParser} deals properly with "" quotes. */
+public class CsvParserWithQuotedSeparatorTest {
+       public void testSimpleParse() throws Exception {
+               String toParse = "Header1,\"Header2\",Header3,\"Header4\"\n"
+                               + "\"Col1, Col2\",\"Col\n2\",Col3,\"\"\"Col4\"\"\"\n";
+
+               InputStream in = new ByteArrayInputStream(toParse.getBytes());
+
+               CsvParser csvParser = new CsvParser() {
+                       protected void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
+                               assert header.size() == tokens.size();
+                               assert 4 == tokens.size();
+                               assert "Col1, Col2".equals(tokens.get(0));
+                       }
+               };
+               // System.out.println(toParse);
+               csvParser.parse(in);
+               in.close();
+
+       }
+
+       public void testParseFile() throws Exception {
+
+               final Map<Integer, Map<String, String>> lines = new HashMap<Integer, Map<String, String>>();
+               InputStream in = getClass().getResourceAsStream("/org/argeo/util/ReferenceFile.csv");
+
+               CsvParserWithLinesAsMap parser = new CsvParserWithLinesAsMap() {
+                       protected void processLine(Integer lineNumber, Map<String, String> line) {
+                               // System.out.println("processing line #" + lineNumber);
+                               lines.put(lineNumber, line);
+                       }
+               };
+
+               parser.parse(in);
+               in.close();
+
+               Map<String, String> line = lines.get(2);
+               assert ",,,,".equals(line.get("Coma testing"));
+               line = lines.get(3);
+               assert ",, ,,".equals(line.get("Coma testing"));
+               line = lines.get(4);
+               assert "module1, module2".equals(line.get("Coma testing"));
+               line = lines.get(5);
+               assert "module1,module2".equals(line.get("Coma testing"));
+               line = lines.get(6);
+               assert ",module1,module2, \nmodule3, module4".equals(line.get("Coma testing"));
+               assert 5 == lines.size();
+
+       }
+}
diff --git a/org.argeo.core/ext/test/org/argeo/util/CsvWriterTest.java b/org.argeo.core/ext/test/org/argeo/util/CsvWriterTest.java
new file mode 100644 (file)
index 0000000..ff5dcc5
--- /dev/null
@@ -0,0 +1,47 @@
+package org.argeo.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/** {@link CsvWriter} tests. */
+public class CsvWriterTest {
+       public void testWrite() throws Exception {
+               ByteArrayOutputStream out = new ByteArrayOutputStream();
+               final CsvWriter csvWriter = new CsvWriter(out);
+
+               String[] header = { "Header1", "Header 2", "Header,3", "Header\n4", "Header\"5\"" };
+               String[] line1 = { "Value1", "Value 2", "Value,3", "Value\n4", "Value\"5\"" };
+               csvWriter.writeLine(Arrays.asList(header));
+               csvWriter.writeLine(Arrays.asList(line1));
+
+               String reference = "Header1,Header 2,\"Header,3\",\"Header\n4\",\"Header\"\"5\"\"\"\n"
+                               + "Value1,Value 2,\"Value,3\",\"Value\n4\",\"Value\"\"5\"\"\"\n";
+               String written = new String(out.toByteArray());
+               assert reference.equals(written);
+               out.close();
+               System.out.println(written);
+
+               final List<String> allTokens = new ArrayList<String>();
+               CsvParser csvParser = new CsvParser() {
+                       protected void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
+                               if (lineNumber == 2)
+                                       allTokens.addAll(header);
+                               allTokens.addAll(tokens);
+                       }
+               };
+               ByteArrayInputStream in = new ByteArrayInputStream(written.getBytes());
+               csvParser.parse(in);
+               in.close();
+               List<String> allTokensRef = new ArrayList<String>();
+               allTokensRef.addAll(Arrays.asList(header));
+               allTokensRef.addAll(Arrays.asList(line1));
+
+               assert allTokensRef.size() == allTokens.size();
+               for (int i = 0; i < allTokensRef.size(); i++)
+                       assert allTokensRef.get(i).equals(allTokens.get(i));
+       }
+
+}
diff --git a/org.argeo.core/ext/test/org/argeo/util/ReferenceFile.csv b/org.argeo.core/ext/test/org/argeo/util/ReferenceFile.csv
new file mode 100644 (file)
index 0000000..351453d
--- /dev/null
@@ -0,0 +1,37 @@
+"ID","A long Text","Name","Other","Number","Reference","Target","Date","Update","Language","ID Ref","Weird chars","line feeds","after line feed","Empty column","Status comment","Comments","Empty","Coma testing"
+"AK251","Everything & with some line feed 
+ more “some” quote","Marge S.",,78.6,"A1155222221111268515131",,12/12/12,03/12/08,,9821308500721,"%%%ùù","ao","Nothing special",,,"Some very usefull comment",,",,,,"
+"AG254","same","Roger “wallace” Big","15 – JI",78.5,"A1155222221111268515131","next milestone",12/12/12,03/12/08,"_fr (French - France)",9812309500953,"***µ”","a
+
+
+
+
+o","after line feed",,"Do the job",,,",, ,,"
+"FG211","Very long text with some bullets.
+1 first
+2 second
+3. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long","Father & Son","15 – JI",15.4,"A1155222221111268515131","next milestone",12/12/12,03/12/08,"_fr (French - France)",9812309500952,"///","a
+
+
+
+
+
+
+o","module1,module2",,"Be fast",,,"module1, module2"
+"RRT152","Very long text with some bullets.
+1 first
+2 second
+3. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long","Another $$","15 – JI",12.3,"A1155222221111268515131","next milestone",12/12/12,03/12/08,"_fr (French - France)",9812309500950,"---","a
+
+o
+
+
+","module1,module2",,,,,"module1,module2"
+"YU121","Another use case : “blank line”
+
+After the blank.","nothing with brackets( )","15 – JI",15.2,"A1155222221111268515131",,12/12/12,03/12/08,"_fr (French - France)",9812309500925,",;:?./","ao","
+
+
+
+After line feed again",,,,,",module1,module2, 
+module3, module4"
diff --git a/org.argeo.core/ext/test/org/argeo/util/TestParse-ISO.csv b/org.argeo.core/ext/test/org/argeo/util/TestParse-ISO.csv
new file mode 100644 (file)
index 0000000..0bec611
--- /dev/null
@@ -0,0 +1,8 @@
+"Date d'imputation","N° de compte","Code journal","Pièce interne","Pièce externe","Libellé d'écriture","Débit","Crédit","Lettrage","Quantité","Code analytique","Date d'échéance","Date d'imputation origine","Code journal origine","Mode de règlement","Date début de période","Date fin de période"
+26.01.2010,"101300","BQ","BQ01.10",,"Depot société en formation",,"3.000,00",,,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"101300","BQ","BQ01.10",,"Depot société en formation",,"7.000,00",,,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"411OPEN","BQ","BQ01.10",,"Vir Client ",,"2.508,00","A",,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"455100","BQ","BQ01.10",,"Bankomat Raiffeise","250,00",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"512101","BQ","BQ01.10",,"Extrait bancaire 01.10","12.250,55",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"627800","BQ","BQ01.10",,"Envoi de chequier","2,30",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"627800","BQ","BQ01.10",,"Frais d'expedition","5,15",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
diff --git a/org.argeo.core/ext/test/org/argeo/util/TestParse-UTF-8.csv b/org.argeo.core/ext/test/org/argeo/util/TestParse-UTF-8.csv
new file mode 100644 (file)
index 0000000..0bec611
--- /dev/null
@@ -0,0 +1,8 @@
+"Date d'imputation","N° de compte","Code journal","Pièce interne","Pièce externe","Libellé d'écriture","Débit","Crédit","Lettrage","Quantité","Code analytique","Date d'échéance","Date d'imputation origine","Code journal origine","Mode de règlement","Date début de période","Date fin de période"
+26.01.2010,"101300","BQ","BQ01.10",,"Depot société en formation",,"3.000,00",,,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"101300","BQ","BQ01.10",,"Depot société en formation",,"7.000,00",,,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"411OPEN","BQ","BQ01.10",,"Vir Client ",,"2.508,00","A",,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"455100","BQ","BQ01.10",,"Bankomat Raiffeise","250,00",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"512101","BQ","BQ01.10",,"Extrait bancaire 01.10","12.250,55",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"627800","BQ","BQ01.10",,"Envoi de chequier","2,30",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
+26.01.2010,"627800","BQ","BQ01.10",,"Frais d'expedition","5,15",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
diff --git a/org.argeo.core/ext/test/org/argeo/util/ThroughputTest.java b/org.argeo.core/ext/test/org/argeo/util/ThroughputTest.java
new file mode 100644 (file)
index 0000000..d62f55c
--- /dev/null
@@ -0,0 +1,17 @@
+package org.argeo.util;
+
+public class ThroughputTest {
+       public void testParse() throws Exception {
+//             assert 0 == 1;
+
+               Throughput t;
+               t = new Throughput("3.54/s");
+               assert 3.54d == t.getValue();
+               assert Throughput.Unit.s.equals(t.getUnit());
+               assert 282l == (long) t.asMsPeriod();
+
+               t = new Throughput("35698.2569/h");
+               assert Throughput.Unit.h.equals(t.getUnit());
+               assert 101l == (long) t.asMsPeriod();
+       }
+}
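The expected values in this test presumably come from converting each rate into a per-item period in milliseconds; the Throughput class itself is not shown in this commit, so the following reconstruction is only an assumption about its rounding behaviour:

public class ThroughputPeriodSketch {
	public static void main(String[] args) {
		// Assuming asMsPeriod() returns the per-item period in ms, rounded:
		System.out.println(Math.round(1000d / 3.54d));             // 282
		System.out.println(Math.round(3_600_000d / 35698.2569d));  // 101
	}
}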
diff --git a/org.argeo.core/pom.xml b/org.argeo.core/pom.xml
index 96549e653f4d1f10904cab0d4c11d59c45df2ebb..5dfcdde216c9fbb344da342d0c10b841dde9cae8 100644 (file)
        <artifactId>org.argeo.core</artifactId>
        <name>Commons Third Parties Utilities</name>
        <dependencies>
-               <dependency>
-                       <groupId>org.argeo.commons</groupId>
-                       <artifactId>org.argeo.util</artifactId>
-                       <version>2.1.89-SNAPSHOT</version>
-               </dependency>
                <dependency>
                        <groupId>org.argeo.commons</groupId>
                        <artifactId>org.argeo.enterprise</artifactId>
diff --git a/org.argeo.core/src/org/argeo/fs/BasicSyncFileVisitor.java b/org.argeo.core/src/org/argeo/fs/BasicSyncFileVisitor.java
new file mode 100644 (file)
index 0000000..03bac59
--- /dev/null
@@ -0,0 +1,164 @@
+package org.argeo.fs;
+
+import java.io.IOException;
+import java.nio.file.FileVisitResult;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.SimpleFileVisitor;
+import java.nio.file.StandardCopyOption;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.nio.file.attribute.FileTime;
+
+import org.argeo.sync.SyncResult;
+
+/** Synchronises two directory structures. */
+public class BasicSyncFileVisitor extends SimpleFileVisitor<Path> {
+       // TODO make it configurable
+       private boolean trace = false;
+
+       private final Path sourceBasePath;
+       private final Path targetBasePath;
+       private final boolean delete;
+       private final boolean recursive;
+
+       private SyncResult<Path> syncResult = new SyncResult<>();
+
+       public BasicSyncFileVisitor(Path sourceBasePath, Path targetBasePath, boolean delete, boolean recursive) {
+               this.sourceBasePath = sourceBasePath;
+               this.targetBasePath = targetBasePath;
+               this.delete = delete;
+               this.recursive = recursive;
+       }
+
+       @Override
+       public FileVisitResult preVisitDirectory(Path sourceDir, BasicFileAttributes attrs) throws IOException {
+               if (!recursive && !sourceDir.equals(sourceBasePath))
+                       return FileVisitResult.SKIP_SUBTREE;
+               Path targetDir = toTargetPath(sourceDir);
+               Files.createDirectories(targetDir);
+               return FileVisitResult.CONTINUE;
+       }
+
+       @Override
+       public FileVisitResult postVisitDirectory(Path sourceDir, IOException exc) throws IOException {
+               if (delete) {
+                       Path targetDir = toTargetPath(sourceDir);
+                       for (Path targetPath : Files.newDirectoryStream(targetDir)) {
+                               Path sourcePath = sourceDir.resolve(targetPath.getFileName());
+                               if (!Files.exists(sourcePath)) {
+                                       try {
+                                               FsUtils.delete(targetPath);
+                                               deleted(targetPath);
+                                       } catch (Exception e) {
+                                               deleteFailed(targetPath, exc);
+                                       }
+                               }
+                       }
+               }
+               return FileVisitResult.CONTINUE;
+       }
+
+       @Override
+       public FileVisitResult visitFile(Path sourceFile, BasicFileAttributes attrs) throws IOException {
+               Path targetFile = toTargetPath(sourceFile);
+               try {
+                       if (!Files.exists(targetFile)) {
+                               Files.copy(sourceFile, targetFile);
+                               added(sourceFile, targetFile);
+                       } else {
+                               if (shouldOverwrite(sourceFile, targetFile)) {
+                                       Files.copy(sourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING);
+                               }
+                       }
+               } catch (Exception e) {
+                       copyFailed(sourceFile, targetFile, e);
+               }
+               return FileVisitResult.CONTINUE;
+       }
+
+       protected boolean shouldOverwrite(Path sourceFile, Path targetFile) throws IOException {
+               long sourceSize = Files.size(sourceFile);
+               long targetSize = Files.size(targetFile);
+               if (sourceSize != targetSize) {
+                       return true;
+               }
+               FileTime sourceLastModif = Files.getLastModifiedTime(sourceFile);
+               FileTime targetLastModif = Files.getLastModifiedTime(targetFile);
+               if (sourceLastModif.compareTo(targetLastModif) > 0)
+                       return true;
+               return shouldOverwriteLaterSameSize(sourceFile, targetFile);
+       }
+
+       protected boolean shouldOverwriteLaterSameSize(Path sourceFile, Path targetFile) {
+               return false;
+       }
+
+//     @Override
+//     public FileVisitResult visitFileFailed(Path sourceFile, IOException exc) throws IOException {
+//             error("Cannot sync " + sourceFile, exc);
+//             return FileVisitResult.CONTINUE;
+//     }
+
+       private Path toTargetPath(Path sourcePath) {
+               Path relativePath = sourceBasePath.relativize(sourcePath);
+               Path targetPath = targetBasePath.resolve(relativePath.toString());
+               return targetPath;
+       }
+
+       public Path getSourceBasePath() {
+               return sourceBasePath;
+       }
+
+       public Path getTargetBasePath() {
+               return targetBasePath;
+       }
+
+       protected void added(Path sourcePath, Path targetPath) {
+               syncResult.getAdded().add(targetPath);
+               if (isTraceEnabled())
+                       trace("Added " + sourcePath + " as " + targetPath);
+       }
+
+       protected void modified(Path sourcePath, Path targetPath) {
+               syncResult.getModified().add(targetPath);
+               if (isTraceEnabled())
+                       trace("Overwritten from " + sourcePath + " to " + targetPath);
+       }
+
+       protected void copyFailed(Path sourcePath, Path targetPath, Exception e) {
+               syncResult.addError(sourcePath, targetPath, e);
+               if (isTraceEnabled())
+                       error("Cannot copy " + sourcePath + " to " + targetPath, e);
+       }
+
+       protected void deleted(Path targetPath) {
+               syncResult.getDeleted().add(targetPath);
+               if (isTraceEnabled())
+                       trace("Deleted " + targetPath);
+       }
+
+       protected void deleteFailed(Path targetPath, Exception e) {
+               syncResult.addError(null, targetPath, e);
+               if (isTraceEnabled())
+                       error("Cannot delete " + targetPath, e);
+       }
+
+       /** Log error. */
+       protected void error(Object obj, Throwable e) {
+               System.err.println(obj);
+               e.printStackTrace();
+       }
+
+       protected boolean isTraceEnabled() {
+               return trace;
+       }
+
+       protected void trace(Object obj) {
+               System.out.println(obj);
+       }
+
+       public SyncResult<Path> getSyncResult() {
+               return syncResult;
+       }
+
+}
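The protected hooks above are the intended extension points. A minimal sketch of a subclass (the class name and the byte-by-byte comparison are illustrative, not part of the commit) that also overwrites a target file whose size and timestamp give no hint but whose content differs:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;

import org.argeo.fs.BasicSyncFileVisitor;

/** Illustrative subclass comparing content when sizes match and the source is not newer. */
public class ContentAwareSyncVisitor extends BasicSyncFileVisitor {
	public ContentAwareSyncVisitor(Path sourceBasePath, Path targetBasePath, boolean delete, boolean recursive) {
		super(sourceBasePath, targetBasePath, delete, recursive);
	}

	@Override
	protected boolean shouldOverwriteLaterSameSize(Path sourceFile, Path targetFile) {
		try {
			// Fall back to a content comparison instead of always returning false.
			return !Arrays.equals(Files.readAllBytes(sourceFile), Files.readAllBytes(targetFile));
		} catch (IOException e) {
			return true; // when in doubt, copy again
		}
	}
}

Such a visitor is then simply passed to Files.walkFileTree(sourceBasePath, visitor), which is what FsUtils.sync(BasicSyncFileVisitor) in the next file does.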
diff --git a/org.argeo.core/src/org/argeo/fs/FsUtils.java b/org.argeo.core/src/org/argeo/fs/FsUtils.java
new file mode 100644 (file)
index 0000000..c96f56e
--- /dev/null
@@ -0,0 +1,58 @@
+package org.argeo.fs;
+
+import java.io.IOException;
+import java.nio.file.FileVisitResult;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.SimpleFileVisitor;
+import java.nio.file.attribute.BasicFileAttributes;
+
+/** Utilities around the standard Java file abstractions. */
+public class FsUtils {
+       /** Sync a source path with a target path. */
+       public static void sync(Path sourceBasePath, Path targetBasePath) {
+               sync(sourceBasePath, targetBasePath, false);
+       }
+
+       /** Sync a source path with a target path. */
+       public static void sync(Path sourceBasePath, Path targetBasePath, boolean delete) {
+               sync(new BasicSyncFileVisitor(sourceBasePath, targetBasePath, delete, true));
+       }
+
+       public static void sync(BasicSyncFileVisitor syncFileVisitor) {
+               try {
+                       Files.walkFileTree(syncFileVisitor.getSourceBasePath(), syncFileVisitor);
+               } catch (Exception e) {
+                       throw new RuntimeException("Cannot sync " + syncFileVisitor.getSourceBasePath() + " with "
+                                       + syncFileVisitor.getTargetBasePath(), e);
+               }
+       }
+
+       /** Deletes this path, recursively if needed. */
+       public static void delete(Path path) {
+               try {
+                       Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
+                               @Override
+                               public FileVisitResult postVisitDirectory(Path directory, IOException e) throws IOException {
+                                       if (e != null)
+                                               throw e;
+                                       Files.delete(directory);
+                                       return FileVisitResult.CONTINUE;
+                               }
+
+                               @Override
+                               public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
+                                       Files.delete(file);
+                                       return FileVisitResult.CONTINUE;
+                               }
+                       });
+               } catch (IOException e) {
+                       throw new RuntimeException("Cannot delete " + path, e);
+               }
+       }
+
+       /** Singleton. */
+       private FsUtils() {
+       }
+
+}
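At call sites the two entry points reduce to a few lines; a minimal usage sketch (the temporary directories and file name are illustrative):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.argeo.fs.FsUtils;

public class FsUtilsUsageSketch {
	public static void main(String[] args) throws IOException {
		Path source = Files.createTempDirectory("fsutils-source");
		Path target = Files.createTempDirectory("fsutils-target");
		Files.createFile(source.resolve("data.txt"));

		FsUtils.sync(source, target, true); // mirror source into target, deleting extra target files
		FsUtils.delete(source);             // recursive delete
		FsUtils.delete(target);
	}
}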
diff --git a/org.argeo.core/src/org/argeo/fs/package-info.java b/org.argeo.core/src/org/argeo/fs/package-info.java
new file mode 100644 (file)
index 0000000..ea2de9e
--- /dev/null
@@ -0,0 +1,2 @@
+/** Generic file system utilities. */
+package org.argeo.fs;
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/JackrabbitAdminLoginModule.java b/org.argeo.core/src/org/argeo/jackrabbit/JackrabbitAdminLoginModule.java
new file mode 100644 (file)
index 0000000..7396c87
--- /dev/null
@@ -0,0 +1,48 @@
+package org.argeo.jackrabbit;
+
+import java.util.Map;
+
+import javax.security.auth.Subject;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.login.LoginException;
+import javax.security.auth.spi.LoginModule;
+
+import org.apache.jackrabbit.core.security.SecurityConstants;
+import org.apache.jackrabbit.core.security.principal.AdminPrincipal;
+
+@Deprecated
+public class JackrabbitAdminLoginModule implements LoginModule {
+       private Subject subject;
+
+       @Override
+       public void initialize(Subject subject, CallbackHandler callbackHandler,
+                       Map<String, ?> sharedState, Map<String, ?> options) {
+               this.subject = subject;
+       }
+
+       @Override
+       public boolean login() throws LoginException {
+               // TODO check permission?
+               return true;
+       }
+
+       @Override
+       public boolean commit() throws LoginException {
+               subject.getPrincipals().add(
+                               new AdminPrincipal(SecurityConstants.ADMIN_ID));
+               return true;
+       }
+
+       @Override
+       public boolean abort() throws LoginException {
+               return true;
+       }
+
+       @Override
+       public boolean logout() throws LoginException {
+               subject.getPrincipals().removeAll(
+                               subject.getPrincipals(AdminPrincipal.class));
+               return true;
+       }
+
+}
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/JackrabbitDataModelMigration.java b/org.argeo.core/src/org/argeo/jackrabbit/JackrabbitDataModelMigration.java
new file mode 100644 (file)
index 0000000..838446d
--- /dev/null
@@ -0,0 +1,166 @@
+package org.argeo.jackrabbit;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.net.URL;
+
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.jackrabbit.commons.cnd.CndImporter;
+import org.apache.jackrabbit.commons.cnd.ParseException;
+import org.apache.jackrabbit.core.config.RepositoryConfig;
+import org.argeo.jcr.JcrException;
+import org.argeo.jcr.JcrCallback;
+import org.argeo.jcr.JcrUtils;
+
+/** Migrate the data in a Jackrabbit repository. */
+@Deprecated
+public class JackrabbitDataModelMigration implements Comparable<JackrabbitDataModelMigration> {
+//     private final static Log log = LogFactory.getLog(JackrabbitDataModelMigration.class);
+
+       private String dataModelNodePath;
+       private String targetVersion;
+       private URL migrationCnd;
+       private JcrCallback dataModification;
+
+       /**
+        * Expects an already started repository with the old data model to migrate.
+        * Expects to be run with admin rights (Repository.login() will be used).
+        * 
+        * @return true if a migration was performed and the repository needs to be
+        *         restarted and its caches cleared.
+        */
+       public Boolean migrate(Session session) {
+               long begin = System.currentTimeMillis();
+               Reader reader = null;
+               try {
+                       // check if already migrated
+                       if (!session.itemExists(dataModelNodePath)) {
+//                             log.warn("Node " + dataModelNodePath + " does not exist: nothing to migrate.");
+                               return false;
+                       }
+//                     Node dataModelNode = session.getNode(dataModelNodePath);
+//                     if (dataModelNode.hasProperty(ArgeoNames.ARGEO_DATA_MODEL_VERSION)) {
+//                             String currentVersion = dataModelNode.getProperty(
+//                                             ArgeoNames.ARGEO_DATA_MODEL_VERSION).getString();
+//                             if (compareVersions(currentVersion, targetVersion) >= 0) {
+//                                     log.info("Data model at version " + currentVersion
+//                                                     + ", no need to migrate.");
+//                                     return false;
+//                             }
+//                     }
+
+                       // apply transitional CND
+                       if (migrationCnd != null) {
+                               reader = new InputStreamReader(migrationCnd.openStream());
+                               CndImporter.registerNodeTypes(reader, session, true);
+                               session.save();
+//                             log.info("Registered migration node types from " + migrationCnd);
+                       }
+
+                       // modify data
+                       dataModification.execute(session);
+
+                       // apply changes
+                       session.save();
+
+                       long duration = System.currentTimeMillis() - begin;
+//                     log.info("Migration of data model " + dataModelNodePath + " to " + targetVersion + " performed in "
+//                                     + duration + "ms");
+                       return true;
+               } catch (RepositoryException e) {
+                       JcrUtils.discardQuietly(session);
+                       throw new JcrException(
+                                       "Migration of data model " + dataModelNodePath + " to " + targetVersion + " failed.", e);
+               } catch (ParseException | IOException e) {
+                       JcrUtils.discardQuietly(session);
+                       throw new RuntimeException(
+                                       "Migration of data model " + dataModelNodePath + " to " + targetVersion + " failed.", e);
+               } finally {
+                       JcrUtils.logoutQuietly(session);
+                       IOUtils.closeQuietly(reader);
+               }
+       }
+
+       protected static int compareVersions(String version1, String version2) {
+               // TODO do a proper version analysis and comparison
+               return version1.compareTo(version2);
+       }
+
+       /** To be called on a stopped repository. */
+       public static void clearRepositoryCaches(RepositoryConfig repositoryConfig) {
+               try {
+                       String customNodeTypesPath = "/nodetypes/custom_nodetypes.xml";
+                       // FIXME causes weird error in Eclipse
+//                      repositoryConfig.getFileSystem().deleteFile(customNodeTypesPath);
+//                     if (log.isDebugEnabled())
+//                             log.debug("Cleared " + customNodeTypesPath);
+               } catch (RuntimeException e) {
+                       throw e;
+               }
+
+               // File customNodeTypes = new File(home.getPath()
+               // + "/repository/nodetypes/custom_nodetypes.xml");
+               // if (customNodeTypes.exists()) {
+               // customNodeTypes.delete();
+               // if (log.isDebugEnabled())
+               // log.debug("Cleared " + customNodeTypes);
+               // } else {
+               // log.warn("File " + customNodeTypes + " not found.");
+               // }
+       }
+
+       /*
+        * FOR USE IN (SORTED) SETS
+        */
+
+       public int compareTo(JackrabbitDataModelMigration dataModelMigration) {
+               // TODO make ordering smarter
+               if (dataModelNodePath.equals(dataModelMigration.dataModelNodePath))
+                       return compareVersions(targetVersion, dataModelMigration.targetVersion);
+               else
+                       return dataModelNodePath.compareTo(dataModelMigration.dataModelNodePath);
+       }
+
+       @Override
+       public boolean equals(Object obj) {
+               if (!(obj instanceof JackrabbitDataModelMigration))
+                       return false;
+               JackrabbitDataModelMigration dataModelMigration = (JackrabbitDataModelMigration) obj;
+               return dataModelNodePath.equals(dataModelMigration.dataModelNodePath)
+                               && targetVersion.equals(dataModelMigration.targetVersion);
+       }
+
+       @Override
+       public int hashCode() {
+               return targetVersion.hashCode();
+       }
+
+       public void setDataModelNodePath(String dataModelNodePath) {
+               this.dataModelNodePath = dataModelNodePath;
+       }
+
+       public void setTargetVersion(String targetVersion) {
+               this.targetVersion = targetVersion;
+       }
+
+       public void setMigrationCnd(URL migrationCnd) {
+               this.migrationCnd = migrationCnd;
+       }
+
+       public void setDataModification(JcrCallback dataModification) {
+               this.dataModification = dataModification;
+       }
+
+       public String getDataModelNodePath() {
+               return dataModelNodePath;
+       }
+
+       public String getTargetVersion() {
+               return targetVersion;
+       }
+
+}
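
For illustration, a minimal sketch of wiring up this (deprecated) migration helper; the node path, target version, CND resource and the pre-existing dataModification callback are assumptions, and the session passed to migrate() must have admin rights (it is saved and logged out by migrate()):

    JackrabbitDataModelMigration migration = new JackrabbitDataModelMigration();
    migration.setDataModelNodePath("/mydata"); // hypothetical data model root, must already exist
    migration.setTargetVersion("2.0"); // hypothetical target version
    migration.setMigrationCnd(getClass().getResource("migration-2.0.cnd")); // hypothetical transitional CND
    migration.setDataModification(dataModification); // a JcrCallback adapting the content, assumed defined elsewhere
    boolean restartNeeded = migration.migrate(adminSession); // true means restart and clear caches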
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/client/ClientDavexRepositoryFactory.java b/org.argeo.core/src/org/argeo/jackrabbit/client/ClientDavexRepositoryFactory.java
new file mode 100644 (file)
index 0000000..77ad527
--- /dev/null
@@ -0,0 +1,26 @@
+package org.argeo.jackrabbit.client;
+
+import java.util.Map;
+
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.RepositoryFactory;
+
+import org.apache.jackrabbit.jcr2spi.Jcr2spiRepositoryFactory;
+import org.apache.jackrabbit.jcr2spi.RepositoryImpl;
+import org.apache.jackrabbit.spi.RepositoryServiceFactory;
+
+/** A customised {@link RepositoryFactory} accessing a remote DAVEX service. */
+public class ClientDavexRepositoryFactory implements RepositoryFactory {
+       public final static String JACKRABBIT_DAVEX_URI = ClientDavexRepositoryServiceFactory.PARAM_REPOSITORY_URI;
+       public final static String JACKRABBIT_REMOTE_DEFAULT_WORKSPACE = ClientDavexRepositoryServiceFactory.PARAM_WORKSPACE_NAME_DEFAULT;
+
+       @SuppressWarnings("rawtypes")
+       @Override
+       public Repository getRepository(Map parameters) throws RepositoryException {
+               RepositoryServiceFactory repositoryServiceFactory = new ClientDavexRepositoryServiceFactory();
+               return RepositoryImpl
+                               .create(new Jcr2spiRepositoryFactory.RepositoryConfigImpl(repositoryServiceFactory, parameters));
+       }
+
+}
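
For illustration, a minimal sketch of obtaining a remote repository through this factory (endpoint URL, workspace names and credentials are assumptions; RepositoryException handling omitted):

    Map<String, String> params = new HashMap<>();
    params.put(ClientDavexRepositoryFactory.JACKRABBIT_DAVEX_URI, "http://localhost:7070/jcr"); // assumed DAVEX endpoint
    params.put(ClientDavexRepositoryFactory.JACKRABBIT_REMOTE_DEFAULT_WORKSPACE, "sys");
    Repository repository = new ClientDavexRepositoryFactory().getRepository(params);
    Session session = repository.login(new SimpleCredentials("demo", "demo".toCharArray()), "home");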
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/client/ClientDavexRepositoryService.java b/org.argeo.core/src/org/argeo/jackrabbit/client/ClientDavexRepositoryService.java
new file mode 100644 (file)
index 0000000..0f9db87
--- /dev/null
@@ -0,0 +1,40 @@
+package org.argeo.jackrabbit.client;
+
+import javax.jcr.RepositoryException;
+
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.protocol.HttpContext;
+import org.apache.jackrabbit.spi.SessionInfo;
+import org.apache.jackrabbit.spi2davex.BatchReadConfig;
+import org.apache.jackrabbit.spi2davex.RepositoryServiceImpl;
+
+/**
+ * Wrapper around {@link RepositoryServiceImpl} which customises the underlying
+ * {@link HttpClientContext} (e.g. to set a non-serialisable auth cache).
+ */
+public class ClientDavexRepositoryService extends RepositoryServiceImpl {
+
+       public ClientDavexRepositoryService(String jcrServerURI, BatchReadConfig batchReadConfig)
+                       throws RepositoryException {
+               super(jcrServerURI, batchReadConfig);
+       }
+
+       public ClientDavexRepositoryService(String jcrServerURI, String defaultWorkspaceName,
+                       BatchReadConfig batchReadConfig, int itemInfoCacheSize, int maximumHttpConnections)
+                       throws RepositoryException {
+               super(jcrServerURI, defaultWorkspaceName, batchReadConfig, itemInfoCacheSize, maximumHttpConnections);
+       }
+
+       public ClientDavexRepositoryService(String jcrServerURI, String defaultWorkspaceName,
+                       BatchReadConfig batchReadConfig, int itemInfoCacheSize) throws RepositoryException {
+               super(jcrServerURI, defaultWorkspaceName, batchReadConfig, itemInfoCacheSize);
+       }
+
+       @Override
+       protected HttpContext getContext(SessionInfo sessionInfo) throws RepositoryException {
+               HttpClientContext result = HttpClientContext.create();
+               result.setAuthCache(new NonSerialBasicAuthCache());
+               return result;
+       }
+
+}
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/client/ClientDavexRepositoryServiceFactory.java b/org.argeo.core/src/org/argeo/jackrabbit/client/ClientDavexRepositoryServiceFactory.java
new file mode 100644 (file)
index 0000000..4b240f0
--- /dev/null
@@ -0,0 +1,82 @@
+package org.argeo.jackrabbit.client;
+
+import java.util.Map;
+
+import javax.jcr.RepositoryException;
+
+import org.apache.jackrabbit.spi.RepositoryService;
+import org.apache.jackrabbit.spi.commons.ItemInfoCacheImpl;
+import org.apache.jackrabbit.spi2davex.BatchReadConfig;
+import org.apache.jackrabbit.spi2davex.Spi2davexRepositoryServiceFactory;
+
+/**
+ * Wrapper for {@link Spi2davexRepositoryServiceFactory} in order to create a
+ * {@link ClientDavexRepositoryService}.
+ */
+public class ClientDavexRepositoryServiceFactory extends Spi2davexRepositoryServiceFactory {
+       @Override
+       public RepositoryService createRepositoryService(Map<?, ?> parameters) throws RepositoryException {
+               // retrieve the repository uri
+               String uri;
+               if (parameters == null) {
+                       uri = System.getProperty(PARAM_REPOSITORY_URI);
+               } else {
+                       Object repoUri = parameters.get(PARAM_REPOSITORY_URI);
+                       uri = (repoUri == null) ? null : repoUri.toString();
+               }
+               if (uri == null) {
+                       uri = DEFAULT_REPOSITORY_URI;
+               }
+
+               // load other optional configuration parameters
+               BatchReadConfig brc = null;
+               int itemInfoCacheSize = ItemInfoCacheImpl.DEFAULT_CACHE_SIZE;
+               int maximumHttpConnections = 0;
+
+               // since JCR-4120 the default workspace name is no longer set to 'default'
+               // note: if running with JCR Server < 1.5 a default workspace name must
+               // therefore be configured
+               String workspaceNameDefault = null;
+
+               if (parameters != null) {
+                       // batchRead config
+                       Object param = parameters.get(PARAM_BATCHREAD_CONFIG);
+                       if (param != null && param instanceof BatchReadConfig) {
+                               brc = (BatchReadConfig) param;
+                       }
+
+                       // itemCache size config
+                       param = parameters.get(PARAM_ITEMINFO_CACHE_SIZE);
+                       if (param != null) {
+                               try {
+                                       itemInfoCacheSize = Integer.parseInt(param.toString());
+                               } catch (NumberFormatException e) {
+                                       // ignore, use default
+                               }
+                       }
+
+                       // max connections config
+                       param = parameters.get(PARAM_MAX_CONNECTIONS);
+                       if (param != null) {
+                               try {
+                                       maximumHttpConnections = Integer.parseInt(param.toString());
+                               } catch (NumberFormatException e) {
+                                       // using default
+                               }
+                       }
+
+                       param = parameters.get(PARAM_WORKSPACE_NAME_DEFAULT);
+                       if (param != null) {
+                               workspaceNameDefault = param.toString();
+                       }
+               }
+
+               if (maximumHttpConnections > 0) {
+                       return new ClientDavexRepositoryService(uri, workspaceNameDefault, brc, itemInfoCacheSize,
+                                       maximumHttpConnections);
+               } else {
+                       return new ClientDavexRepositoryService(uri, workspaceNameDefault, brc, itemInfoCacheSize);
+               }
+       }
+
+}
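
For illustration, the system-property fallback described above can be exercised as follows (the endpoint URL is an assumption; ClientDavexRepositoryFactory.JACKRABBIT_DAVEX_URI is an alias for PARAM_REPOSITORY_URI):

    System.setProperty(ClientDavexRepositoryFactory.JACKRABBIT_DAVEX_URI, "http://localhost:7070/jcr");
    RepositoryService service = new ClientDavexRepositoryServiceFactory().createRepositoryService(null);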
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/client/JackrabbitClient.java b/org.argeo.core/src/org/argeo/jackrabbit/client/JackrabbitClient.java
new file mode 100644 (file)
index 0000000..e08f4d6
--- /dev/null
@@ -0,0 +1,125 @@
+package org.argeo.jackrabbit.client;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.jcr.Node;
+import javax.jcr.NodeIterator;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.RepositoryFactory;
+import javax.jcr.Session;
+
+import org.apache.http.client.protocol.HttpClientContext;
+import org.apache.http.protocol.HttpContext;
+import org.apache.jackrabbit.jcr2dav.Jcr2davRepositoryFactory;
+import org.apache.jackrabbit.jcr2spi.Jcr2spiRepositoryFactory;
+import org.apache.jackrabbit.jcr2spi.RepositoryImpl;
+import org.apache.jackrabbit.spi.RepositoryService;
+import org.apache.jackrabbit.spi.RepositoryServiceFactory;
+import org.apache.jackrabbit.spi.SessionInfo;
+import org.apache.jackrabbit.spi.commons.ItemInfoCacheImpl;
+import org.apache.jackrabbit.spi2davex.BatchReadConfig;
+import org.apache.jackrabbit.spi2davex.RepositoryServiceImpl;
+import org.apache.jackrabbit.spi2davex.Spi2davexRepositoryServiceFactory;
+import org.argeo.jcr.JcrUtils;
+
+/** Minimal client to test JCR DAVEX connectivity. */
+public class JackrabbitClient {
+       final static String JACKRABBIT_REPOSITORY_URI = "org.apache.jackrabbit.repository.uri";
+       final static String JACKRABBIT_DAVEX_URI = "org.apache.jackrabbit.spi2davex.uri";
+       final static String JACKRABBIT_REMOTE_DEFAULT_WORKSPACE = "org.apache.jackrabbit.spi2davex.WorkspaceNameDefault";
+
+       public static void main(String[] args) {
+               String repoUri = args.length == 0 ? "http://root:demo@localhost:7070/jcr/ego" : args[0];
+               String workspace = args.length < 2 ? "home" : args[1];
+
+               Repository repository = null;
+               Session session = null;
+
+               URI uri;
+               try {
+                       uri = new URI(repoUri);
+               } catch (URISyntaxException e1) {
+                       throw new IllegalArgumentException(e1);
+               }
+
+               if (uri.getScheme().equals("http") || uri.getScheme().equals("https")) {
+
+                       RepositoryFactory repositoryFactory = new Jcr2davRepositoryFactory() {
+                               @SuppressWarnings("rawtypes")
+                               public Repository getRepository(Map parameters) throws RepositoryException {
+                                       RepositoryServiceFactory repositoryServiceFactory = new Spi2davexRepositoryServiceFactory() {
+
+                                               @Override
+                                               public RepositoryService createRepositoryService(Map<?, ?> parameters)
+                                                               throws RepositoryException {
+                                                       Object uri = parameters.get(JACKRABBIT_DAVEX_URI);
+                                                       Object defaultWorkspace = parameters.get(JACKRABBIT_REMOTE_DEFAULT_WORKSPACE);
+                                                       BatchReadConfig brc = null;
+                                                       return new RepositoryServiceImpl(uri.toString(), defaultWorkspace.toString(), brc,
+                                                                       ItemInfoCacheImpl.DEFAULT_CACHE_SIZE) {
+
+                                                               @Override
+                                                               protected HttpContext getContext(SessionInfo sessionInfo) throws RepositoryException {
+                                                                       HttpClientContext result = HttpClientContext.create();
+                                                                       result.setAuthCache(new NonSerialBasicAuthCache());
+                                                                       return result;
+                                                               }
+
+                                                       };
+                                               }
+                                       };
+                                       return RepositoryImpl.create(
+                                                       new Jcr2spiRepositoryFactory.RepositoryConfigImpl(repositoryServiceFactory, parameters));
+                               }
+                       };
+                       Map<String, String> params = new HashMap<String, String>();
+                       params.put(JACKRABBIT_DAVEX_URI, repoUri);
+                       // FIXME make it configurable
+                       params.put(JACKRABBIT_REMOTE_DEFAULT_WORKSPACE, "sys");
+
+                       try {
+                               repository = repositoryFactory.getRepository(params);
+                               if (repository != null)
+                                       session = repository.login(workspace);
+                               else
+                                       throw new IllegalArgumentException("Repository " + repoUri + " not found");
+                       } catch (RepositoryException e) {
+                               e.printStackTrace();
+                       }
+
+               } else {
+                       Path path = Paths.get(uri.getPath()); // TODO support local repositories
+               }
+
+               try {
+                       Node rootNode = session.getRootNode();
+                       NodeIterator nit = rootNode.getNodes();
+                       while (nit.hasNext()) {
+                               System.out.println(nit.nextNode().getPath());
+                       }
+
+                       Node newNode = JcrUtils.mkdirs(rootNode, "dir/subdir");
+                       System.out.println("Created folder " + newNode.getPath());
+                       Node newFile = JcrUtils.copyBytesAsFile(newNode, "test.txt", "TEST".getBytes());
+                       System.out.println("Created file " + newFile.getPath());
+                       try (BufferedReader reader = new BufferedReader(new InputStreamReader(JcrUtils.getFileAsStream(newFile)))) {
+                               System.out.println("Read " + reader.readLine());
+                       } catch (IOException e) {
+                               e.printStackTrace();
+                       }
+                       newNode.getParent().remove();
+                       System.out.println("Removed new nodes");
+               } catch (RepositoryException e) {
+                       e.printStackTrace();
+               }
+       }
+}
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/client/NonSerialBasicAuthCache.java b/org.argeo.core/src/org/argeo/jackrabbit/client/NonSerialBasicAuthCache.java
new file mode 100644 (file)
index 0000000..3fb0db9
--- /dev/null
@@ -0,0 +1,41 @@
+package org.argeo.jackrabbit.client;
+
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.http.HttpHost;
+import org.apache.http.auth.AuthScheme;
+import org.apache.http.client.AuthCache;
+
+/**
+ * Implementation of {@link AuthCache} which does not rely on Java
+ * serialization, since serialization is not supported by GraalVM at this stage.
+ */
+public class NonSerialBasicAuthCache implements AuthCache {
+       private final Map<HttpHost, AuthScheme> cache;
+
+       public NonSerialBasicAuthCache() {
+               cache = new ConcurrentHashMap<HttpHost, AuthScheme>();
+       }
+
+       @Override
+       public void put(HttpHost host, AuthScheme authScheme) {
+               cache.put(host, authScheme);
+       }
+
+       @Override
+       public AuthScheme get(HttpHost host) {
+               return cache.get(host);
+       }
+
+       @Override
+       public void remove(HttpHost host) {
+               cache.remove(host);
+       }
+
+       @Override
+       public void clear() {
+               cache.clear();
+       }
+
+}
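
For illustration, a sketch of pre-populating this cache for preemptive BASIC authentication with HttpClient 4.x (host, port and scheme are placeholders; BasicScheme is org.apache.http.impl.auth.BasicScheme):

    HttpClientContext context = HttpClientContext.create();
    NonSerialBasicAuthCache authCache = new NonSerialBasicAuthCache();
    // register the BASIC scheme for the target host so that credentials are sent preemptively
    authCache.put(new HttpHost("localhost", 7070, "http"), new BasicScheme());
    context.setAuthCache(authCache);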
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/fs/AbstractJackrabbitFsProvider.java b/org.argeo.core/src/org/argeo/jackrabbit/fs/AbstractJackrabbitFsProvider.java
new file mode 100644 (file)
index 0000000..a2eb983
--- /dev/null
@@ -0,0 +1,7 @@
+package org.argeo.jackrabbit.fs;
+
+import org.argeo.jcr.fs.JcrFileSystemProvider;
+
+public abstract class AbstractJackrabbitFsProvider extends JcrFileSystemProvider {
+
+}
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/fs/DavexFsProvider.java b/org.argeo.core/src/org/argeo/jackrabbit/fs/DavexFsProvider.java
new file mode 100644 (file)
index 0000000..57d348b
--- /dev/null
@@ -0,0 +1,144 @@
+package org.argeo.jackrabbit.fs;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.DirectoryStream;
+import java.nio.file.FileSystem;
+import java.nio.file.FileSystemAlreadyExistsException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.jcr.Repository;
+import javax.jcr.RepositoryFactory;
+import javax.jcr.Session;
+
+import org.argeo.jackrabbit.client.ClientDavexRepositoryFactory;
+import org.argeo.jcr.fs.JcrFileSystem;
+import org.argeo.jcr.fs.JcrFsException;
+
+/**
+ * A file system provider based on a JCR repository remotely accessed via the
+ * DAVEX protocol.
+ */
+public class DavexFsProvider extends AbstractJackrabbitFsProvider {
+//     final static String JACKRABBIT_REPOSITORY_URI = "org.apache.jackrabbit.repository.uri";
+//     final static String JACKRABBIT_REMOTE_DEFAULT_WORKSPACE = "org.apache.jackrabbit.spi2davex.WorkspaceNameDefault";
+
+       private Map<String, JcrFileSystem> fileSystems = new HashMap<>();
+
+       @Override
+       public String getScheme() {
+               return "davex";
+       }
+
+       @Override
+       public FileSystem newFileSystem(URI uri, Map<String, ?> env) throws IOException {
+               if (uri.getHost() == null)
+                       throw new IllegalArgumentException("A host must be provided");
+               try {
+                       URI repoUri = new URI("http", uri.getUserInfo(), uri.getHost(), uri.getPort(), uri.getPath(), null, null);
+                       String repoKey = repoUri.toString();
+                       if (fileSystems.containsKey(repoKey))
+                               throw new FileSystemAlreadyExistsException("CMS file system already exists for " + repoKey);
+                       RepositoryFactory repositoryFactory = new ClientDavexRepositoryFactory();
+                       return tryGetRepo(repositoryFactory, repoUri, "home");
+               } catch (URISyntaxException e) {
+                       throw new IllegalArgumentException("Cannot open file system " + uri, e);
+               }
+       }
+
+       private JcrFileSystem tryGetRepo(RepositoryFactory repositoryFactory, URI repoUri, String workspace)
+                       throws IOException {
+               Map<String, String> params = new HashMap<String, String>();
+               params.put(ClientDavexRepositoryFactory.JACKRABBIT_DAVEX_URI, repoUri.toString());
+               // FIXME make it configurable
+               params.put(ClientDavexRepositoryFactory.JACKRABBIT_REMOTE_DEFAULT_WORKSPACE, "sys");
+               Repository repository = null;
+               Session session = null;
+               try {
+                       repository = repositoryFactory.getRepository(params);
+                       if (repository != null)
+                               session = repository.login(workspace);
+               } catch (Exception e) {
+                       // silent
+               }
+
+               if (session == null) {
+                       if (repoUri.getPath() == null || repoUri.getPath().equals("/"))
+                               return null;
+                       String repoUriStr = repoUri.toString();
+                       if (repoUriStr.endsWith("/"))
+                               repoUriStr = repoUriStr.substring(0, repoUriStr.length() - 1);
+                       String nextRepoUriStr = repoUriStr.substring(0, repoUriStr.lastIndexOf('/'));
+                       String nextWorkspace = repoUriStr.substring(repoUriStr.lastIndexOf('/') + 1);
+                       URI nextUri;
+                       try {
+                               nextUri = new URI(nextRepoUriStr);
+                       } catch (URISyntaxException e) {
+                               throw new IllegalArgumentException("Badly formatted URI", e);
+                       }
+                       return tryGetRepo(repositoryFactory, nextUri, nextWorkspace);
+               } else {
+                       JcrFileSystem fileSystem = new JcrFileSystem(this, repository);
+                       fileSystems.put(repoUri.toString() + "/" + workspace, fileSystem);
+                       return fileSystem;
+               }
+       }
+
+       @Override
+       public FileSystem getFileSystem(URI uri) {
+               return currentUserFileSystem(uri);
+       }
+
+       @Override
+       public Path getPath(URI uri) {
+               JcrFileSystem fileSystem = currentUserFileSystem(uri);
+               if (fileSystem == null)
+                       try {
+                               fileSystem = (JcrFileSystem) newFileSystem(uri, new HashMap<String, Object>());
+                       } catch (IOException e) {
+                               throw new JcrFsException("Could not autocreate file system", e);
+                       }
+               URI repoUri = null;
+               try {
+                       repoUri = new URI("http", uri.getUserInfo(), uri.getHost(), uri.getPort(), uri.getPath(), null, null);
+               } catch (URISyntaxException e) {
+                       throw new IllegalArgumentException(e);
+               }
+               String uriStr = repoUri.toString();
+               String localPath = null;
+               for (String key : fileSystems.keySet()) {
+                       if (uriStr.startsWith(key)) {
+                               localPath = uriStr.substring(key.length());
+                       }
+               }
+               if ("".equals(localPath))
+                       localPath = "/";
+               return fileSystem.getPath(localPath);
+       }
+
+       private JcrFileSystem currentUserFileSystem(URI uri) {
+               for (String key : fileSystems.keySet()) {
+                       if (uri.toString().startsWith(key))
+                               return fileSystems.get(key);
+               }
+               return null;
+       }
+
+       public static void main(String args[]) {
+               try {
+                       DavexFsProvider fsProvider = new DavexFsProvider();
+                       Path path = fsProvider.getPath(new URI("davex://root:demo@localhost:7070/jcr/ego/"));
+                       System.out.println(path);
+                       DirectoryStream<Path> ds = Files.newDirectoryStream(path);
+                       for (Path p : ds) {
+                               System.out.println("- " + p);
+                       }
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+       }
+}
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/fs/JackrabbitMemoryFsProvider.java b/org.argeo.core/src/org/argeo/jackrabbit/fs/JackrabbitMemoryFsProvider.java
new file mode 100644 (file)
index 0000000..e3a70d0
--- /dev/null
@@ -0,0 +1,87 @@
+package org.argeo.jackrabbit.fs;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.nio.file.FileSystem;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.jcr.Credentials;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.SimpleCredentials;
+
+import org.apache.jackrabbit.core.RepositoryImpl;
+import org.apache.jackrabbit.core.config.RepositoryConfig;
+import org.argeo.jcr.fs.JcrFileSystem;
+import org.argeo.jcr.fs.JcrFsException;
+
+public class JackrabbitMemoryFsProvider extends AbstractJackrabbitFsProvider {
+       private RepositoryImpl repository;
+       private JcrFileSystem fileSystem;
+
+       private Credentials credentials;
+
+       public JackrabbitMemoryFsProvider() {
+               String username = System.getProperty("user.name");
+               credentials = new SimpleCredentials(username, username.toCharArray());
+       }
+
+       @Override
+       public String getScheme() {
+               return "jcr+memory";
+       }
+
+       @Override
+       public FileSystem newFileSystem(URI uri, Map<String, ?> env) throws IOException {
+               try {
+                       Path tempDir = Files.createTempDirectory("fs-memory");
+                       URL confUrl = JackrabbitMemoryFsProvider.class.getResource("fs-memory.xml");
+                       RepositoryConfig repositoryConfig = RepositoryConfig.create(confUrl.toURI(), tempDir.toString());
+                       repository = RepositoryImpl.create(repositoryConfig);
+                       postRepositoryCreation(repository);
+                       fileSystem = new JcrFileSystem(this, repository, credentials);
+                       return fileSystem;
+               } catch (RepositoryException | URISyntaxException e) {
+                       throw new IOException("Cannot login to repository", e);
+               }
+       }
+
+       @Override
+       public FileSystem getFileSystem(URI uri) {
+               return fileSystem;
+       }
+
+       @Override
+       public Path getPath(URI uri) {
+               String path = uri.getPath();
+               if (fileSystem == null)
+                       try {
+                               newFileSystem(uri, new HashMap<String, Object>());
+                       } catch (IOException e) {
+                               throw new JcrFsException("Could not autocreate file system", e);
+                       }
+               return fileSystem.getPath(path);
+       }
+
+       public Repository getRepository() {
+               return repository;
+       }
+
+       public Session login() throws RepositoryException {
+               return getRepository().login(credentials);
+       }
+
+       /**
+        * Called after the repository has been created and before the file system is
+        * created.
+        */
+       protected void postRepositoryCreation(RepositoryImpl repositoryImpl) throws RepositoryException {
+
+       }
+}
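
For illustration, a sketch of using the in-memory provider through the NIO API (path and content are arbitrary; IOException handling omitted, and writing assumes the underlying JcrFileSystemProvider supports byte channels):

    JackrabbitMemoryFsProvider provider = new JackrabbitMemoryFsProvider();
    Path file = provider.getPath(URI.create("jcr+memory:///test.txt")); // auto-creates the file system
    Files.write(file, "hello".getBytes(StandardCharsets.UTF_8));
    System.out.println(Files.readAllLines(file));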
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/fs/fs-memory.xml b/org.argeo.core/src/org/argeo/jackrabbit/fs/fs-memory.xml
new file mode 100644 (file)
index 0000000..f2541fb
--- /dev/null
@@ -0,0 +1,57 @@
+<?xml version="1.0"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<Repository>
+       <!-- File system and datastore -->
+       <FileSystem
+               class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+
+       <!-- Workspace templates -->
+       <Workspaces rootPath="${rep.home}/workspaces"
+               defaultWorkspace="main" configRootPath="/workspaces" />
+       <Workspace name="${wsp.name}">
+               <FileSystem
+                       class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+               </PersistenceManager>
+               <SearchIndex
+                       class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+                       <param name="path" value="${wsp.home}/index" />
+                       <param name="directoryManagerClass"
+                               value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
+                       <param name="extractorPoolSize" value="0" />
+                       <FileSystem
+                               class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+               </SearchIndex>
+       </Workspace>
+
+       <!-- Versioning -->
+       <Versioning rootPath="${rep.home}/version">
+               <FileSystem
+                       class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+               </PersistenceManager>
+       </Versioning>
+
+       <!-- Indexing -->
+       <SearchIndex
+               class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+               <param name="path" value="${rep.home}/index" />
+               <param name="directoryManagerClass"
+                       value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
+               <param name="extractorPoolSize" value="0" />
+               <FileSystem
+                       class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+       </SearchIndex>
+
+       <!-- Security -->
+       <Security appName="Jackrabbit">
+               <LoginModule
+                       class="org.apache.jackrabbit.core.security.SimpleLoginModule" />
+               <!-- <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager" -->
+               <!-- workspaceName="security" /> -->
+               <!-- <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" 
+                       /> -->
+       </Security>
+</Repository>
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/fs/package-info.java b/org.argeo.core/src/org/argeo/jackrabbit/fs/package-info.java
new file mode 100644 (file)
index 0000000..c9ec2c3
--- /dev/null
@@ -0,0 +1,2 @@
+/** Java NIO file system implementation based on Jackrabbit. */
+package org.argeo.jackrabbit.fs;
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/package-info.java b/org.argeo.core/src/org/argeo/jackrabbit/package-info.java
new file mode 100644 (file)
index 0000000..17497d6
--- /dev/null
@@ -0,0 +1,2 @@
+/** Generic Jackrabbit utilities. */
+package org.argeo.jackrabbit;
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/repository-h2.xml b/org.argeo.core/src/org/argeo/jackrabbit/repository-h2.xml
new file mode 100644 (file)
index 0000000..0526762
--- /dev/null
@@ -0,0 +1,82 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<Repository>
+       <!-- Shared datasource -->
+       <DataSources>
+               <DataSource name="dataSource">
+                       <param name="driver" value="org.h2.Driver" />
+                       <param name="url" value="${dburl}" />
+                       <param name="user" value="${dbuser}" />
+                       <param name="password" value="${dbpassword}" />
+                       <param name="databaseType" value="h2" />
+                       <param name="maxPoolSize" value="${maxPoolSize}" />
+               </DataSource>
+       </DataSources>
+
+       <!-- File system and datastore -->
+       <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+               <param name="dataSourceName" value="dataSource" />
+               <param name="schema" value="default" />
+               <param name="schemaObjectPrefix" value="fs_" />
+       </FileSystem>
+       <DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
+               <param name="path" value="${rep.home}/datastore" />
+       </DataStore>
+
+       <!-- Workspace templates -->
+       <Workspaces rootPath="${rep.home}/workspaces"
+               defaultWorkspace="${defaultWorkspace}" />
+       <Workspace name="${wsp.name}">
+               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schema" value="default" />
+                       <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
+               </FileSystem>
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
+                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
+               </PersistenceManager>
+               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+                       <param name="path" value="${wsp.home}/index" />
+                       <param name="extractorPoolSize" value="${extractorPoolSize}" />
+                       <param name="cacheSize" value="${searchCacheSize}" />
+                       <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+               </SearchIndex>
+               <WorkspaceSecurity>
+                       <AccessControlProvider
+                               class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+               </WorkspaceSecurity>
+       </Workspace>
+
+       <!-- Versioning -->
+       <Versioning rootPath="${rep.home}/version">
+               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schema" value="default" />
+                       <param name="schemaObjectPrefix" value="fs_ver_" />
+               </FileSystem>
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schemaObjectPrefix" value="pm_ver_" />
+                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
+               </PersistenceManager>
+       </Versioning>
+
+       <!-- Indexing -->
+       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+               <param name="path" value="${rep.home}/index" />
+               <param name="extractorPoolSize" value="${extractorPoolSize}" />
+               <param name="cacheSize" value="${searchCacheSize}" />
+               <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+       </SearchIndex>
+
+       <!-- Security -->
+       <Security appName="Jackrabbit">
+               <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+                       workspaceName="security" />
+               <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+       </Security>
+</Repository>
\ No newline at end of file
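
The ${...} placeholders in this template (and in the repository templates that follow) are interpolated when the configuration is parsed. A sketch, assuming Jackrabbit's RepositoryConfig.create(InputSource, Properties) overload is used and with purely illustrative values (imports and exception handling omitted):

    Properties vars = new Properties();
    vars.setProperty("rep.home", "/srv/jcr/repo"); // repository home, also resolves ${rep.home}
    vars.setProperty("defaultWorkspace", "main");
    vars.setProperty("dburl", "jdbc:h2:/srv/jcr/repo/h2/jcr");
    vars.setProperty("dbuser", "sa");
    vars.setProperty("dbpassword", "");
    vars.setProperty("maxPoolSize", "10");
    vars.setProperty("bundleCacheMB", "8");
    vars.setProperty("extractorPoolSize", "0");
    vars.setProperty("searchCacheSize", "1000");
    vars.setProperty("maxVolatileIndexSize", "1048576");
    try (InputStream in = SomeConfigClass.class.getResourceAsStream("repository-h2.xml")) { // SomeConfigClass is hypothetical
            RepositoryConfig config = RepositoryConfig.create(new InputSource(in), vars);
            RepositoryImpl repository = RepositoryImpl.create(config);
            // ... use the repository, then repository.shutdown();
    }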
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/repository-localfs.xml b/org.argeo.core/src/org/argeo/jackrabbit/repository-localfs.xml
new file mode 100644 (file)
index 0000000..3d24708
--- /dev/null
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<Repository>
+       <!-- File system and datastore -->
+       <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
+               <param name="path" value="${rep.home}/repository" />
+       </FileSystem>
+       <DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
+               <param name="path" value="${rep.home}/datastore" />
+       </DataStore>
+
+       <!-- Workspace templates -->
+       <Workspaces rootPath="${rep.home}/workspaces"
+               defaultWorkspace="${defaultWorkspace}" />
+       <Workspace name="${wsp.name}">
+               <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
+                       <param name="path" value="${wsp.home}" />
+               </FileSystem>
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
+               </PersistenceManager>
+               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+                       <param name="path" value="${wsp.home}/index" />
+                       <param name="extractorPoolSize" value="${extractorPoolSize}" />
+                       <param name="cacheSize" value="${searchCacheSize}" />
+                       <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+               </SearchIndex>
+               <WorkspaceSecurity>
+                       <AccessControlProvider
+                               class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+               </WorkspaceSecurity>
+       </Workspace>
+
+       <!-- Versioning -->
+       <Versioning rootPath="${rep.home}/version">
+               <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
+                       <param name="path" value="${rep.home}/version" />
+               </FileSystem>
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
+               </PersistenceManager>
+       </Versioning>
+
+       <!-- Indexing -->
+       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+               <param name="path" value="${rep.home}/index" />
+               <param name="extractorPoolSize" value="${extractorPoolSize}" />
+               <param name="cacheSize" value="${searchCacheSize}" />
+               <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+       </SearchIndex>
+
+       <!-- Security -->
+       <Security appName="Jackrabbit">
+               <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+                       workspaceName="security" />
+               <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+       </Security>
+</Repository>
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/repository-memory.xml b/org.argeo.core/src/org/argeo/jackrabbit/repository-memory.xml
new file mode 100644 (file)
index 0000000..ecee5bd
--- /dev/null
@@ -0,0 +1,55 @@
+<?xml version="1.0"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<Repository>
+       <!-- File system and datastore -->
+       <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+
+       <!-- Workspace templates -->
+       <Workspaces rootPath="${rep.home}/workspaces"
+               defaultWorkspace="${defaultWorkspace}" configRootPath="/workspaces" />
+       <Workspace name="${wsp.name}">
+               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+                       <param name="blobFSBlockSize" value="1" />
+                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
+               </PersistenceManager>
+               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+                       <param name="path" value="${wsp.home}/index" />
+                       <param name="directoryManagerClass"
+                               value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
+                       <param name="extractorPoolSize" value="${extractorPoolSize}" />
+                       <param name="cacheSize" value="${searchCacheSize}" />
+                       <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+                       <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+               </SearchIndex>
+       </Workspace>
+
+       <!-- Versioning -->
+       <Versioning rootPath="${rep.home}/version">
+               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+                       <param name="blobFSBlockSize" value="1" />
+                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
+               </PersistenceManager>
+       </Versioning>
+
+       <!-- Indexing -->
+       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+               <param name="path" value="${rep.home}/index" />
+               <param name="directoryManagerClass"
+                       value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
+               <param name="extractorPoolSize" value="${extractorPoolSize}" />
+               <param name="cacheSize" value="${searchCacheSize}" />
+               <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+       </SearchIndex>
+
+       <!-- Security -->
+       <Security appName="Jackrabbit">
+               <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+                       workspaceName="security" />
+               <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+       </Security>
+</Repository>
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/repository-postgresql-ds.xml b/org.argeo.core/src/org/argeo/jackrabbit/repository-postgresql-ds.xml
new file mode 100644 (file)
index 0000000..07a0d04
--- /dev/null
@@ -0,0 +1,82 @@
+<?xml version="1.0"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<Repository>
+       <!-- Shared datasource -->
+       <DataSources>
+               <DataSource name="dataSource">
+                       <param name="driver" value="org.postgresql.Driver" />
+                       <param name="url" value="${dburl}" />
+                       <param name="user" value="${dbuser}" />
+                       <param name="password" value="${dbpassword}" />
+                       <param name="databaseType" value="postgresql" />
+                       <param name="maxPoolSize" value="${maxPoolSize}" />
+               </DataSource>
+       </DataSources>
+
+       <!-- File system and datastore -->
+       <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+               <param name="dataSourceName" value="dataSource" />
+               <param name="schema" value="postgresql" />
+               <param name="schemaObjectPrefix" value="fs_" />
+       </FileSystem>
+       <DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
+               <param name="path" value="${rep.home}/datastore" />
+       </DataStore>
+
+       <!-- Workspace templates -->
+       <Workspaces rootPath="${rep.home}/workspaces"
+               defaultWorkspace="${defaultWorkspace}" />
+       <Workspace name="${wsp.name}">
+               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schema" value="postgresql" />
+                       <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
+               </FileSystem>
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
+                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
+               </PersistenceManager>
+               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+                       <param name="path" value="${wsp.home}/index" />
+                       <param name="extractorPoolSize" value="${extractorPoolSize}" />
+                       <param name="cacheSize" value="${searchCacheSize}" />
+                       <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+               </SearchIndex>
+               <WorkspaceSecurity>
+                       <AccessControlProvider
+                               class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+               </WorkspaceSecurity>
+       </Workspace>
+
+       <!-- Versioning -->
+       <Versioning rootPath="${rep.home}/version">
+               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schema" value="postgresql" />
+                       <param name="schemaObjectPrefix" value="fs_ver_" />
+               </FileSystem>
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schemaObjectPrefix" value="pm_ver_" />
+                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
+               </PersistenceManager>
+       </Versioning>
+
+       <!-- Indexing -->
+       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+               <param name="path" value="${rep.home}/index" />
+               <param name="extractorPoolSize" value="${extractorPoolSize}" />
+               <param name="cacheSize" value="${searchCacheSize}" />
+               <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+       </SearchIndex>
+
+       <!-- Security -->
+       <Security appName="Jackrabbit">
+               <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+                       workspaceName="security" />
+               <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+       </Security>
+</Repository>
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/repository-postgresql.xml b/org.argeo.core/src/org/argeo/jackrabbit/repository-postgresql.xml
new file mode 100644 (file)
index 0000000..9677828
--- /dev/null
@@ -0,0 +1,79 @@
+<?xml version="1.0"?>
+<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
+<Repository>
+       <!-- Shared datasource -->
+       <DataSources>
+               <DataSource name="dataSource">
+                       <param name="driver" value="org.postgresql.Driver" />
+                       <param name="url" value="${dburl}" />
+                       <param name="user" value="${dbuser}" />
+                       <param name="password" value="${dbpassword}" />
+                       <param name="databaseType" value="postgresql" />
+                       <param name="maxPoolSize" value="${maxPoolSize}" />
+               </DataSource>
+       </DataSources>
+
+       <!-- File system and datastore -->
+       <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+               <param name="dataSourceName" value="dataSource" />
+               <param name="schema" value="postgresql" />
+               <param name="schemaObjectPrefix" value="fs_" />
+       </FileSystem>
+
+       <!-- Workspace templates -->
+       <Workspaces rootPath="${rep.home}/workspaces"
+               defaultWorkspace="${defaultWorkspace}" />
+       <Workspace name="${wsp.name}">
+               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schema" value="postgresql" />
+                       <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
+               </FileSystem>
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
+                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
+               </PersistenceManager>
+               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+                       <param name="path" value="${wsp.home}/index" />
+                       <param name="extractorPoolSize" value="${extractorPoolSize}" />
+                       <param name="cacheSize" value="${searchCacheSize}" />
+                       <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+               </SearchIndex>
+               <WorkspaceSecurity>
+                       <AccessControlProvider
+                               class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
+               </WorkspaceSecurity>
+       </Workspace>
+
+       <!-- Versioning -->
+       <Versioning rootPath="${rep.home}/version">
+               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schema" value="postgresql" />
+                       <param name="schemaObjectPrefix" value="fs_ver_" />
+               </FileSystem>
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schemaObjectPrefix" value="pm_ver_" />
+                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
+               </PersistenceManager>
+       </Versioning>
+
+       <!-- Indexing -->
+       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+               <param name="path" value="${rep.home}/index" />
+               <param name="extractorPoolSize" value="${extractorPoolSize}" />
+               <param name="cacheSize" value="${searchCacheSize}" />
+               <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
+       </SearchIndex>
+
+       <!-- Security -->
+       <Security appName="Jackrabbit">
+               <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
+                       workspaceName="security" />
+               <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
+       </Security>
+</Repository>
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/security/JackrabbitSecurityUtils.java b/org.argeo.core/src/org/argeo/jackrabbit/security/JackrabbitSecurityUtils.java
new file mode 100644 (file)
index 0000000..a75c795
--- /dev/null
@@ -0,0 +1,80 @@
+package org.argeo.jackrabbit.security;
+
+import java.security.Principal;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.security.Privilege;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.jackrabbit.api.security.JackrabbitAccessControlList;
+import org.apache.jackrabbit.api.security.JackrabbitAccessControlManager;
+import org.argeo.jcr.JcrUtils;
+
+/** Utilities around Jackrabbit security extensions. */
+public class JackrabbitSecurityUtils {
+       private final static Log log = LogFactory.getLog(JackrabbitSecurityUtils.class);
+
+       /**
+        * Convenience method for denying a single privilege (typically jcr:all) to a
+        * principal (user or role).
+        */
+       public synchronized static void denyPrivilege(Session session, String path, String principal, String privilege)
+                       throws RepositoryException {
+               List<Privilege> privileges = new ArrayList<Privilege>();
+               privileges.add(session.getAccessControlManager().privilegeFromName(privilege));
+               denyPrivileges(session, path, () -> principal, privileges);
+       }
+
+       /**
+        * Deny privileges on a path to a {@link Principal}. The path must already
+        * exist. Session is saved. Synchronized to prevent concurrent modifications of
+        * the same node.
+        */
+       public synchronized static Boolean denyPrivileges(Session session, String path, Principal principal,
+                       List<Privilege> privs) throws RepositoryException {
+               // make sure the session is in line with the persisted state
+               session.refresh(false);
+               JackrabbitAccessControlManager acm = (JackrabbitAccessControlManager) session.getAccessControlManager();
+               JackrabbitAccessControlList acl = (JackrabbitAccessControlList) JcrUtils.getAccessControlList(acm, path);
+
+//             accessControlEntries: for (AccessControlEntry ace : acl.getAccessControlEntries()) {
+//                     Principal currentPrincipal = ace.getPrincipal();
+//                     if (currentPrincipal.getName().equals(principal.getName())) {
+//                             Privilege[] currentPrivileges = ace.getPrivileges();
+//                             if (currentPrivileges.length != privs.size())
+//                                     break accessControlEntries;
+//                             for (int i = 0; i < currentPrivileges.length; i++) {
+//                                     Privilege currP = currentPrivileges[i];
+//                                     Privilege p = privs.get(i);
+//                                     if (!currP.getName().equals(p.getName())) {
+//                                             break accessControlEntries;
+//                                     }
+//                             }
+//                             return false;
+//                     }
+//             }
+
+               Privilege[] privileges = privs.toArray(new Privilege[privs.size()]);
+               acl.addEntry(principal, privileges, false);
+               acm.setPolicy(path, acl);
+               if (log.isDebugEnabled()) {
+                       StringBuffer privBuf = new StringBuffer();
+                       for (Privilege priv : privs)
+                               privBuf.append(priv.getName());
+                       log.debug("Denied privileges " + privBuf + " to " + principal.getName() + " on " + path + " in '"
+                                       + session.getWorkspace().getName() + "'");
+               }
+               session.refresh(true);
+               session.save();
+               return true;
+       }
+
+       /** Singleton. */
+       private JackrabbitSecurityUtils() {
+
+       }
+}
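
For orientation, a minimal usage sketch of JackrabbitSecurityUtils.denyPrivilege (hypothetical, not part of the commit); the package name and the assumption that an admin-capable Session can be obtained from repository.login() are illustrative only:

    package org.argeo.docs.examples;

    import javax.jcr.Repository;
    import javax.jcr.Session;

    import org.argeo.jackrabbit.security.JackrabbitSecurityUtils;

    /** Hypothetical helper: deny all privileges on a subtree to a given principal. */
    public class DenyPrivilegeExample {
        public static void denyAllTo(Repository repository, String workspace, String path, String principal)
                throws Exception {
            // an admin-level session is required in order to change access control policies
            Session adminSession = repository.login(workspace);
            try {
                JackrabbitSecurityUtils.denyPrivilege(adminSession, path, principal, "jcr:all");
            } finally {
                adminSession.logout();
            }
        }
    }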
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/security/package-info.java b/org.argeo.core/src/org/argeo/jackrabbit/security/package-info.java
new file mode 100644 (file)
index 0000000..f3a282c
--- /dev/null
@@ -0,0 +1,2 @@
+/** Generic Jackrabbit security utilities. */
+package org.argeo.jackrabbit.security;
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/unit/AbstractJackrabbitTestCase.java b/org.argeo.core/src/org/argeo/jackrabbit/unit/AbstractJackrabbitTestCase.java
new file mode 100644 (file)
index 0000000..f65432e
--- /dev/null
@@ -0,0 +1,51 @@
+package org.argeo.jackrabbit.unit;
+
+import java.net.URL;
+
+import javax.jcr.Repository;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.jackrabbit.core.RepositoryImpl;
+import org.apache.jackrabbit.core.config.RepositoryConfig;
+import org.argeo.jcr.unit.AbstractJcrTestCase;
+
+/** Factorizes the configuration of an in-memory transient repository. */
+public abstract class AbstractJackrabbitTestCase extends AbstractJcrTestCase {
+       protected RepositoryImpl repositoryImpl;
+
+       // protected File getRepositoryFile() throws Exception {
+       // Resource res = new ClassPathResource(
+       // "org/argeo/jackrabbit/unit/repository-memory.xml");
+       // return res.getFile();
+       // }
+
+       public AbstractJackrabbitTestCase() {
+               URL url = AbstractJackrabbitTestCase.class.getResource("jaas.config");
+               assert url != null;
+               System.setProperty("java.security.auth.login.config", url.toString());
+       }
+
+       protected Repository createRepository() throws Exception {
+               // Repository repository = new TransientRepository(getRepositoryFile(),
+               // getHomeDir());
+               RepositoryConfig repositoryConfig = RepositoryConfig.create(
+                               AbstractJackrabbitTestCase.class
+                                               .getResourceAsStream(getRepositoryConfigResource()),
+                               getHomeDir().getAbsolutePath());
+               RepositoryImpl repositoryImpl = RepositoryImpl.create(repositoryConfig);
+               return repositoryImpl;
+       }
+
+       protected String getRepositoryConfigResource() {
+               return "repository-memory.xml";
+       }
+
+       @Override
+       protected void clearRepository(Repository repository) throws Exception {
+               RepositoryImpl repositoryImpl = (RepositoryImpl) repository;
+               if (repositoryImpl != null)
+                       repositoryImpl.shutdown();
+               FileUtils.deleteDirectory(getHomeDir());
+       }
+
+}
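
As a rough illustration (hypothetical, not part of the commit), a concrete JUnit 3 style test built on the helper above could look as follows; the node name and assertions are arbitrary:

    package org.argeo.docs.examples;

    import javax.jcr.Node;
    import javax.jcr.Session;

    import org.argeo.jackrabbit.unit.AbstractJackrabbitTestCase;

    /** Hypothetical test exercising the in-memory repository set up by the base classes. */
    public class SimpleJcrTest extends AbstractJackrabbitTestCase {
        public void testCreateNode() throws Exception {
            Session session = session(); // repository creation and login are handled by the base classes
            Node node = session.getRootNode().addNode("test");
            session.save();
            assertTrue(session.itemExists("/test"));
            assertEquals("test", node.getName());
        }
    }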
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/unit/jaas.config b/org.argeo.core/src/org/argeo/jackrabbit/unit/jaas.config
new file mode 100644 (file)
index 0000000..0313f91
--- /dev/null
@@ -0,0 +1,7 @@
+TEST_JACKRABBIT_ADMIN {
+   org.argeo.cms.auth.DataAdminLoginModule requisite;
+};
+
+Jackrabbit {
+   org.argeo.security.jackrabbit.SystemJackrabbitLoginModule requisite;
+};
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/unit/package-info.java b/org.argeo.core/src/org/argeo/jackrabbit/unit/package-info.java
new file mode 100644 (file)
index 0000000..3b6143b
--- /dev/null
@@ -0,0 +1,2 @@
+/** Helpers for unit tests with Jackrabbit repositories. */
+package org.argeo.jackrabbit.unit;
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/unit/repository-h2.xml b/org.argeo.core/src/org/argeo/jackrabbit/unit/repository-h2.xml
new file mode 100644 (file)
index 0000000..348dc28
--- /dev/null
@@ -0,0 +1,81 @@
+<?xml version="1.0"?>
+<!DOCTYPE Repository PUBLIC "-//The Apache Software Foundation//DTD Jackrabbit 1.6//EN"
+                            "http://jackrabbit.apache.org/dtd/repository-2.0.dtd">
+<Repository>
+       <!-- Shared datasource -->
+       <DataSources>
+               <DataSource name="dataSource">
+                       <param name="driver" value="org.h2.Driver" />
+                       <param name="url" value="jdbc:h2:mem:jackrabbit" />
+                       <param name="user" value="sa" />
+                       <param name="password" value="" />
+                       <param name="databaseType" value="h2" />
+                       <param name="maxPoolSize" value="10" />
+               </DataSource>
+       </DataSources>
+
+       <!-- File system and datastore -->
+       <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+               <param name="dataSourceName" value="dataSource" />
+               <param name="schema" value="default" />
+               <param name="schemaObjectPrefix" value="fs_" />
+       </FileSystem>
+       <DataStore class="org.apache.jackrabbit.core.data.db.DbDataStore">
+               <param name="dataSourceName" value="dataSource" />
+               <param name="schemaObjectPrefix" value="ds_" />
+       </DataStore>
+
+       <!-- Workspace templates -->
+       <Workspaces rootPath="${rep.home}/workspaces"
+               defaultWorkspace="dev" />
+       <Workspace name="${wsp.name}">
+               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schema" value="default" />
+                       <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
+               </FileSystem>
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
+               </PersistenceManager>
+               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+                       <param name="path" value="${wsp.home}/index" />
+               </SearchIndex>
+       </Workspace>
+
+       <!-- Versioning -->
+       <Versioning rootPath="${rep.home}/version">
+               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schema" value="default" />
+                       <param name="schemaObjectPrefix" value="fs_ver_" />
+               </FileSystem>
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
+                       <param name="dataSourceName" value="dataSource" />
+                       <param name="schemaObjectPrefix" value="pm_ver_" />
+               </PersistenceManager>
+       </Versioning>
+
+       <!-- Indexing -->
+       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+               <param name="path" value="${rep.home}/repository/index" />
+               <param name="extractorPoolSize" value="2" />
+               <param name="supportHighlighting" value="true" />
+       </SearchIndex>
+
+       <!-- Security -->
+       <Security appName="Jackrabbit">
+               <SecurityManager
+                       class="org.apache.jackrabbit.core.security.simple.SimpleSecurityManager"
+                       workspaceName="security" />
+               <AccessManager
+                       class="org.apache.jackrabbit.core.security.simple.SimpleAccessManager" />
+               <LoginModule
+                       class="org.apache.jackrabbit.core.security.simple.SimpleLoginModule">
+                       <param name="anonymousId" value="anonymous" />
+                       <param name="adminId" value="admin" />
+               </LoginModule>
+       </Security>
+</Repository>
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jackrabbit/unit/repository-memory.xml b/org.argeo.core/src/org/argeo/jackrabbit/unit/repository-memory.xml
new file mode 100644 (file)
index 0000000..8395424
--- /dev/null
@@ -0,0 +1,72 @@
+<?xml version="1.0"?>
+<!--
+
+    Copyright (C) 2007-2012 Argeo GmbH
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+            http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+
+-->
+<!DOCTYPE Repository PUBLIC "-//The Apache Software Foundation//DTD Jackrabbit 1.6//EN"
+                            "http://jackrabbit.apache.org/dtd/repository-2.0.dtd">
+<Repository>
+       <!-- File system and datastore -->
+       <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+
+       <!-- Workspace templates -->
+       <Workspaces rootPath="${rep.home}/workspaces"
+               defaultWorkspace="main" configRootPath="/workspaces" />
+       <Workspace name="${wsp.name}">
+               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+                       <param name="blobFSBlockSize" value="1" />
+               </PersistenceManager>
+               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+                       <param name="path" value="${rep.home}/repository/index" />
+                       <param name="directoryManagerClass"
+                               value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
+                       <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+               </SearchIndex>
+       </Workspace>
+
+       <!-- Versioning -->
+       <Versioning rootPath="${rep.home}/version">
+               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+               <PersistenceManager
+                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
+                       <param name="blobFSBlockSize" value="1" />
+               </PersistenceManager>
+       </Versioning>
+
+       <!-- Indexing -->
+       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
+               <param name="path" value="${rep.home}/repository/index" />
+               <param name="directoryManagerClass"
+                       value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
+               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
+       </SearchIndex>
+
+       <!-- Security -->
+       <Security appName="Jackrabbit">
+               <SecurityManager
+                       class="org.apache.jackrabbit.core.security.simple.SimpleSecurityManager"
+                       workspaceName="security" />
+               <AccessManager
+                       class="org.apache.jackrabbit.core.security.simple.SimpleAccessManager" />
+               <LoginModule
+                       class="org.apache.jackrabbit.core.security.simple.SimpleLoginModule">
+                       <param name="anonymousId" value="anonymous" />
+                       <param name="adminId" value="admin" />
+               </LoginModule>
+       </Security>
+</Repository>
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jcr/proxy/AbstractUrlProxy.java b/org.argeo.core/src/org/argeo/jcr/proxy/AbstractUrlProxy.java
new file mode 100644 (file)
index 0000000..0984276
--- /dev/null
@@ -0,0 +1,154 @@
+package org.argeo.jcr.proxy;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+
+import javax.jcr.Binary;
+import javax.jcr.Node;
+import javax.jcr.Property;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.nodetype.NodeType;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.argeo.jcr.JcrException;
+import org.argeo.jcr.JcrUtils;
+
+/** Base class for URL-based proxies. */
+public abstract class AbstractUrlProxy implements ResourceProxy {
+       private final static Log log = LogFactory.getLog(AbstractUrlProxy.class);
+
+       private Repository jcrRepository;
+       private Session jcrAdminSession;
+       private String proxyWorkspace = "proxy";
+
+       protected abstract Node retrieve(Session session, String path);
+
+       void init() {
+               try {
+                       jcrAdminSession = JcrUtils.loginOrCreateWorkspace(jcrRepository, proxyWorkspace);
+                       beforeInitSessionSave(jcrAdminSession);
+                       if (jcrAdminSession.hasPendingChanges())
+                               jcrAdminSession.save();
+               } catch (RepositoryException e) {
+                       JcrUtils.discardQuietly(jcrAdminSession);
+                       throw new JcrException("Cannot initialize URL proxy", e);
+               }
+       }
+
+       /**
+        * Called before the (admin) session is saved at the end of the initialization.
+        * Does nothing by default, to be overridden.
+        */
+       protected void beforeInitSessionSave(Session session) throws RepositoryException {
+       }
+
+       void destroy() {
+               JcrUtils.logoutQuietly(jcrAdminSession);
+       }
+
+       /**
+        * Called before the (admin) session is logged out when resources are released.
+        * Does nothing by default, to be overridden.
+        */
+       protected void beforeDestroySessionLogout() throws RepositoryException {
+       }
+
+       public Node proxy(String path) {
+               // We open a JCR session with the client credentials in order not to use the
+               // admin session in multiple threads or make it a bottleneck.
+               Node nodeAdmin = null;
+               Node nodeClient = null;
+               Session clientSession = null;
+               try {
+                       clientSession = jcrRepository.login(proxyWorkspace);
+                       if (!clientSession.itemExists(path) || shouldUpdate(clientSession, path)) {
+                               nodeAdmin = retrieveAndSave(path);
+                               if (nodeAdmin != null)
+                                       nodeClient = clientSession.getNode(path);
+                       } else
+                               nodeClient = clientSession.getNode(path);
+                       return nodeClient;
+               } catch (RepositoryException e) {
+                       throw new JcrException("Cannot proxy " + path, e);
+               } finally {
+                       if (nodeClient == null)
+                               JcrUtils.logoutQuietly(clientSession);
+               }
+       }
+
+       protected synchronized Node retrieveAndSave(String path) {
+               try {
+                       Node node = retrieve(jcrAdminSession, path);
+                       if (node == null)
+                               return null;
+                       jcrAdminSession.save();
+                       return node;
+               } catch (RepositoryException e) {
+                       JcrUtils.discardQuietly(jcrAdminSession);
+                       throw new JcrException("Cannot retrieve and save " + path, e);
+               } finally {
+                       notifyAll();
+               }
+       }
+
+       /** The session is not saved. */
+       protected synchronized Node proxyUrl(Session session, String remoteUrl, String path) throws RepositoryException {
+               Node node = null;
+               if (session.itemExists(path)) {
+                       // throw new ArgeoJcrException("Node " + path + " already exists");
+               }
+               try (InputStream in = new URL(remoteUrl).openStream()) {
+                       // URL u = new URL(remoteUrl);
+                       // in = u.openStream();
+                       node = importFile(session, path, in);
+               } catch (IOException e) {
+                       if (log.isDebugEnabled()) {
+                               log.debug("Cannot read " + remoteUrl + ", skipping... " + e.getMessage());
+                               // log.trace("Cannot read because of ", e);
+                       }
+                       JcrUtils.discardQuietly(session);
+                       // } finally {
+                       // IOUtils.closeQuietly(in);
+               }
+               return node;
+       }
+
+       protected synchronized Node importFile(Session session, String path, InputStream in) throws RepositoryException {
+               Binary binary = null;
+               try {
+                       Node content = null;
+                       Node node = null;
+                       if (!session.itemExists(path)) {
+                               node = JcrUtils.mkdirs(session, path, NodeType.NT_FILE, NodeType.NT_FOLDER, false);
+                               content = node.addNode(Node.JCR_CONTENT, NodeType.NT_UNSTRUCTURED);
+                       } else {
+                               node = session.getNode(path);
+                               content = node.getNode(Node.JCR_CONTENT);
+                       }
+                       binary = session.getValueFactory().createBinary(in);
+                       content.setProperty(Property.JCR_DATA, binary);
+                       JcrUtils.updateLastModifiedAndParents(node, null, true);
+                       return node;
+               } finally {
+                       JcrUtils.closeQuietly(binary);
+               }
+       }
+
+       /** Whether the file should be updated. */
+       protected Boolean shouldUpdate(Session clientSession, String nodePath) {
+               return false;
+       }
+
+       public void setJcrRepository(Repository jcrRepository) {
+               this.jcrRepository = jcrRepository;
+       }
+
+       public void setProxyWorkspace(String localWorkspace) {
+               this.proxyWorkspace = localWorkspace;
+       }
+
+}
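
To show how the base class above is meant to be extended (this subclass is hypothetical and not part of the commit), retrieve() would typically map the requested path to a remote URL and delegate to proxyUrl(); the base URL is a placeholder:

    package org.argeo.docs.examples;

    import javax.jcr.Node;
    import javax.jcr.RepositoryException;
    import javax.jcr.Session;

    import org.argeo.jcr.proxy.AbstractUrlProxy;

    /** Hypothetical proxy mapping local paths to a single remote base URL. */
    public class SimpleUrlProxy extends AbstractUrlProxy {
        private String baseUrl = "https://repo.example.org";

        @Override
        protected Node retrieve(Session session, String path) {
            try {
                // download the remote resource and store it as an nt:file under the same path
                return proxyUrl(session, baseUrl + path, path);
            } catch (RepositoryException e) {
                throw new IllegalStateException("Cannot retrieve " + path, e);
            }
        }

        public void setBaseUrl(String baseUrl) {
            this.baseUrl = baseUrl;
        }
    }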
diff --git a/org.argeo.core/src/org/argeo/jcr/proxy/ResourceProxy.java b/org.argeo.core/src/org/argeo/jcr/proxy/ResourceProxy.java
new file mode 100644 (file)
index 0000000..84eea1f
--- /dev/null
@@ -0,0 +1,16 @@
+package org.argeo.jcr.proxy;
+
+import javax.jcr.Node;
+
+/** A proxy which knows how to resolve and synchronize relative URLs. */
+public interface ResourceProxy {
+       /**
+        * Proxy the file referenced by this relative path in the underlying
+        * repository. A new session is created by each call, so the underlying
+        * session of the returned node must be closed by the caller.
+        * 
+        * @return the proxied Node, <code>null</code> if the resource was not found
+        *         (e.g. HTTP 404)
+        */
+       public Node proxy(String relativePath);
+}
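
Since each call opens a new session that stays attached to the returned node, a plain (non-servlet) caller would typically release it as sketched below (hypothetical code, not part of the commit); the servlet in the next file shows the same pattern:

    package org.argeo.docs.examples;

    import javax.jcr.Node;

    import org.argeo.jcr.JcrUtils;
    import org.argeo.jcr.proxy.ResourceProxy;

    class ResourceProxyUsage {
        /** Returns the JCR path of a proxied resource, closing the underlying session afterwards. */
        static String resolve(ResourceProxy proxy, String relativePath) throws Exception {
            Node node = proxy.proxy(relativePath);
            if (node == null)
                return null; // resource not found on the remote side (e.g. HTTP 404)
            try {
                return node.getPath();
            } finally {
                JcrUtils.logoutQuietly(node.getSession());
            }
        }
    }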
diff --git a/org.argeo.core/src/org/argeo/jcr/proxy/ResourceProxyServlet.java b/org.argeo.core/src/org/argeo/jcr/proxy/ResourceProxyServlet.java
new file mode 100644 (file)
index 0000000..d77bd49
--- /dev/null
@@ -0,0 +1,116 @@
+package org.argeo.jcr.proxy;
+
+import java.io.IOException;
+import java.io.InputStream;
+
+import javax.jcr.Node;
+import javax.jcr.Property;
+import javax.jcr.RepositoryException;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.argeo.jcr.JcrException;
+import org.argeo.jcr.Bin;
+import org.argeo.jcr.JcrUtils;
+
+/** Exposes a {@link ResourceProxy} over HTTP. */
+public class ResourceProxyServlet extends HttpServlet {
+       private static final long serialVersionUID = -8886549549223155801L;
+
+       private final static Log log = LogFactory
+                       .getLog(ResourceProxyServlet.class);
+
+       private ResourceProxy proxy;
+
+       private String contentTypeCharset = "UTF-8";
+
+       @Override
+       protected void doGet(HttpServletRequest request,
+                       HttpServletResponse response) throws ServletException, IOException {
+               String path = request.getPathInfo();
+
+               if (log.isTraceEnabled()) {
+                       log.trace("path=" + path);
+                       log.trace("UserPrincipal = " + request.getUserPrincipal().getName());
+                       log.trace("SessionID = " + request.getSession(false).getId());
+                       log.trace("ContextPath = " + request.getContextPath());
+                       log.trace("ServletPath = " + request.getServletPath());
+                       log.trace("PathInfo = " + request.getPathInfo());
+                       log.trace("Method = " + request.getMethod());
+                       log.trace("User-Agent = " + request.getHeader("User-Agent"));
+               }
+
+               Node node = null;
+               try {
+                       node = proxy.proxy(path);
+                       if (node == null)
+                               response.sendError(404);
+                       else
+                               processResponse(node, response);
+               } finally {
+                       if (node != null)
+                               try {
+                                       JcrUtils.logoutQuietly(node.getSession());
+                               } catch (RepositoryException e) {
+                                       // silent
+                               }
+               }
+
+       }
+
+       /** Retrieves the content of the node and writes it to the HTTP response. */
+       protected void processResponse(Node node, HttpServletResponse response) {
+//             Binary binary = null;
+//             InputStream in = null;
+               try(Bin binary = new Bin( node.getNode(Property.JCR_CONTENT)
+                               .getProperty(Property.JCR_DATA));InputStream in = binary.getStream()) {
+                       String fileName = node.getName();
+                       String ext = FilenameUtils.getExtension(fileName);
+
+                       // TODO use a more generic / standard approach
+                       // see http://svn.apache.org/viewvc/tomcat/trunk/conf/web.xml
+                       String contentType;
+                       if ("xml".equals(ext))
+                               contentType = "text/xml;charset=" + contentTypeCharset;
+                       else if ("jar".equals(ext))
+                               contentType = "application/java-archive";
+                       else if ("zip".equals(ext))
+                               contentType = "application/zip";
+                       else if ("gz".equals(ext))
+                               contentType = "application/x-gzip";
+                       else if ("bz2".equals(ext))
+                               contentType = "application/x-bzip2";
+                       else if ("tar".equals(ext))
+                               contentType = "application/x-tar";
+                       else if ("rpm".equals(ext))
+                               contentType = "application/x-redhat-package-manager";
+                       else
+                               contentType = "application/octet-stream";
+                       contentType = contentType + ";name=\"" + fileName + "\"";
+                       response.setHeader("Content-Disposition", "attachment; filename=\""
+                                       + fileName + "\"");
+                       response.setHeader("Expires", "0");
+                       response.setHeader("Cache-Control", "no-cache, must-revalidate");
+                       response.setHeader("Pragma", "no-cache");
+
+                       response.setContentType(contentType);
+
+                       IOUtils.copy(in, response.getOutputStream());
+               } catch (RepositoryException e) {
+                       throw new JcrException("Cannot download " + node, e);
+               } catch (IOException e) {
+                       throw new RuntimeException("Cannot download " + node, e);
+               }
+       }
+
+       public void setProxy(ResourceProxy resourceProxy) {
+               this.proxy = resourceProxy;
+       }
+
+}
diff --git a/org.argeo.core/src/org/argeo/jcr/proxy/package-info.java b/org.argeo.core/src/org/argeo/jcr/proxy/package-info.java
new file mode 100644 (file)
index 0000000..a578c45
--- /dev/null
@@ -0,0 +1,2 @@
+/** Components to build proxies based on JCR. */
+package org.argeo.jcr.proxy;
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/jcr/unit/AbstractJcrTestCase.java b/org.argeo.core/src/org/argeo/jcr/unit/AbstractJcrTestCase.java
new file mode 100644 (file)
index 0000000..dc2963a
--- /dev/null
@@ -0,0 +1,116 @@
+package org.argeo.jcr.unit;
+
+import java.io.File;
+import java.security.AccessController;
+import java.security.PrivilegedAction;
+
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.SimpleCredentials;
+import javax.security.auth.Subject;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.argeo.jcr.JcrException;
+
+import junit.framework.TestCase;
+
+/** Base for unit tests with a JCR repository. */
+public abstract class AbstractJcrTestCase extends TestCase {
+       private final static Log log = LogFactory.getLog(AbstractJcrTestCase.class);
+
+       private Repository repository;
+       private Session session = null;
+
+       public final static String LOGIN_CONTEXT_TEST_SYSTEM = "TEST_JACKRABBIT_ADMIN";
+
+       // protected abstract File getRepositoryFile() throws Exception;
+
+       protected abstract Repository createRepository() throws Exception;
+
+       protected abstract void clearRepository(Repository repository) throws Exception;
+
+       @Override
+       protected void setUp() throws Exception {
+               File homeDir = getHomeDir();
+               FileUtils.deleteDirectory(homeDir);
+               repository = createRepository();
+       }
+
+       @Override
+       protected void tearDown() throws Exception {
+               if (session != null) {
+                       session.logout();
+                       if (log.isTraceEnabled())
+                               log.trace("Logout session");
+               }
+               clearRepository(repository);
+       }
+
+       protected Session session() {
+               if (session != null && session.isLive())
+                       return session;
+               Session session;
+               if (getLoginContext() != null) {
+                       LoginContext lc;
+                       try {
+                               lc = new LoginContext(getLoginContext());
+                               lc.login();
+                       } catch (LoginException e) {
+                               throw new IllegalStateException("JAAS login failed", e);
+                       }
+                       session = Subject.doAs(lc.getSubject(), new PrivilegedAction<Session>() {
+
+                               @Override
+                               public Session run() {
+                                       return login();
+                               }
+
+                       });
+               } else
+                       session = login();
+               this.session = session;
+               return this.session;
+       }
+
+       protected String getLoginContext() {
+               return null;
+       }
+
+       protected Session login() {
+               try {
+                       if (log.isTraceEnabled())
+                               log.trace("Login session");
+                       Subject subject = Subject.getSubject(AccessController.getContext());
+                       if (subject != null)
+                               return getRepository().login();
+                       else
+                               return getRepository().login(new SimpleCredentials("demo", "demo".toCharArray()));
+               } catch (RepositoryException e) {
+                       throw new JcrException("Cannot login to repository", e);
+               }
+       }
+
+       protected Repository getRepository() {
+               return repository;
+       }
+
+       /**
+        * Enables child classes to set an existing repository, in case it is not
+        * deleted on startup, for instance to test migration.
+        */
+       public void setRepository(Repository repository) {
+               this.repository = repository;
+       }
+
+       protected File getHomeDir() {
+               File homeDir = new File(System.getProperty("java.io.tmpdir"),
+                               AbstractJcrTestCase.class.getSimpleName() + "-" + System.getProperty("user.name"));
+               return homeDir;
+       }
+
+}
diff --git a/org.argeo.core/src/org/argeo/jcr/unit/package-info.java b/org.argeo.core/src/org/argeo/jcr/unit/package-info.java
new file mode 100644 (file)
index 0000000..c6e7415
--- /dev/null
@@ -0,0 +1,2 @@
+/** Helpers for unit tests with JCR repositories. */
+package org.argeo.jcr.unit;
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/sync/SyncException.java b/org.argeo.core/src/org/argeo/sync/SyncException.java
new file mode 100644 (file)
index 0000000..89bf869
--- /dev/null
@@ -0,0 +1,18 @@
+package org.argeo.sync;
+
+/** Common exception for sync operations. */
+public class SyncException extends RuntimeException {
+       private static final long serialVersionUID = -3371314343580218538L;
+
+       public SyncException(String message) {
+               super(message);
+       }
+
+       public SyncException(String message, Throwable cause) {
+               super(message, cause);
+       }
+
+       public SyncException(Object source, Object target, Throwable cause) {
+               super("Cannot sync from " + source + " to " + target, cause);
+       }
+}
diff --git a/org.argeo.core/src/org/argeo/sync/SyncResult.java b/org.argeo.core/src/org/argeo/sync/SyncResult.java
new file mode 100644 (file)
index 0000000..6d12ada
--- /dev/null
@@ -0,0 +1,101 @@
+package org.argeo.sync;
+
+import java.time.Instant;
+import java.util.Set;
+import java.util.TreeSet;
+
+/** Describes what happened during a sync operation. */
+public class SyncResult<T> {
+       private final Set<T> added = new TreeSet<>();
+       private final Set<T> modified = new TreeSet<>();
+       private final Set<T> deleted = new TreeSet<>();
+       private final Set<Error> errors = new TreeSet<>();
+
+       public Set<T> getAdded() {
+               return added;
+       }
+
+       public Set<T> getModified() {
+               return modified;
+       }
+
+       public Set<T> getDeleted() {
+               return deleted;
+       }
+
+       public Set<Error> getErrors() {
+               return errors;
+       }
+
+       public void addError(T sourcePath, T targetPath, Exception e) {
+               Error error = new Error(sourcePath, targetPath, e);
+               errors.add(error);
+       }
+
+       public boolean noModification() {
+               return modified.isEmpty() && deleted.isEmpty() && added.isEmpty();
+       }
+
+       @Override
+       public String toString() {
+               if (noModification())
+                       return "No modification.";
+               StringBuffer sb = new StringBuffer();
+               for (T p : modified)
+                       sb.append("MOD ").append(p).append('\n');
+               for (T p : deleted)
+                       sb.append("DEL ").append(p).append('\n');
+               for (T p : added)
+                       sb.append("ADD ").append(p).append('\n');
+               for (Error error : errors)
+                       sb.append(error).append('\n');
+               return sb.toString();
+       }
+
+       public class Error implements Comparable<Error> {
+               private final T sourcePath;// if null this is a failed delete
+               private final T targetPath;
+               private final Exception exception;
+               private final Instant timestamp = Instant.now();
+
+               public Error(T sourcePath, T targetPath, Exception e) {
+                       super();
+                       this.sourcePath = sourcePath;
+                       this.targetPath = targetPath;
+                       this.exception = e;
+               }
+
+               public T getSourcePath() {
+                       return sourcePath;
+               }
+
+               public T getTargetPath() {
+                       return targetPath;
+               }
+
+               public Exception getException() {
+                       return exception;
+               }
+
+               public Instant getTimestamp() {
+                       return timestamp;
+               }
+
+               @Override
+               public int compareTo(Error o) {
+                       return timestamp.compareTo(o.timestamp);
+               }
+
+               @Override
+               public int hashCode() {
+                       return timestamp.hashCode();
+               }
+
+               @Override
+               public String toString() {
+                       return "ERR " + timestamp + " " + (sourcePath == null ? "Deletion failed" : "Copy failed " + sourcePath)
+                                       + " " + targetPath + " " + exception.getMessage();
+               }
+
+       }
+}
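
As an illustration (hypothetical, not from the commit), a file-sync routine could record its outcome in a SyncResult<Path>; the file names are placeholders:

    package org.argeo.docs.examples;

    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.StandardCopyOption;

    import org.argeo.sync.SyncResult;

    /** Hypothetical: records the outcome of copying a single file in a SyncResult. */
    class SyncResultExample {
        static void copyOne(Path source, Path target, SyncResult<Path> result) {
            try {
                boolean existed = Files.exists(target);
                Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);
                if (existed)
                    result.getModified().add(target);
                else
                    result.getAdded().add(target);
            } catch (Exception e) {
                result.addError(source, target, e);
            }
        }

        public static void main(String[] args) {
            SyncResult<Path> result = new SyncResult<>();
            copyOne(Paths.get("a.txt"), Paths.get("b.txt"), result);
            System.out.println(result); // "No modification." or a MOD/DEL/ADD summary
        }
    }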
diff --git a/org.argeo.core/src/org/argeo/sync/package-info.java b/org.argeo.core/src/org/argeo/sync/package-info.java
new file mode 100644 (file)
index 0000000..c5e9da0
--- /dev/null
@@ -0,0 +1,2 @@
+/** Synchronisation-related utilities. */
+package org.argeo.sync;
\ No newline at end of file
diff --git a/org.argeo.core/src/org/argeo/util/CsvParser.java b/org.argeo.core/src/org/argeo/util/CsvParser.java
new file mode 100644 (file)
index 0000000..b903f77
--- /dev/null
@@ -0,0 +1,242 @@
+package org.argeo.util;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.io.UnsupportedEncodingException;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Parses a CSV file interpreting the first line as a header. The
+ * {@link #parse(InputStream)} method and the setters are synchronized so that
+ * the object cannot be modified when parsing.
+ */
+public abstract class CsvParser {
+       private char separator = ',';
+       private char quote = '\"';
+
+       private Boolean noHeader = false;
+       private Boolean strictLineAsLongAsHeader = true;
+
+       /**
+        * Actually process a parsed line. If
+        * {@link #setStrictLineAsLongAsHeader(Boolean)} is true (default) the header
+        * and the tokens are guaranteed to have the same size.
+        * 
+        * @param lineNumber the current line number, starts at 1 (the header, if header
+        *                   processing is enabled, the first line otherwise)
+        * @param header     the read-only header or null if
+        *                   {@link #setNoHeader(Boolean)} is true (default is false)
+        * @param tokens     the parsed tokens
+        */
+       protected abstract void processLine(Integer lineNumber, List<String> header, List<String> tokens);
+
+       /**
+        * Parses the CSV file (stream is closed at the end)
+        * 
+        * @param in the stream to parse
+        * 
+        * @deprecated Use {@link #parse(InputStream, Charset)} instead.
+        */
+       @Deprecated
+       public synchronized void parse(InputStream in) {
+               parse(in, (Charset) null);
+       }
+
+       /**
+        * Parses the CSV file (stream is closed at the end)
+        * 
+        * @param in       the stream to parse
+        * @param encoding the encoding to use.
+        * 
+        * @deprecated Use {@link #parse(InputStream, Charset)} instead.
+        */
+       @Deprecated
+       public synchronized void parse(InputStream in, String encoding) {
+               Reader reader;
+               if (encoding == null)
+                       reader = new InputStreamReader(in);
+               else
+                       try {
+                               reader = new InputStreamReader(in, encoding);
+                       } catch (UnsupportedEncodingException e) {
+                               throw new IllegalArgumentException(e);
+                       }
+               parse(reader);
+       }
+
+       /**
+        * Parses the CSV file (stream is closed at the end)
+        * 
+        * @param in      the stream to parse
+        * @param charset the charset to use
+        */
+       public synchronized void parse(InputStream in, Charset charset) {
+               Reader reader;
+               if (charset == null)
+                       reader = new InputStreamReader(in);
+               else
+                       reader = new InputStreamReader(in, charset);
+               parse(reader);
+       }
+
+       /**
+        * Parses the CSV file (stream is closed at the end)
+        * 
+        * @param reader the reader to use (it will be buffered)
+        */
+       public synchronized void parse(Reader reader) {
+               Integer lineCount = 0;
+               try (BufferedReader bufferedReader = new BufferedReader(reader)) {
+                       List<String> header = null;
+                       if (!noHeader) {
+                               String headerStr = bufferedReader.readLine();
+                               if (headerStr == null)// empty file
+                                       return;
+                               lineCount++;
+                               header = new ArrayList<String>();
+                               StringBuffer currStr = new StringBuffer("");
+                               Boolean wasInquote = false;
+                               while (parseLine(headerStr, header, currStr, wasInquote)) {
+                                       headerStr = bufferedReader.readLine();
+                                       if (headerStr == null)
+                                               break;
+                                       wasInquote = true;
+                               }
+                               header = Collections.unmodifiableList(header);
+                       }
+
+                       String line = null;
+                       lines: while ((line = bufferedReader.readLine()) != null) {
+                               line = preProcessLine(line);
+                               if (line == null) {
+                                       // skip line
+                                       continue lines;
+                               }
+                               lineCount++;
+                               List<String> tokens = new ArrayList<String>();
+                               StringBuffer currStr = new StringBuffer("");
+                               Boolean wasInquote = false;
+                               sublines: while (parseLine(line, tokens, currStr, wasInquote)) {
+                                       line = bufferedReader.readLine();
+                                       if (line == null)
+                                               break sublines;
+                                       wasInquote = true;
+                               }
+                               if (!noHeader && strictLineAsLongAsHeader) {
+                                       int headerSize = header.size();
+                                       int tokenSize = tokens.size();
+                                       if (tokenSize == 1 && line.trim().equals(""))
+                                               continue lines;// empty line
+                                       if (headerSize != tokenSize) {
+                                               throw new IllegalStateException("Token size " + tokenSize + " is different from header size "
+                                                               + headerSize + " at line " + lineCount + ", line: " + line + ", header: " + header
+                                                               + ", tokens: " + tokens);
+                                       }
+                               }
+                               processLine(lineCount, header, tokens);
+                       }
+               } catch (IOException e) {
+                       throw new RuntimeException("Cannot parse CSV file (line: " + lineCount + ")", e);
+               }
+       }
+
+       /**
+        * Called before each (logical) line is processed, giving a chance to modify it
+        * (typically for cleaning dirty files). To be overridden; returns the line
+        * unchanged by default. The line is skipped if 'null' is returned.
+        */
+       protected String preProcessLine(String line) {
+               return line;
+       }
+
+       /**
+        * Parses a line character by character for performance purposes.
+        * 
+        * @return whether to continue parsing this line
+        */
+       protected Boolean parseLine(String str, List<String> tokens, StringBuffer currStr, Boolean wasInquote) {
+               if (wasInquote)
+                       currStr.append('\n');
+
+               char[] arr = str.toCharArray();
+               boolean inQuote = wasInquote;
+               for (int i = 0; i < arr.length; i++) {
+                       char c = arr[i];
+                       if (c == separator) {
+                               if (!inQuote) {
+                                       tokens.add(currStr.toString());
+//                                     currStr.delete(0, currStr.length());
+                                       currStr.setLength(0);
+                                       currStr.trimToSize();
+                               } else {
+                                       // we don't remove separator that are in a quoted substring
+                                       // System.out
+                                       // .println("IN QUOTE, got a separator: [" + c + "]");
+                                       currStr.append(c);
+                               }
+                       } else if (c == quote) {
+                               if (inQuote && (i + 1) < arr.length && arr[i + 1] == quote) {
+                                       // case of double quote
+                                       currStr.append(quote);
+                                       i++;
+                               } else {// standard
+                                       inQuote = !inQuote;
+                               }
+                       } else {
+                               currStr.append(c);
+                       }
+               }
+
+               if (!inQuote) {
+                       tokens.add(currStr.toString());
+                       // System.out.println("# TOKEN: " + currStr);
+               }
+               // if (inQuote)
+               // throw new ArgeoException("Missing quote at the end of the line "
+               // + str + " (parsed: " + tokens + ")");
+               return inQuote;
+       }
+
+       public char getSeparator() {
+               return separator;
+       }
+
+       public synchronized void setSeparator(char separator) {
+               this.separator = separator;
+       }
+
+       public char getQuote() {
+               return quote;
+       }
+
+       public synchronized void setQuote(char quote) {
+               this.quote = quote;
+       }
+
+       public Boolean getNoHeader() {
+               return noHeader;
+       }
+
+       public synchronized void setNoHeader(Boolean noHeader) {
+               this.noHeader = noHeader;
+       }
+
+       public Boolean getStrictLineAsLongAsHeader() {
+               return strictLineAsLongAsHeader;
+       }
+
+       public synchronized void setStrictLineAsLongAsHeader(Boolean strictLineAsLongAsHeader) {
+               this.strictLineAsLongAsHeader = strictLineAsLongAsHeader;
+       }
+
+}
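
A minimal, hypothetical use of the parser above: an anonymous subclass that prints each row. The file name is a placeholder; parse(InputStream, Charset) closes the stream itself, so the try-with-resources is redundant but harmless:

    package org.argeo.docs.examples;

    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.List;

    import org.argeo.util.CsvParser;

    class CsvParserExample {
        public static void main(String[] args) throws Exception {
            CsvParser parser = new CsvParser() {
                @Override
                protected void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
                    // header and tokens have the same size since strictLineAsLongAsHeader is true by default
                    System.out.println(lineNumber + ": " + tokens);
                }
            };
            try (InputStream in = Files.newInputStream(Paths.get("data.csv"))) {
                parser.parse(in, StandardCharsets.UTF_8);
            }
        }
    }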
diff --git a/org.argeo.core/src/org/argeo/util/CsvParserWithLinesAsMap.java b/org.argeo.core/src/org/argeo/util/CsvParserWithLinesAsMap.java
new file mode 100644 (file)
index 0000000..8eb6e94
--- /dev/null
@@ -0,0 +1,36 @@
+package org.argeo.util;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * CSV parser which processes lines as maps whose keys are the header
+ * fields.
+ */
+public abstract class CsvParserWithLinesAsMap extends CsvParser {
+
+       /**
+        * Actually processes a line.
+        * 
+        * @param lineNumber the current line number, starts at 1 (the header, if header
+        *                   processing is enabled, the first line otherwise)
+        * @param line       the parsed tokens as a map whose keys are the header fields
+        */
+       protected abstract void processLine(Integer lineNumber, Map<String, String> line);
+
+       protected final void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
+               if (header == null)
+                       throw new IllegalArgumentException("Only CSV with header is supported");
+               Map<String, String> line = new HashMap<String, String>();
+               for (int i = 0; i < header.size(); i++) {
+                       String key = header.get(i);
+                       String value = null;
+                       if (i < tokens.size())
+                               value = tokens.get(i);
+                       line.put(key, value);
+               }
+               processLine(lineNumber, line);
+       }
+
+}
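
Sketched under the same assumptions as the previous example, the map-based variant gives direct access to columns by header name (the "email" column is illustrative):

    package org.argeo.docs.examples;

    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.Map;

    import org.argeo.util.CsvParserWithLinesAsMap;

    class CsvParserWithLinesAsMapExample {
        public static void main(String[] args) throws Exception {
            CsvParserWithLinesAsMap parser = new CsvParserWithLinesAsMap() {
                @Override
                protected void processLine(Integer lineNumber, Map<String, String> line) {
                    // columns are keyed by the header fields of the first line
                    System.out.println(line.get("email"));
                }
            };
            try (InputStream in = Files.newInputStream(Paths.get("users.csv"))) {
                parser.parse(in, StandardCharsets.UTF_8);
            }
        }
    }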
diff --git a/org.argeo.core/src/org/argeo/util/CsvWriter.java b/org.argeo.core/src/org/argeo/util/CsvWriter.java
new file mode 100644 (file)
index 0000000..41ea65d
--- /dev/null
@@ -0,0 +1,142 @@
+package org.argeo.util;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.UnsupportedEncodingException;
+import java.io.Writer;
+import java.nio.charset.Charset;
+import java.util.Iterator;
+import java.util.List;
+
+/** Write in CSV format. */
+public class CsvWriter {
+       private final Writer out;
+
+       private char separator = ',';
+       private char quote = '\"';
+
+       /**
+        * Creates a CSV writer.
+        * 
+        * @param out the stream to write to. Caller is responsible for closing it.
+        * 
+        * @deprecated Use {@link #CsvWriter(OutputStream, Charset)} instead.
+        * 
+        */
+       @Deprecated
+       public CsvWriter(OutputStream out) {
+               this.out = new OutputStreamWriter(out);
+       }
+
+       /**
+        * Creates a CSV writer.
+        * 
+        * @param out      the stream to write to. Caller is responsible for closing it.
+        * @param encoding the encoding to use.
+        * 
+        * @deprecated Use {@link #CsvWriter(OutputStream, Charset)} instead.
+        */
+       @Deprecated
+       public CsvWriter(OutputStream out, String encoding) {
+               try {
+                       this.out = new OutputStreamWriter(out, encoding);
+               } catch (UnsupportedEncodingException e) {
+                       throw new IllegalArgumentException(e);
+               }
+       }
+
+       /**
+        * Creates a CSV writer.
+        * 
+        * @param out     the stream to write to. Caller is responsible for closing it.
+        * @param charset the charset to use
+        */
+       public CsvWriter(OutputStream out, Charset charset) {
+               this.out = new OutputStreamWriter(out, charset);
+       }
+
+       /**
+        * Write a CSV line. Also used to write a header if needed (this is transparent
+        * for the CSV writer): simply call it first, before writing the lines.
+        */
+       public void writeLine(List<?> tokens) {
+               try {
+                       Iterator<?> it = tokens.iterator();
+                       while (it.hasNext()) {
+                               writeToken(it.next().toString());
+                               if (it.hasNext())
+                                       out.write(separator);
+                       }
+                       out.write('\n');
+                       out.flush();
+               } catch (IOException e) {
+                       throw new RuntimeException("Could not write " + tokens, e);
+               }
+       }
+
+       /**
+        * Write a CSV line. Also used to write a header if needed (this is transparent
+        * for the CSV writer): simply call it first, before writing the lines.
+        */
+       public void writeLine(Object[] tokens) {
+               try {
+                       for (int i = 0; i < tokens.length; i++) {
+                               if (tokens[i] == null) {
+                                       // TODO configure how to deal with null
+                                       writeToken("");
+                               } else {
+                                       writeToken(tokens[i].toString());
+                               }
+                               if (i != (tokens.length - 1))
+                                       out.write(separator);
+                       }
+                       out.write('\n');
+                       out.flush();
+               } catch (IOException e) {
+                       throw new RuntimeException("Could not write " + tokens, e);
+               }
+       }
+
+       protected void writeToken(String token) throws IOException {
+               // +2 for possible quotes, another +2 assuming there would be an already
+               // quoted string where quotes needs to be duplicated
+               // another +2 for safety
+               // we don't want to increase buffer size while writing
+               StringBuffer buf = new StringBuffer(token.length() + 6);
+               char[] arr = token.toCharArray();
+               boolean shouldQuote = false;
+               for (char c : arr) {
+                       if (!shouldQuote) {
+                               if (c == separator)
+                                       shouldQuote = true;
+                               if (c == '\n')
+                                       shouldQuote = true;
+                       }
+
+                       if (c == quote) {
+                               shouldQuote = true;
+                               // duplicate quote
+                               buf.append(quote);
+                       }
+
+                       // generic case
+                       buf.append(c);
+               }
+
+               if (shouldQuote == true)
+                       out.write(quote);
+               out.write(buf.toString());
+               if (shouldQuote == true)
+                       out.write(quote);
+       }
+
+       public void setSeparator(char separator) {
+               this.separator = separator;
+       }
+
+       public void setQuote(char quote) {
+               this.quote = quote;
+       }
+
+}
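
A short, hypothetical example of the writer above; the output file and column names are arbitrary. Note that the caller remains responsible for closing the stream:

    package org.argeo.docs.examples;

    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.Arrays;

    import org.argeo.util.CsvWriter;

    class CsvWriterExample {
        public static void main(String[] args) throws Exception {
            try (OutputStream out = Files.newOutputStream(Paths.get("out.csv"))) {
                CsvWriter writer = new CsvWriter(out, StandardCharsets.UTF_8);
                // the header is written like any other line, before the data lines
                writer.writeLine(Arrays.asList("id", "name"));
                writer.writeLine(new Object[] { 1, "contains, a separator" }); // will be quoted
            }
        }
    }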
diff --git a/org.argeo.core/src/org/argeo/util/DictionaryKeys.java b/org.argeo.core/src/org/argeo/util/DictionaryKeys.java
new file mode 100644 (file)
index 0000000..d17c86f
--- /dev/null
@@ -0,0 +1,42 @@
+package org.argeo.util;
+
+import java.util.Dictionary;
+import java.util.Enumeration;
+import java.util.Iterator;
+
+/**
+ * Access the keys of a {@link String}-keyed {@link Dictionary} (common throughout
+ * the OSGi APIs) as an {@link Iterable} so that they are easily usable in
+ * for-each loops.
+ */
+class DictionaryKeys implements Iterable<String> {
+       private final Dictionary<String, ?> dictionary;
+
+       public DictionaryKeys(Dictionary<String, ?> dictionary) {
+               this.dictionary = dictionary;
+       }
+
+       @Override
+       public Iterator<String> iterator() {
+               return new KeyIterator(dictionary.keys());
+       }
+
+       private static class KeyIterator implements Iterator<String> {
+               private final Enumeration<String> keys;
+
+               KeyIterator(Enumeration<String> keys) {
+                       this.keys = keys;
+               }
+
+               @Override
+               public boolean hasNext() {
+                       return keys.hasMoreElements();
+               }
+
+               @Override
+               public String next() {
+                       return keys.nextElement();
+               }
+
+       }
+}
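
For illustration (DictionaryKeys is package-private, so this hypothetical snippet would have to live in org.argeo.util): iterating over OSGi-style service properties with a for-each loop:

    package org.argeo.util;

    import java.util.Dictionary;
    import java.util.Hashtable;

    class DictionaryKeysExample {
        public static void main(String[] args) {
            Dictionary<String, Object> props = new Hashtable<>();
            props.put("service.pid", "org.example.service");
            props.put("service.ranking", 10);
            // DictionaryKeys adapts the Enumeration-based Dictionary API to Iterable
            for (String key : new DictionaryKeys(props))
                System.out.println(key + " = " + props.get(key));
        }
    }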
diff --git a/org.argeo.core/src/org/argeo/util/DigestUtils.java b/org.argeo.core/src/org/argeo/util/DigestUtils.java
new file mode 100644 (file)
index 0000000..ce01800
--- /dev/null
@@ -0,0 +1,201 @@
+package org.argeo.util;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.channels.FileChannel.MapMode;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+
+/** Utilities around cryptographic digests. */
+public class DigestUtils {
+       public final static String MD5 = "MD5";
+       public final static String SHA1 = "SHA1";
+       public final static String SHA256 = "SHA-256";
+       public final static String SHA512 = "SHA-512";
+
+       private static Boolean debug = false;
+       // TODO: make it configurable
+       private final static Integer byteBufferCapacity = 100 * 1024;// 100 KB
+
+       public static byte[] sha1(byte[] bytes) {
+               try {
+                       MessageDigest digest = MessageDigest.getInstance(SHA1);
+                       digest.update(bytes);
+                       byte[] checksum = digest.digest();
+                       return checksum;
+               } catch (NoSuchAlgorithmException e) {
+                       throw new IllegalArgumentException(e);
+               }
+       }
+
+       public static String digest(String algorithm, byte[] bytes) {
+               try {
+                       MessageDigest digest = MessageDigest.getInstance(algorithm);
+                       digest.update(bytes);
+                       byte[] checksum = digest.digest();
+                       String res = encodeHexString(checksum);
+                       return res;
+               } catch (NoSuchAlgorithmException e) {
+                       throw new IllegalArgumentException("Cannot digest with algorithm " + algorithm, e);
+               }
+       }
+
+       public static String digest(String algorithm, InputStream in) {
+               try {
+                       MessageDigest digest = MessageDigest.getInstance(algorithm);
+                       // ReadableByteChannel channel = Channels.newChannel(in);
+                       // ByteBuffer bb = ByteBuffer.allocateDirect(byteBufferCapacity);
+                       // while (channel.read(bb) > 0)
+                       // digest.update(bb);
+                       byte[] buffer = new byte[byteBufferCapacity];
+                       int read = 0;
+                       while ((read = in.read(buffer)) > 0) {
+                               digest.update(buffer, 0, read);
+                       }
+
+                       byte[] checksum = digest.digest();
+                       String res = encodeHexString(checksum);
+                       return res;
+               } catch (NoSuchAlgorithmException e) {
+                       throw new IllegalArgumentException("Cannot digest with algorithm " + algorithm, e);
+               } catch (IOException e) {
+                       throw new RuntimeException(e);
+               } finally {
+                       StreamUtils.closeQuietly(in);
+               }
+       }
+
+       public static String digest(String algorithm, File file) {
+               FileInputStream fis = null;
+               FileChannel fc = null;
+               try {
+                       fis = new FileInputStream(file);
+                       fc = fis.getChannel();
+
+                       // Get the file's size and then map it into memory
+                       int sz = (int) fc.size();
+                       ByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, sz);
+                       return digest(algorithm, bb);
+               } catch (IOException e) {
+                       throw new IllegalArgumentException("Cannot digest " + file + " with algorithm " + algorithm, e);
+               } finally {
+                       StreamUtils.closeQuietly(fis);
+                       if (fc != null && fc.isOpen())
+                               try {
+                                       fc.close();
+                               } catch (IOException e) {
+                                       // silent
+                               }
+               }
+       }
+
+       protected static String digest(String algorithm, ByteBuffer bb) {
+               long begin = System.currentTimeMillis();
+               try {
+                       MessageDigest digest = MessageDigest.getInstance(algorithm);
+                       digest.update(bb);
+                       byte[] checksum = digest.digest();
+                       String res = encodeHexString(checksum);
+                       long end = System.currentTimeMillis();
+                       if (debug)
+                               System.out.println((end - begin) + " ms / " + ((end - begin) / 1000) + " s");
+                       return res;
+               } catch (NoSuchAlgorithmException e) {
+                       throw new IllegalArgumentException("Cannot digest with algorithm " + algorithm, e);
+               }
+       }
+
+       public static String sha1hex(Path path) {
+               return digest(SHA1, path, byteBufferCapacity);
+       }
+
+       public static String digest(String algorithm, Path path, long bufferSize) {
+               byte[] digest = digestRaw(algorithm, path, bufferSize);
+               return encodeHexString(digest);
+       }
+
+       public static byte[] digestRaw(String algorithm, Path file, long bufferSize) {
+               long begin = System.currentTimeMillis();
+               try {
+                       MessageDigest md = MessageDigest.getInstance(algorithm);
+                       FileChannel fc = FileChannel.open(file);
+                       long fileSize = Files.size(file);
+                       if (fileSize <= bufferSize) {
+                               ByteBuffer bb = fc.map(MapMode.READ_ONLY, 0, fileSize);
+                               md.update(bb);
+                       } else {
+                               long lastCycle = (fileSize / bufferSize) - 1;
+                               long position = 0;
+                               for (int i = 0; i <= lastCycle; i++) {
+                                       ByteBuffer bb;
+                                       if (i != lastCycle) {
+                                               bb = fc.map(MapMode.READ_ONLY, position, bufferSize);
+                                               position = position + bufferSize;
+                                       } else {
+                                               bb = fc.map(MapMode.READ_ONLY, position, fileSize - position);
+                                               position = fileSize;
+                                       }
+                                       md.update(bb);
+                               }
+                       }
+                       long end = System.currentTimeMillis();
+                       if (debug)
+                               System.out.println((end - begin) + " ms / " + ((end - begin) / 1000) + " s");
+                       return md.digest();
+               } catch (NoSuchAlgorithmException e) {
+                       throw new IllegalArgumentException("Cannot digest " + file + "  with algorithm " + algorithm, e);
+               } catch (IOException e) {
+                       throw new RuntimeException("Cannot digest " + file + "  with algorithm " + algorithm, e);
+               }
+       }
+
+       public static void main(String[] args) {
+               File file;
+               if (args.length > 0)
+                       file = new File(args[0]);
+               else {
+                       System.err.println("Usage: <file> [<algorithm>]" + " (see http://java.sun.com/j2se/1.5.0/"
+                                       + "docs/guide/security/CryptoSpec.html#AppA)");
+                       return;
+               }
+
+               if (args.length > 1) {
+                       String algorithm = args[1];
+                       System.out.println(digest(algorithm, file));
+               } else {
+                       String algorithm = "MD5";
+                       System.out.println(algorithm + ": " + digest(algorithm, file));
+                       algorithm = "SHA";
+                       System.out.println(algorithm + ": " + digest(algorithm, file));
+                       System.out.println(algorithm + ": " + sha1hex(file.toPath()));
+                       algorithm = "SHA-256";
+                       System.out.println(algorithm + ": " + digest(algorithm, file));
+                       algorithm = "SHA-512";
+                       System.out.println(algorithm + ": " + digest(algorithm, file));
+               }
+       }
+
+       final private static char[] hexArray = "0123456789abcdef".toCharArray();
+
+       /**
+        * From
+        * http://stackoverflow.com/questions/9655181/how-to-convert-a-byte-array-to-a-hex-string-in-java
+        */
+       public static String encodeHexString(byte[] bytes) {
+               char[] hexChars = new char[bytes.length * 2];
+               for (int j = 0; j < bytes.length; j++) {
+                       int v = bytes[j] & 0xFF;
+                       hexChars[j * 2] = hexArray[v >>> 4];
+                       hexChars[j * 2 + 1] = hexArray[v & 0x0F];
+               }
+               return new String(hexChars);
+       }
+
+}
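A minimal usage sketch of DigestUtils (the example class name and the file path are illustrative, not part of this patch):

    import java.nio.charset.StandardCharsets;
    import java.nio.file.Paths;

    import org.argeo.util.DigestUtils;

    class DigestUtilsExample {
        public static void main(String[] args) {
            // Hex-encoded SHA-256 of an in-memory byte array
            String sha256 = DigestUtils.digest(DigestUtils.SHA256, "hello".getBytes(StandardCharsets.UTF_8));
            System.out.println("SHA-256: " + sha256);

            // Hex-encoded SHA-1 of a file, read through a memory-mapped FileChannel
            System.out.println("SHA-1: " + DigestUtils.sha1hex(Paths.get("/tmp/data.bin")));
        }
    }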
diff --git a/org.argeo.core/src/org/argeo/util/DirH.java b/org.argeo.core/src/org/argeo/util/DirH.java
new file mode 100644 (file)
index 0000000..b6d962f
--- /dev/null
@@ -0,0 +1,116 @@
+package org.argeo.util;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.nio.charset.Charset;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/** Hashes the hashes of the files in a directory. */
+public class DirH {
+
+       private final static Charset charset = Charset.forName("UTF-16");
+       private final static long bufferSize = 200 * 1024 * 1024;
+       private final static String algorithm = "SHA";
+
+       private final static byte EOL = (byte) '\n';
+       private final static byte SPACE = (byte) ' ';
+
+       private final int hashSize;
+
+       private final byte[][] hashes;
+       private final byte[][] fileNames;
+       private final byte[] digest;
+       private final byte[] dirName;
+
+       /**
+        * @param dirName can be null or empty
+        */
+       private DirH(byte[][] hashes, byte[][] fileNames, byte[] dirName) {
+               if (hashes.length != fileNames.length)
+                       throw new IllegalArgumentException(hashes.length + " hashes and " + fileNames.length + " file names");
+               this.hashes = hashes;
+               this.fileNames = fileNames;
+               this.dirName = dirName == null ? new byte[0] : dirName;
+               if (hashes.length == 0) {// empty dir
+                       hashSize = 20;
+                       // FIXME what is the digest of an empty dir?
+                       digest = new byte[hashSize];
+                       Arrays.fill(digest, SPACE);
+                       return;
+               }
+               hashSize = hashes[0].length;
+               for (int i = 0; i < hashes.length; i++) {
+                       if (hashes[i].length != hashSize)
+                               throw new IllegalArgumentException(
+                                               "Hash size for " + new String(fileNames[i], charset) + " is " + hashes[i].length);
+               }
+
+               try {
+                       MessageDigest md = MessageDigest.getInstance(algorithm);
+                       for (int i = 0; i < hashes.length; i++) {
+                               md.update(this.hashes[i]);
+                               md.update(SPACE);
+                               md.update(this.fileNames[i]);
+                               md.update(EOL);
+                       }
+                       digest = md.digest();
+               } catch (NoSuchAlgorithmException e) {
+                       throw new IllegalArgumentException("Cannot digest", e);
+               }
+       }
+
+       public void print(PrintStream out) {
+               out.print(DigestUtils.encodeHexString(digest));
+               if (dirName.length > 0) {
+                       out.print(' ');
+                       out.print(new String(dirName, charset));
+               }
+               out.print('\n');
+               for (int i = 0; i < hashes.length; i++) {
+                       out.print(DigestUtils.encodeHexString(hashes[i]));
+                       out.print(' ');
+                       out.print(new String(fileNames[i], charset));
+                       out.print('\n');
+               }
+       }
+
+       public static DirH digest(Path dir) {
+               try (DirectoryStream<Path> files = Files.newDirectoryStream(dir)) {
+                       List<byte[]> hs = new ArrayList<byte[]>();
+                       List<String> fNames = new ArrayList<>();
+                       for (Path file : files) {
+                               if (!Files.isDirectory(file)) {
+                                       byte[] digest = DigestUtils.digestRaw(algorithm, file, bufferSize);
+                                       hs.add(digest);
+                                       fNames.add(file.getFileName().toString());
+                               }
+                       }
+
+                       byte[][] fileNames = new byte[fNames.size()][];
+                       for (int i = 0; i < fNames.size(); i++) {
+                               fileNames[i] = fNames.get(i).getBytes(charset);
+                       }
+                       byte[][] hashes = hs.toArray(new byte[hs.size()][]);
+                       return new DirH(hashes, fileNames, dir.toString().getBytes(charset));
+               } catch (IOException e) {
+                       throw new RuntimeException("Cannot digest " + dir, e);
+               }
+       }
+
+       public static void main(String[] args) {
+               try {
+                       DirH dirH = DirH.digest(Paths.get("/home/mbaudier/tmp/"));
+                       dirH.print(System.out);
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+       }
+}
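A short usage sketch of DirH, hashing the direct (non-recursive) file contents of a directory and printing one hash per file plus the aggregate digest (the directory path is hypothetical):

    import java.nio.file.Paths;

    import org.argeo.util.DirH;

    class DirHExample {
        public static void main(String[] args) {
            // Digests each regular file in the directory, then digests the list of hashes
            DirH dirH = DirH.digest(Paths.get("/var/backups"));
            dirH.print(System.out);
        }
    }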
diff --git a/org.argeo.core/src/org/argeo/util/LangUtils.java b/org.argeo.core/src/org/argeo/util/LangUtils.java
new file mode 100644 (file)
index 0000000..7824d12
--- /dev/null
@@ -0,0 +1,253 @@
+package org.argeo.util;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardOpenOption;
+import java.time.ZonedDateTime;
+import java.time.temporal.ChronoUnit;
+import java.time.temporal.Temporal;
+import java.util.Dictionary;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Hashtable;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.naming.InvalidNameException;
+import javax.naming.ldap.LdapName;
+
+/** Utilities around Java basic features. */
+public class LangUtils {
+       /*
+        * NON-API OSGi
+        */
+       /**
+        * Returns an array with the names of the provided classes. Useful when
+        * registering services with multiple interfaces in OSGi.
+        */
+       public static String[] names(Class<?>... clzz) {
+               String[] res = new String[clzz.length];
+               for (int i = 0; i < clzz.length; i++)
+                       res[i] = clzz[i].getName();
+               return res;
+       }
+
+       /*
+        * MAP
+        */
+       /**
+        * Creates a new {@link Map} with one key-value pair. The key must not be
+        * null; if the value is null, an empty {@link Map} is returned.
+        */
+       public static Map<String, Object> map(String key, Object value) {
+               assert key != null;
+               HashMap<String, Object> props = new HashMap<>();
+               if (value != null)
+                       props.put(key, value);
+               return props;
+       }
+
+       /*
+        * DICTIONARY
+        */
+
+       /**
+        * Creates a new {@link Dictionary} with one key-value pair. The key must not
+        * be null; if the value is null, an empty {@link Dictionary} is returned.
+        */
+       public static Dictionary<String, Object> dict(String key, Object value) {
+               assert key != null;
+               Hashtable<String, Object> props = new Hashtable<>();
+               if (value != null)
+                       props.put(key, value);
+               return props;
+       }
+
+       /** @deprecated Use {@link #dict(String, Object)} instead. */
+       @Deprecated
+       public static Dictionary<String, Object> dico(String key, Object value) {
+               return dict(key, value);
+       }
+
+       /** Converts a {@link Dictionary} to a {@link Map} of strings. */
+       public static Map<String, String> dictToStringMap(Dictionary<String, ?> properties) {
+               if (properties == null) {
+                       return null;
+               }
+               Map<String, String> res = new HashMap<>(properties.size());
+               Enumeration<String> keys = properties.keys();
+               while (keys.hasMoreElements()) {
+                       String key = keys.nextElement();
+                       res.put(key, properties.get(key).toString());
+               }
+               return res;
+       }
+
+       /**
+        * Gets a string property from this map, returning <code>null</code> if it is
+        * not found.
+        */
+       public static String get(Map<String, ?> map, String key) {
+               Object res = map.get(key);
+               if (res == null)
+                       return null;
+               return res.toString();
+       }
+
+       /**
+        * Get a string property from this map, expecting to find it.
+        * 
+        * @throws IllegalArgumentException if the key was not found
+        */
+       public static String getNotNull(Map<String, ?> map, String key) {
+               Object res = map.get(key);
+               if (res == null)
+                       throw new IllegalArgumentException("Map " + map + " should contain key " + key);
+               return res.toString();
+       }
+
+       /**
+        * Wraps the keys of the provided {@link Dictionary} as an {@link Iterable}.
+        */
+       public static Iterable<String> keys(Dictionary<String, ?> props) {
+               assert props != null;
+               return new DictionaryKeys(props);
+       }
+
+       static String toJson(Dictionary<String, ?> props) {
+               return toJson(props, false);
+       }
+
+       static String toJson(Dictionary<String, ?> props, boolean pretty) {
+               StringBuilder sb = new StringBuilder();
+               sb.append('{');
+               if (pretty)
+                       sb.append('\n');
+               Enumeration<String> keys = props.keys();
+               while (keys.hasMoreElements()) {
+                       String key = keys.nextElement();
+                       if (pretty)
+                               sb.append(' ');
+                       sb.append('\"').append(key).append('\"');
+                       if (pretty)
+                               sb.append(" : ");
+                       else
+                               sb.append(':');
+                       sb.append('\"').append(props.get(key)).append('\"');
+                       if (keys.hasMoreElements())
+                               sb.append(", ");
+                       if (pretty)
+                               sb.append('\n');
+               }
+               sb.append('}');
+               return sb.toString();
+       }
+
+       static void storeAsProperties(Dictionary<String, Object> props, Path path) throws IOException {
+               if (props == null)
+                       throw new IllegalArgumentException("Props cannot be null");
+               Properties toStore = new Properties();
+               for (Enumeration<String> keys = props.keys(); keys.hasMoreElements();) {
+                       String key = keys.nextElement();
+                       toStore.setProperty(key, props.get(key).toString());
+               }
+               try (OutputStream out = Files.newOutputStream(path)) {
+                       toStore.store(out, null);
+               }
+       }
+
+       static void appendAsLdif(String dnBase, String dnKey, Dictionary<String, Object> props, Path path)
+                       throws IOException {
+               if (props == null)
+                       throw new IllegalArgumentException("Props cannot be null");
+               Object dnValue = props.get(dnKey);
+               String dnStr = dnKey + '=' + dnValue + ',' + dnBase;
+               LdapName dn;
+               try {
+                       dn = new LdapName(dnStr);
+               } catch (InvalidNameException e) {
+                       throw new IllegalArgumentException("Cannot interpret DN " + dnStr, e);
+               }
+               if (dnValue == null)
+                       throw new IllegalArgumentException("DN key " + dnKey + " must have a value");
+               try (Writer writer = Files.newBufferedWriter(path, StandardOpenOption.APPEND, StandardOpenOption.CREATE)) {
+                       writer.append("\ndn: ");
+                       writer.append(dn.toString());
+                       writer.append('\n');
+                       for (Enumeration<String> keys = props.keys(); keys.hasMoreElements();) {
+                               String key = keys.nextElement();
+                               Object value = props.get(key);
+                               writer.append(key);
+                               writer.append(": ");
+                               // FIXME deal with binary and multiple values
+                               writer.append(value.toString());
+                               writer.append('\n');
+                       }
+               }
+       }
+
+       static Dictionary<String, Object> loadFromProperties(Path path) throws IOException {
+               Properties toLoad = new Properties();
+               try (InputStream in = Files.newInputStream(path)) {
+                       toLoad.load(in);
+               }
+               Dictionary<String, Object> res = new Hashtable<String, Object>();
+               for (Object key : toLoad.keySet())
+                       res.put(key.toString(), toLoad.get(key));
+               return res;
+       }
+
+       /*
+        * EXCEPTIONS
+        */
+       /**
+        * Chains the messages of all causes (one per line, <b>starting with a line
+        * break</b>) without the full stack trace.
+        */
+       public static String chainCausesMessages(Throwable t) {
+               StringBuffer buf = new StringBuffer();
+               chainCauseMessage(buf, t);
+               return buf.toString();
+       }
+
+       /** Recursive chaining of messages */
+       private static void chainCauseMessage(StringBuffer buf, Throwable t) {
+               buf.append('\n').append(' ').append(t.getClass().getCanonicalName()).append(": ").append(t.getMessage());
+               if (t.getCause() != null)
+                       chainCauseMessage(buf, t.getCause());
+       }
+
+       /*
+        * TIME
+        */
+       /** Formats time elapsed since start. */
+       public static String since(ZonedDateTime start) {
+               ZonedDateTime now = ZonedDateTime.now();
+               return duration(start, now);
+       }
+
+       /** Formats a duration. */
+       public static String duration(Temporal start, Temporal end) {
+               long count = ChronoUnit.DAYS.between(start, end);
+               if (count != 0)
+                       return count > 1 ? count + " days" : count + " day";
+               count = ChronoUnit.HOURS.between(start, end);
+               if (count != 0)
+                       return count > 1 ? count + " hours" : count + " hour";
+               count = ChronoUnit.MINUTES.between(start, end);
+               if (count != 0)
+                       return count > 1 ? count + " minutes" : count + " minute";
+               count = ChronoUnit.SECONDS.between(start, end);
+               return count > 1 ? count + " seconds" : count + " second";
+       }
+
+       /** Singleton constructor. */
+       private LangUtils() {
+
+       }
+
+}
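A usage sketch for the public LangUtils helpers shown above (the property key and sleep duration are illustrative):

    import java.time.ZonedDateTime;
    import java.util.Dictionary;

    import org.argeo.util.LangUtils;

    class LangUtilsExample {
        public static void main(String[] args) throws InterruptedException {
            // Single-entry Dictionary, as typically passed to OSGi service registrations
            Dictionary<String, Object> props = LangUtils.dict("service.ranking", 10);
            for (String key : LangUtils.keys(props))
                System.out.println(key + " = " + props.get(key));

            // Human-readable elapsed time
            ZonedDateTime start = ZonedDateTime.now();
            Thread.sleep(1000);
            System.out.println("Took " + LangUtils.since(start));
        }
    }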
diff --git a/org.argeo.core/src/org/argeo/util/OS.java b/org.argeo.core/src/org/argeo/util/OS.java
new file mode 100644 (file)
index 0000000..d8127b6
--- /dev/null
@@ -0,0 +1,56 @@
+package org.argeo.util;
+
+import java.io.File;
+import java.lang.management.ManagementFactory;
+
+/** When OS-specific information is needed. */
+public class OS {
+       public final static OS LOCAL = new OS();
+
+       private final String arch, name, version;
+
+       /** The OS of the running JVM */
+       protected OS() {
+               arch = System.getProperty("os.arch");
+               name = System.getProperty("os.name");
+               version = System.getProperty("os.version");
+       }
+
+       public String getArch() {
+               return arch;
+       }
+
+       public String getName() {
+               return name;
+       }
+
+       public String getVersion() {
+               return version;
+       }
+
+       public boolean isMSWindows() {
+               // only MS Windows would use such a horrendous separator...
+               return File.separatorChar == '\\';
+       }
+
+       public String[] getDefaultShellCommand() {
+               if (!isMSWindows())
+                       return new String[] { "/bin/sh", "-l", "-i" };
+               else
+                       return new String[] { "cmd.exe", "/C" };
+       }
+
+       public static Integer getJvmPid() {
+               /*
+                * This method works on most platforms (including Linux). Since Java 9,
+                * there is a better way: long pid = ProcessHandle.current().pid();
+                *
+                * See:
+                * http://stackoverflow.com/questions/35842/how-can-a-java-program-get-its-own-process-id
+                */
+               String pidAndHost = ManagementFactory.getRuntimeMXBean().getName();
+               return Integer.parseInt(pidAndHost.substring(0, pidAndHost.indexOf('@')));
+       }
+}
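A usage sketch for OS (the output depends on the platform the JVM runs on):

    import java.util.Arrays;

    import org.argeo.util.OS;

    class OSExample {
        public static void main(String[] args) {
            System.out.println(OS.LOCAL.getName() + " " + OS.LOCAL.getVersion() + " (" + OS.LOCAL.getArch() + ")");
            // e.g. [/bin/sh, -l, -i] on POSIX systems, [cmd.exe, /C] on MS Windows
            System.out.println(Arrays.toString(OS.LOCAL.getDefaultShellCommand()));
            System.out.println("JVM PID: " + OS.getJvmPid());
        }
    }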
diff --git a/org.argeo.core/src/org/argeo/util/PasswordEncryption.java b/org.argeo.core/src/org/argeo/util/PasswordEncryption.java
new file mode 100644 (file)
index 0000000..c95c787
--- /dev/null
@@ -0,0 +1,216 @@
+package org.argeo.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.security.GeneralSecurityException;
+import java.security.InvalidKeyException;
+import java.security.Key;
+
+import javax.crypto.Cipher;
+import javax.crypto.CipherInputStream;
+import javax.crypto.CipherOutputStream;
+import javax.crypto.SecretKey;
+import javax.crypto.SecretKeyFactory;
+import javax.crypto.spec.IvParameterSpec;
+import javax.crypto.spec.PBEKeySpec;
+import javax.crypto.spec.SecretKeySpec;
+
+/** Password-based encryption and decryption (PBKDF2-derived key, AES/CBC by default). */
+public class PasswordEncryption {
+       public final static Integer DEFAULT_ITERATION_COUNT = 1024;
+       /** Stronger with 256, but causes problems with the Oracle JVM. */
+       public final static Integer DEFAULT_SECRETE_KEY_LENGTH = 256;
+       public final static Integer DEFAULT_SECRETE_KEY_LENGTH_RESTRICTED = 128;
+       public final static String DEFAULT_SECRETE_KEY_FACTORY = "PBKDF2WithHmacSHA1";
+       public final static String DEFAULT_SECRETE_KEY_ENCRYPTION = "AES";
+       public final static String DEFAULT_CIPHER_NAME = "AES/CBC/PKCS5Padding";
+//     public final static String DEFAULT_CHARSET = "UTF-8";
+       public final static Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
+
+       private Integer iterationCount = DEFAULT_ITERATION_COUNT;
+       private Integer secreteKeyLength = DEFAULT_SECRETE_KEY_LENGTH;
+       private String secreteKeyFactoryName = DEFAULT_SECRETE_KEY_FACTORY;
+       private String secreteKeyEncryption = DEFAULT_SECRETE_KEY_ENCRYPTION;
+       private String cipherName = DEFAULT_CIPHER_NAME;
+
+       private static byte[] DEFAULT_SALT_8 = { (byte) 0xA9, (byte) 0x9B, (byte) 0xC8, (byte) 0x32, (byte) 0x56,
+                       (byte) 0x35, (byte) 0xE3, (byte) 0x03 };
+       private static byte[] DEFAULT_IV_16 = { (byte) 0xA9, (byte) 0x9B, (byte) 0xC8, (byte) 0x32, (byte) 0x56,
+                       (byte) 0x35, (byte) 0xE3, (byte) 0x03, (byte) 0xA9, (byte) 0x9B, (byte) 0xC8, (byte) 0x32, (byte) 0x56,
+                       (byte) 0x35, (byte) 0xE3, (byte) 0x03 };
+
+       private Key key;
+       private Cipher ecipher;
+       private Cipher dcipher;
+
+       private String securityProviderName = null;
+
+       /**
+        * It is up to the caller to clear the passed array. Neither a copy of nor a
+        * reference to the passed array is kept.
+        */
+       public PasswordEncryption(char[] password) {
+               this(password, DEFAULT_SALT_8, DEFAULT_IV_16);
+       }
+
+       /**
+        * It is up to the caller to clear the passed arrays. Neither copies of nor
+        * references to the passed arrays are kept.
+        */
+       public PasswordEncryption(char[] password, byte[] passwordSalt, byte[] initializationVector) {
+               try {
+                       initKeyAndCiphers(password, passwordSalt, initializationVector);
+               } catch (InvalidKeyException e) {
+                       Integer previousSecreteKeyLength = secreteKeyLength;
+                       secreteKeyLength = DEFAULT_SECRETE_KEY_LENGTH_RESTRICTED;
+                       System.err.println("'" + e.getMessage() + "', will use " + secreteKeyLength
+                                       + " secret key length instead of " + previousSecreteKeyLength);
+                       try {
+                               initKeyAndCiphers(password, passwordSalt, initializationVector);
+                       } catch (GeneralSecurityException e1) {
+                               throw new IllegalStateException("Cannot get secret key (with restricted length)", e1);
+                       }
+               } catch (GeneralSecurityException e) {
+                       throw new IllegalStateException("Cannot get secret key", e);
+               }
+       }
+
+       protected void initKeyAndCiphers(char[] password, byte[] passwordSalt, byte[] initializationVector)
+                       throws GeneralSecurityException {
+               byte[] salt = new byte[8];
+               System.arraycopy(passwordSalt, 0, salt, 0, salt.length);
+               // for (int i = 0; i < password.length && i < salt.length; i++)
+               // salt[i] = (byte) password[i];
+               byte[] iv = new byte[16];
+               System.arraycopy(initializationVector, 0, iv, 0, iv.length);
+
+               SecretKeyFactory keyFac = SecretKeyFactory.getInstance(getSecretKeyFactoryName());
+               PBEKeySpec keySpec = new PBEKeySpec(password, salt, getIterationCount(), getKeyLength());
+               String secKeyEncryption = getSecretKeyEncryption();
+               if (secKeyEncryption != null) {
+                       SecretKey tmp = keyFac.generateSecret(keySpec);
+                       key = new SecretKeySpec(tmp.getEncoded(), getSecretKeyEncryption());
+               } else {
+                       key = keyFac.generateSecret(keySpec);
+               }
+               if (securityProviderName != null)
+                       ecipher = Cipher.getInstance(getCipherName(), securityProviderName);
+               else
+                       ecipher = Cipher.getInstance(getCipherName());
+               ecipher.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(iv));
+               dcipher = Cipher.getInstance(getCipherName());
+               dcipher.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(iv));
+       }
+
+       public void encrypt(InputStream decryptedIn, OutputStream encryptedOut) throws IOException {
+               try {
+                       CipherOutputStream out = new CipherOutputStream(encryptedOut, ecipher);
+                       StreamUtils.copy(decryptedIn, out);
+                       StreamUtils.closeQuietly(out);
+               } catch (IOException e) {
+                       throw e;
+               } finally {
+                       StreamUtils.closeQuietly(decryptedIn);
+               }
+       }
+
+       public void decrypt(InputStream encryptedIn, OutputStream decryptedOut) throws IOException {
+               try {
+                       CipherInputStream decryptedIn = new CipherInputStream(encryptedIn, dcipher);
+                       StreamUtils.copy(decryptedIn, decryptedOut);
+               } catch (IOException e) {
+                       throw e;
+               } finally {
+                       StreamUtils.closeQuietly(encryptedIn);
+               }
+       }
+
+       public byte[] encryptString(String str) {
+               ByteArrayOutputStream out = null;
+               ByteArrayInputStream in = null;
+               try {
+                       out = new ByteArrayOutputStream();
+                       in = new ByteArrayInputStream(str.getBytes(DEFAULT_CHARSET));
+                       encrypt(in, out);
+                       return out.toByteArray();
+               } catch (IOException e) {
+                       throw new RuntimeException(e);
+               } finally {
+                       StreamUtils.closeQuietly(out);
+               }
+       }
+
+       /** Closes the input stream */
+       public String decryptAsString(InputStream in) {
+               ByteArrayOutputStream out = null;
+               try {
+                       out = new ByteArrayOutputStream();
+                       decrypt(in, out);
+                       return new String(out.toByteArray(), DEFAULT_CHARSET);
+               } catch (IOException e) {
+                       throw new RuntimeException(e);
+               } finally {
+                       StreamUtils.closeQuietly(out);
+               }
+       }
+
+       protected Key getKey() {
+               return key;
+       }
+
+       protected Cipher getEcipher() {
+               return ecipher;
+       }
+
+       protected Cipher getDcipher() {
+               return dcipher;
+       }
+
+       protected Integer getIterationCount() {
+               return iterationCount;
+       }
+
+       protected Integer getKeyLength() {
+               return secreteKeyLength;
+       }
+
+       protected String getSecretKeyFactoryName() {
+               return secreteKeyFactoryName;
+       }
+
+       protected String getSecretKeyEncryption() {
+               return secreteKeyEncryption;
+       }
+
+       protected String getCipherName() {
+               return cipherName;
+       }
+
+       public void setIterationCount(Integer iterationCount) {
+               this.iterationCount = iterationCount;
+       }
+
+       public void setSecreteKeyLength(Integer keyLength) {
+               this.secreteKeyLength = keyLength;
+       }
+
+       public void setSecreteKeyFactoryName(String secreteKeyFactoryName) {
+               this.secreteKeyFactoryName = secreteKeyFactoryName;
+       }
+
+       public void setSecreteKeyEncryption(String secreteKeyEncryption) {
+               this.secreteKeyEncryption = secreteKeyEncryption;
+       }
+
+       public void setCipherName(String cipherName) {
+               this.cipherName = cipherName;
+       }
+
+       public void setSecurityProviderName(String securityProviderName) {
+               this.securityProviderName = securityProviderName;
+       }
+}
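A round-trip sketch for PasswordEncryption (the password and plain text are illustrative; the same password, salt and IV must be used to decrypt what was encrypted):

    import java.io.ByteArrayInputStream;

    import org.argeo.util.PasswordEncryption;

    class PasswordEncryptionExample {
        public static void main(String[] args) {
            char[] password = "changeit".toCharArray();
            PasswordEncryption enc = new PasswordEncryption(password);
            // per the javadoc, the caller may clear the password array once the ciphers are initialised

            // Encrypt a string to raw bytes, then decrypt it back
            byte[] encrypted = enc.encryptString("my secret value");
            String decrypted = enc.decryptAsString(new ByteArrayInputStream(encrypted));
            System.out.println(decrypted); // prints: my secret value
        }
    }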
diff --git a/org.argeo.core/src/org/argeo/util/ServiceChannel.java b/org.argeo.core/src/org/argeo/util/ServiceChannel.java
new file mode 100644 (file)
index 0000000..7997384
--- /dev/null
@@ -0,0 +1,78 @@
+package org.argeo.util;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.channels.AsynchronousByteChannel;
+import java.nio.channels.CompletionHandler;
+import java.nio.channels.ReadableByteChannel;
+import java.nio.channels.WritableByteChannel;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+
+/** An {@link AsynchronousByteChannel} based on an {@link ExecutorService}. */
+public class ServiceChannel implements AsynchronousByteChannel {
+       private final ReadableByteChannel in;
+       private final WritableByteChannel out;
+
+       private boolean open = true;
+
+       private ExecutorService executor;
+
+       public ServiceChannel(ReadableByteChannel in, WritableByteChannel out, ExecutorService executor) {
+               this.in = in;
+               this.out = out;
+               this.executor = executor;
+       }
+
+       @Override
+       public Future<Integer> read(ByteBuffer dst) {
+               return executor.submit(() -> in.read(dst));
+       }
+
+       @Override
+       public <A> void read(ByteBuffer dst, A attachment, CompletionHandler<Integer, ? super A> handler) {
+               try {
+                       Future<Integer> res = read(dst);
+                       handler.completed(res.get(), attachment);
+               } catch (Exception e) {
+                       handler.failed(e, attachment);
+               }
+       }
+
+       @Override
+       public Future<Integer> write(ByteBuffer src) {
+               return executor.submit(() -> out.write(src));
+       }
+
+       @Override
+       public <A> void write(ByteBuffer src, A attachment, CompletionHandler<Integer, ? super A> handler) {
+               try {
+                       Future<Integer> res = write(src);
+                       handler.completed(res.get(), attachment);
+               } catch (Exception e) {
+                       handler.failed(e, attachment);
+               }
+       }
+
+       @Override
+       public synchronized void close() throws IOException {
+               try {
+                       in.close();
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+               try {
+                       out.close();
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
+               open = false;
+               notifyAll();
+       }
+
+       @Override
+       public synchronized boolean isOpen() {
+               return open;
+       }
+
+}
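A sketch wrapping standard input and output as an AsynchronousByteChannel backed by a single-threaded executor (the message written is illustrative):

    import java.nio.ByteBuffer;
    import java.nio.channels.Channels;
    import java.nio.charset.StandardCharsets;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import org.argeo.util.ServiceChannel;

    class ServiceChannelExample {
        public static void main(String[] args) throws Exception {
            ExecutorService executor = Executors.newSingleThreadExecutor();
            ServiceChannel channel = new ServiceChannel(Channels.newChannel(System.in),
                    Channels.newChannel(System.out), executor);
            // Writes are submitted to the executor and return a Future
            channel.write(ByteBuffer.wrap("hello\n".getBytes(StandardCharsets.UTF_8))).get();
            channel.close();
            executor.shutdown();
        }
    }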
diff --git a/org.argeo.core/src/org/argeo/util/StreamUtils.java b/org.argeo.core/src/org/argeo/util/StreamUtils.java
new file mode 100644 (file)
index 0000000..6d7d940
--- /dev/null
@@ -0,0 +1,81 @@
+package org.argeo.util;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Reader;
+import java.io.Writer;
+
+/** Utilities to be used when Apache Commons IO is not available. */
+class StreamUtils {
+       private static final int DEFAULT_BUFFER_SIZE = 1024 * 4;
+
+       /*
+        * APACHE COMMONS IO (inspired)
+        */
+
+       /** @return the number of bytes */
+       public static Long copy(InputStream in, OutputStream out)
+                       throws IOException {
+               Long count = 0l;
+               byte[] buf = new byte[DEFAULT_BUFFER_SIZE];
+               while (true) {
+                       int length = in.read(buf);
+                       if (length < 0)
+                               break;
+                       out.write(buf, 0, length);
+                       count = count + length;
+               }
+               return count;
+       }
+
+       /** @return the number of chars */
+       public static Long copy(Reader in, Writer out) throws IOException {
+               Long count = 0l;
+               char[] buf = new char[DEFAULT_BUFFER_SIZE];
+               while (true) {
+                       int length = in.read(buf);
+                       if (length < 0)
+                               break;
+                       out.write(buf, 0, length);
+                       count = count + length;
+               }
+               return count;
+       }
+
+       public static void closeQuietly(InputStream in) {
+               if (in != null)
+                       try {
+                               in.close();
+                       } catch (Exception e) {
+                               //
+                       }
+       }
+
+       public static void closeQuietly(OutputStream out) {
+               if (out != null)
+                       try {
+                               out.close();
+                       } catch (Exception e) {
+                               //
+                       }
+       }
+
+       public static void closeQuietly(Reader in) {
+               if (in != null)
+                       try {
+                               in.close();
+                       } catch (Exception e) {
+                               //
+                       }
+       }
+
+       public static void closeQuietly(Writer out) {
+               if (out != null)
+                       try {
+                               out.close();
+                       } catch (Exception e) {
+                               //
+                       }
+       }
+}
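Since StreamUtils is package-private, it is only a drop-in for Apache Commons IO within org.argeo.util; a sketch of a copy with quiet closing (the file paths are hypothetical):

    package org.argeo.util; // StreamUtils is package-private, so this sketch sits in the same package

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.InputStream;
    import java.io.OutputStream;

    class StreamUtilsExample {
        public static void main(String[] args) throws Exception {
            InputStream in = new FileInputStream("/tmp/in.txt");
            OutputStream out = new FileOutputStream("/tmp/out.txt");
            try {
                // Copies with a 4 KB buffer and returns the number of bytes copied
                Long count = StreamUtils.copy(in, out);
                System.out.println(count + " bytes copied");
            } finally {
                StreamUtils.closeQuietly(in);
                StreamUtils.closeQuietly(out);
            }
        }
    }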
diff --git a/org.argeo.core/src/org/argeo/util/Tester.java b/org.argeo.core/src/org/argeo/util/Tester.java
new file mode 100644 (file)
index 0000000..31a2be4
--- /dev/null
@@ -0,0 +1,126 @@
+package org.argeo.util;
+
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/** A generic tester based on Java assertions and functional programming. */
+public class Tester {
+       private Map<String, TesterStatus> results = Collections.synchronizedSortedMap(new TreeMap<>());
+
+       private ClassLoader classLoader;
+
+       /** Use {@link Thread#getContextClassLoader()} by default. */
+       public Tester() {
+               this(Thread.currentThread().getContextClassLoader());
+       }
+
+       public Tester(ClassLoader classLoader) {
+               this.classLoader = classLoader;
+       }
+
+       public void execute(String className) {
+               Class<?> clss;
+               try {
+                       clss = classLoader.loadClass(className);
+                       boolean assertionsEnabled = clss.desiredAssertionStatus();
+                       if (!assertionsEnabled)
+                               throw new IllegalStateException("Test runner " + getClass().getName()
+                                               + " requires Java assertions to be enabled. Call the JVM with the -ea argument.");
+               } catch (Exception e1) {
+                       throw new IllegalArgumentException("Cannot initialise test for " + className, e1);
+
+               }
+               List<Method> methods = findMethods(clss);
+               if (methods.size() == 0)
+                       throw new IllegalArgumentException("No test method found in " + clss);
+               // TODO make order more predictable?
+               for (Method method : methods) {
+                       String uid = method.getDeclaringClass().getName() + "#" + method.getName();
+                       TesterStatus testStatus = new TesterStatus(uid);
+                       Object obj = null;
+                       try {
+                               beforeTest(uid, method);
+                               obj = clss.getDeclaredConstructor().newInstance();
+                               method.invoke(obj);
+                               testStatus.setPassed();
+                               afterTestPassed(uid, method, obj);
+                       } catch (Exception e) {
+                               testStatus.setFailed(e);
+                               afterTestFailed(uid, method, obj, e);
+                       } finally {
+                               results.put(uid, testStatus);
+                       }
+               }
+       }
+
+       protected void beforeTest(String uid, Method method) {
+               // System.out.println(uid + ": STARTING");
+       }
+
+       protected void afterTestPassed(String uid, Method method, Object obj) {
+               System.out.println(uid + ": PASSED");
+       }
+
+       protected void afterTestFailed(String uid, Method method, Object obj, Throwable e) {
+               System.out.println(uid + ": FAILED");
+               e.printStackTrace();
+       }
+
+       protected List<Method> findMethods(Class<?> clss) {
+               List<Method> methods = new ArrayList<Method>();
+//             Method call = getMethod(clss, "call");
+//             if (call != null)
+//                     methods.add(call);
+//
+               for (Method method : clss.getMethods()) {
+                       if (method.getName().startsWith("test")) {
+                               methods.add(method);
+                       }
+               }
+               return methods;
+       }
+
+       protected Method getMethod(Class<?> clss, String name, Class<?>... parameterTypes) {
+               try {
+                       return clss.getMethod(name, parameterTypes);
+               } catch (NoSuchMethodException e) {
+                       return null;
+               } catch (SecurityException e) {
+                       throw new IllegalStateException(e);
+               }
+       }
+
+       public static void main(String[] args) {
+               // deal with arguments
+               String className;
+               if (args.length < 1) {
+                       System.err.println(usage());
+                       System.exit(1);
+                       throw new IllegalArgumentException();
+               } else {
+                       className = args[0];
+               }
+
+               Tester test = new Tester();
+               try {
+                       test.execute(className);
+               } catch (Throwable e) {
+                       e.printStackTrace();
+               }
+
+               Map<String, TesterStatus> r = test.results;
+               for (String uid : r.keySet()) {
+                       TesterStatus testStatus = r.get(uid);
+                       System.out.println(testStatus);
+               }
+       }
+
+       public static String usage() {
+               return "java " + Tester.class.getName() + " [test class name]";
+
+       }
+}
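A sketch of a test class as Tester discovers it: public methods whose names start with test, relying on Java assertions, so the JVM must be started with -ea (the class name and assertion are illustrative):

    import org.argeo.util.Tester;

    public class ExampleTest {
        public void testBasics() {
            assert 2 + 2 == 4 : "arithmetic is broken";
        }

        public static void main(String[] args) {
            // Roughly equivalent to: java -ea org.argeo.util.Tester ExampleTest
            new Tester().execute(ExampleTest.class.getName());
        }
    }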
diff --git a/org.argeo.core/src/org/argeo/util/TesterStatus.java b/org.argeo.core/src/org/argeo/util/TesterStatus.java
new file mode 100644 (file)
index 0000000..d1d14ed
--- /dev/null
@@ -0,0 +1,98 @@
+package org.argeo.util;
+
+import java.io.Serializable;
+
+/** The status of a test. */
+public class TesterStatus implements Serializable {
+       private static final long serialVersionUID = 6272975746885487000L;
+
+       private Boolean passed = null;
+       private final String uid;
+       private Throwable throwable = null;
+
+       public TesterStatus(String uid) {
+               this.uid = uid;
+       }
+
+       /** For cloning. */
+       public TesterStatus(String uid, Boolean passed, Throwable throwable) {
+               this(uid);
+               this.passed = passed;
+               this.throwable = throwable;
+       }
+
+       public synchronized Boolean isRunning() {
+               return passed == null;
+       }
+
+       public synchronized Boolean isPassed() {
+               assert passed != null;
+               return passed;
+       }
+
+       public synchronized Boolean isFailed() {
+               assert passed != null;
+               return !passed;
+       }
+
+       public synchronized void setPassed() {
+               setStatus(true);
+       }
+
+       public synchronized void setFailed() {
+               setStatus(false);
+       }
+
+       public synchronized void setFailed(Throwable throwable) {
+               setStatus(false);
+               setThrowable(throwable);
+       }
+
+       protected void setStatus(Boolean passed) {
+               if (this.passed != null)
+                       throw new IllegalStateException("Passed status of test " + uid + " is already set (to " + this.passed + ")");
+               this.passed = passed;
+       }
+
+       protected void setThrowable(Throwable throwable) {
+               if (this.throwable != null)
+                       throw new IllegalStateException("Throwable of test " + uid + " is already set (to " + this.throwable + ")");
+               this.throwable = throwable;
+       }
+
+       public String getUid() {
+               return uid;
+       }
+
+       public Throwable getThrowable() {
+               return throwable;
+       }
+
+       @Override
+       protected Object clone() throws CloneNotSupportedException {
+               // TODO Auto-generated method stub
+               return super.clone();
+       }
+
+       @Override
+       public boolean equals(Object o) {
+               if (o instanceof TesterStatus) {
+                       TesterStatus other = (TesterStatus) o;
+                       // we don't check consistency for performance purposes
+                       // this equals() is supposed to be used in collections or for transfer
+                       return other.uid.equals(uid);
+               }
+               return false;
+       }
+
+       @Override
+       public int hashCode() {
+               return uid.hashCode();
+       }
+
+       @Override
+       public String toString() {
+               return uid + "\t" + (passed == null ? "running" : (passed ? "passed" : "failed"));
+       }
+
+}
diff --git a/org.argeo.core/src/org/argeo/util/Throughput.java b/org.argeo.core/src/org/argeo/util/Throughput.java
new file mode 100644 (file)
index 0000000..266ddbc
--- /dev/null
@@ -0,0 +1,82 @@
+package org.argeo.util;
+
+import java.text.NumberFormat;
+import java.text.ParseException;
+import java.util.Locale;
+
+/** A throughput, that is, a value per unit of time. */
+public class Throughput {
+       private final static NumberFormat usNumberFormat = NumberFormat.getInstance(Locale.US);
+
+       public enum Unit {
+               s, m, h, d
+       }
+
+       private final Double value;
+       private final Unit unit;
+
+       public Throughput(Double value, Unit unit) {
+               this.value = value;
+               this.unit = unit;
+       }
+
+       public Throughput(Long periodMs, Long count, Unit unit) {
+               if (unit.equals(Unit.s))
+                       value = ((double) count * 1000d) / periodMs;
+               else if (unit.equals(Unit.m))
+                       value = ((double) count * 60d * 1000d) / periodMs;
+               else if (unit.equals(Unit.h))
+                       value = ((double) count * 60d * 60d * 1000d) / periodMs;
+               else if (unit.equals(Unit.d))
+                       value = ((double) count * 24d * 60d * 60d * 1000d) / periodMs;
+               else
+                       throw new IllegalArgumentException("Unsupported unit " + unit);
+               this.unit = unit;
+       }
+
+       public Throughput(Double value, String unitStr) {
+               this(value, Unit.valueOf(unitStr));
+       }
+
+       public Throughput(String def) {
+               int index = def.indexOf('/');
+               if (def.length() < 3 || index <= 0 || index != def.length() - 2)
+                       throw new IllegalArgumentException(
+                                       def + " is not a proper throughput definition" + " (should be <value>/<unit>, e.g. 3.54/s or 1500/h)");
+               String valueStr = def.substring(0, index);
+               String unitStr = def.substring(index + 1);
+               try {
+                       this.value = usNumberFormat.parse(valueStr).doubleValue();
+               } catch (ParseException e) {
+                       throw new IllegalArgumentException("Cannot parse " + valueStr + " as a number.", e);
+               }
+               this.unit = Unit.valueOf(unitStr);
+       }
+
+       public Long asMsPeriod() {
+               if (unit.equals(Unit.s))
+                       return Math.round(1000d / value);
+               else if (unit.equals(Unit.m))
+                       return Math.round((60d * 1000d) / value);
+               else if (unit.equals(Unit.h))
+                       return Math.round((60d * 60d * 1000d) / value);
+               else if (unit.equals(Unit.d))
+                       return Math.round((24d * 60d * 60d * 1000d) / value);
+               else
+                       throw new IllegalArgumentException("Unsupported unit " + unit);
+       }
+
+       @Override
+       public String toString() {
+               return usNumberFormat.format(value) + '/' + unit;
+       }
+
+       public Double getValue() {
+               return value;
+       }
+
+       public Unit getUnit() {
+               return unit;
+       }
+
+}
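A usage sketch for Throughput, parsing a definition string and converting it to a polling period in milliseconds (the values are illustrative):

    import org.argeo.util.Throughput;

    class ThroughputExample {
        public static void main(String[] args) {
            // 1500 operations per hour, i.e. one every 2400 ms
            Throughput throughput = new Throughput("1500/h");
            System.out.println(throughput + " => every " + throughput.asMsPeriod() + " ms");

            // 3.54 operations per second (the value part uses the US number format)
            System.out.println(new Throughput(3.54d, Throughput.Unit.s).asMsPeriod() + " ms");
        }
    }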
diff --git a/org.argeo.core/src/org/argeo/util/UuidUtils.java b/org.argeo.core/src/org/argeo/util/UuidUtils.java
new file mode 100644 (file)
index 0000000..ebe0978
--- /dev/null
@@ -0,0 +1,374 @@
+package org.argeo.util;
+
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.net.UnknownHostException;
+import java.security.SecureRandom;
+import java.time.Duration;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
+import java.util.BitSet;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Utilities to simplify and extend the usage of {@link UUID}. Only the RFC 4122
+ * variant (also known as the Leach–Salz variant) is supported.
+ */
+public class UuidUtils {
+       /** Nil UUID (00000000-0000-0000-0000-000000000000). */
+       public final static UUID NIL_UUID = UUID.fromString("00000000-0000-0000-0000-000000000000");
+       public final static LocalDateTime GREGORIAN_START = LocalDateTime.of(1582, 10, 15, 0, 0, 0);
+
+       private final static long MOST_SIG_VERSION1 = (1l << 12);
+       private final static long LEAST_SIG_RFC4122_VARIANT = (1l << 63);
+
+       private final static SecureRandom RANDOM;
+       private final static AtomicInteger CLOCK_SEQUENCE;
+       private final static byte[] HARDWARE_ADDRESS;
+       /** A start timestamp to which {@link System#nanoTime()}/100 can be added. */
+       private final static long START_TIMESTAMP;
+       static {
+               RANDOM = new SecureRandom();
+               CLOCK_SEQUENCE = new AtomicInteger(RANDOM.nextInt(16384));
+               HARDWARE_ADDRESS = getHardwareAddress();
+
+               long nowVm = System.nanoTime() / 100;
+               Duration duration = Duration.between(GREGORIAN_START, LocalDateTime.now(ZoneOffset.UTC));
+               START_TIMESTAMP = (duration.getSeconds() * 10000000 + duration.getNano() / 100) - nowVm;
+       }
+
+       private static byte[] getHardwareAddress() {
+               InetAddress localHost;
+               try {
+                       localHost = InetAddress.getLocalHost();
+                       try {
+                               NetworkInterface nic = NetworkInterface.getByInetAddress(localHost);
+                               return nic.getHardwareAddress();
+                       } catch (SocketException e) {
+                               return null;
+                       }
+               } catch (UnknownHostException e) {
+                       return null;
+               }
+
+       }
+
+       public static UUID timeUUIDwithRandomNode() {
+               long timestamp = START_TIMESTAMP + System.nanoTime() / 100;
+               return timeUUID(timestamp, RANDOM);
+       }
+
+       public static UUID timeUUID(long timestamp, Random random) {
+               byte[] node = new byte[6];
+               random.nextBytes(node);
+               node[0] = (byte) (node[0] | 1);
+               long clockSequence = CLOCK_SEQUENCE.incrementAndGet();
+               return timeUUID(timestamp, clockSequence, node);
+       }
+
+       public static UUID timeUUID() {
+               long timestamp = START_TIMESTAMP + System.nanoTime() / 100;
+               return timeUUID(timestamp);
+       }
+
+       public static UUID timeUUID(long timestamp) {
+               if (HARDWARE_ADDRESS == null)
+                       return timeUUID(timestamp, RANDOM);
+               long clockSequence = CLOCK_SEQUENCE.incrementAndGet();
+               return timeUUID(timestamp, clockSequence, HARDWARE_ADDRESS);
+       }
+
+       public static UUID timeUUID(long timestamp, NetworkInterface nic) {
+               byte[] node;
+               try {
+                       node = nic.getHardwareAddress();
+               } catch (SocketException e) {
+                       throw new IllegalStateException("Cannot get hardware address", e);
+               }
+               long clockSequence = CLOCK_SEQUENCE.incrementAndGet();
+               return timeUUID(timestamp, clockSequence, node);
+       }
+
+       public static UUID timeUUID(LocalDateTime time, long clockSequence, byte[] node) {
+               Duration duration = Duration.between(GREGORIAN_START, time);
+               // Number of 100 ns intervals in one second: 1000000000 / 100 = 10000000
+               long timestamp = duration.getSeconds() * 10000000 + duration.getNano() / 100;
+               return timeUUID(timestamp, clockSequence, node);
+       }
+
+       public static UUID timeUUID(long timestamp, long clockSequence, byte[] node) {
+               assert node.length >= 6;
+
+               long mostSig = MOST_SIG_VERSION1 // base for version 1 UUID
+                               | ((timestamp & 0xFFFFFFFFL) << 32) // time_low
+                               | (((timestamp >> 32) & 0xFFFFL) << 16) // time_mid
+                               | ((timestamp >> 48) & 0x0FFFL);// time_hi_and_version
+
+               long leastSig = LEAST_SIG_RFC4122_VARIANT // base for Leach–Salz UUID
+                               | (((clockSequence & 0x3F00) >> 8) << 56) // clk_seq_hi_res
+                               | ((clockSequence & 0xFF) << 48) // clk_seq_low
+                               | (node[0] & 0xFFL) //
+                               | ((node[1] & 0xFFL) << 8) //
+                               | ((node[2] & 0xFFL) << 16) //
+                               | ((node[3] & 0xFFL) << 24) //
+                               | ((node[4] & 0xFFL) << 32) //
+                               | ((node[5] & 0xFFL) << 40); //
+//             for (int i = 0; i < 6; i++) {
+//                     leastSig = leastSig | ((node[i] & 0xFFL) << (8 * i));
+//             }
+               UUID uuid = new UUID(mostSig, leastSig);
+
+               // tests
+               assert uuid.node() == BitSet.valueOf(node).toLongArray()[0];
+               assert uuid.timestamp() == timestamp;
+               assert uuid.clockSequence() == clockSequence;
+               assert uuid.version() == 1;
+               assert uuid.variant() == 2;
+               return uuid;
+       }
+
+       @Deprecated
+       public static UUID timeBasedUUID() {
+               return timeBasedUUID(LocalDateTime.now(ZoneOffset.UTC));
+       }
+
+       @Deprecated
+       public static UUID timeBasedRandomUUID() {
+               return timeBasedRandomUUID(LocalDateTime.now(ZoneOffset.UTC), RANDOM);
+       }
+
+       @Deprecated
+       public static UUID timeBasedUUID(LocalDateTime time) {
+               if (HARDWARE_ADDRESS == null)
+                       return timeBasedRandomUUID(time, RANDOM);
+               return timeBasedUUID(time, BitSet.valueOf(HARDWARE_ADDRESS));
+       }
+
+       @Deprecated
+       public static UUID timeBasedAddressUUID(LocalDateTime time, NetworkInterface nic) throws SocketException {
+               byte[] nodeBytes = nic.getHardwareAddress();
+               BitSet node = BitSet.valueOf(nodeBytes);
+               return timeBasedUUID(time, node);
+       }
+
+       @Deprecated
+       public static UUID timeBasedRandomUUID(LocalDateTime time, Random random) {
+               byte[] nodeBytes = new byte[6];
+               random.nextBytes(nodeBytes);
+               BitSet node = BitSet.valueOf(nodeBytes);
+               // set random marker
+               node.set(0, true);
+               return timeBasedUUID(time, node);
+       }
+
+       @Deprecated
+       public static UUID timeBasedUUID(LocalDateTime time, BitSet node) {
+               // most significant
+               Duration duration = Duration.between(GREGORIAN_START, time);
+
+               // Number of 100 ns intervals in one second: 1000000000 / 100 = 10000000
+               long timeNanos = duration.getSeconds() * 10000000 + duration.getNano() / 100;
+               BitSet timeBits = BitSet.valueOf(new long[] { timeNanos });
+               assert timeBits.length() <= 60;
+
+               int clockSequence;
+               synchronized (CLOCK_SEQUENCE) {
+                       clockSequence = CLOCK_SEQUENCE.incrementAndGet();
+                       if (clockSequence > 16384)
+                               CLOCK_SEQUENCE.set(0);
+               }
+               BitSet clockSequenceBits = BitSet.valueOf(new long[] { clockSequence });
+
+               // Build the UUID, bit by bit
+               // see https://tools.ietf.org/html/rfc4122#section-4.2.2
+               // time
+               BitSet time_low = new BitSet(32);
+               BitSet time_mid = new BitSet(16);
+               BitSet time_hi_and_version = new BitSet(16);
+
+               for (int i = 0; i < 60; i++) {
+                       if (i < 32)
+                               time_low.set(i, timeBits.get(i));
+                       else if (i < 48)
+                               time_mid.set(i - 32, timeBits.get(i));
+                       else
+                               time_hi_and_version.set(i - 48, timeBits.get(i));
+               }
+               // version
+               time_hi_and_version.set(12, true);
+               time_hi_and_version.set(13, false);
+               time_hi_and_version.set(14, false);
+               time_hi_and_version.set(15, false);
+
+               // clock sequence
+               BitSet clk_seq_hi_res = new BitSet(8);
+               BitSet clk_seq_low = new BitSet(8);
+               for (int i = 0; i < 8; i++) {
+                       clk_seq_low.set(i, clockSequenceBits.get(i));
+               }
+               for (int i = 8; i < 14; i++) {
+                       clk_seq_hi_res.set(i - 8, clockSequenceBits.get(i));
+               }
+               // variant
+               clk_seq_hi_res.set(6, false);
+               clk_seq_hi_res.set(7, true);
+
+//             String str = toHexString(time_low.toLongArray()[0]) + "-" + toHexString(time_mid.toLongArray()[0]) + "-"
+//                             + toHexString(time_hi_and_version.toLongArray()[0]) + "-"
+//                             + toHexString(clock_seq_hi_and_reserved.toLongArray()[0]) + toHexString(clock_seq_low.toLongArray()[0])
+//                             + "-" + toHexString(node.toLongArray()[0]);
+//             UUID uuid = UUID.fromString(str);
+
+               BitSet uuidBits = new BitSet(128);
+               for (int i = 0; i < 128; i++) {
+                       if (i < 48)
+                               uuidBits.set(i, node.get(i));
+                       else if (i < 56)
+                               uuidBits.set(i, clk_seq_low.get(i - 48));
+                       else if (i < 64)
+                               uuidBits.set(i, clk_seq_hi_res.get(i - 56));
+                       else if (i < 80)
+                               uuidBits.set(i, time_hi_and_version.get(i - 64));
+                       else if (i < 96)
+                               uuidBits.set(i, time_mid.get(i - 80));
+                       else
+                               uuidBits.set(i, time_low.get(i - 96));
+               }
+
+               long[] uuidLongs = uuidBits.toLongArray();
+               assert uuidLongs.length == 2;
+               UUID uuid = new UUID(uuidLongs[1], uuidLongs[0]);
+
+               // tests
+               assert uuid.node() == node.toLongArray()[0];
+               assert uuid.timestamp() == timeNanos;
+               assert uuid.clockSequence() == clockSequence;
+               assert uuid.version() == 1;
+               assert uuid.variant() == 2;
+               return uuid;
+       }
+
+       public static String toBinaryString(UUID uuid, int charsPerSegment, char separator) {
+               String binaryString = toBinaryString(uuid);
+               StringBuilder sb = new StringBuilder(128 + (128 / charsPerSegment));
+               for (int i = 0; i < binaryString.length(); i++) {
+                       if (i != 0 && i % charsPerSegment == 0)
+                               sb.append(separator);
+                       sb.append(binaryString.charAt(i));
+               }
+               return sb.toString();
+       }
+
+       public static String toBinaryString(UUID uuid) {
+               String most = zeroTo64Chars(Long.toBinaryString(uuid.getMostSignificantBits()));
+               String least = zeroTo64Chars(Long.toBinaryString(uuid.getLeastSignificantBits()));
+               String binaryString = most + least;
+               assert binaryString.length() == 128;
+               return binaryString;
+       }
+
+       private static String zeroTo64Chars(String str) {
+               assert str.length() <= 64;
+               if (str.length() < 64) {
+                       StringBuilder sb = new StringBuilder(64);
+                       for (int i = 0; i < 64 - str.length(); i++)
+                               sb.append('0');
+                       sb.append(str);
+                       return sb.toString();
+               } else
+                       return str;
+       }
+
+       public static String compactToStd(String compact) {
+               if (compact.length() != 32)
+                       throw new IllegalArgumentException(
+                                       "Compact UUID '" + compact + "' has length " + compact.length() + " and not 32.");
+               StringBuilder sb = new StringBuilder(36);
+               for (int i = 0; i < 32; i++) {
+                       if (i == 8 || i == 12 || i == 16 || i == 20)
+                               sb.append('-');
+                       sb.append(compact.charAt(i));
+               }
+               String std = sb.toString();
+               assert std.length() == 36;
+               assert UUID.fromString(std).toString().equals(std);
+               return std;
+       }
+
+       public static UUID compactToUuid(String compact) {
+               return UUID.fromString(compactToStd(compact));
+       }
+
+       public static boolean isRandom(UUID uuid) {
+               return uuid.version() == 4;
+       }
+
+       public static boolean isTimeBased(UUID uuid) {
+               return uuid.version() == 1;
+       }
+
+       public static boolean isTimeBasedRandom(UUID uuid) {
+               if (uuid.version() == 1) {
+                       BitSet node = BitSet.valueOf(new long[] { uuid.node() });
+                       return node.get(0);
+               } else
+                       return false;
+       }
+
+       public static boolean isNameBased(UUID uuid) {
+               return uuid.version() == 3 || uuid.version() == 5;
+       }
+
+       /** Singleton. */
+       private UuidUtils() {
+       }
+
+       public final static void main(String[] args) throws Exception {
+               UUID uuid;
+
+//             uuid = compactToUuid("996b1f5122de4b2f94e49168d32f22d1");
+//             System.out.println(uuid.toString() + ", isRandom=" + isRandom(uuid));
+
+               // warm up before measuring perf
+               for (int i = 0; i < 10; i++) {
+                       UUID.randomUUID();
+                       timeUUID();
+                       timeUUIDwithRandomNode();
+                       timeBasedRandomUUID();
+                       timeBasedUUID();
+               }
+
+               long begin;
+               long duration;
+
+               begin = System.nanoTime();
+               uuid = UUID.randomUUID();
+               duration = System.nanoTime() - begin;
+               System.out.println(uuid.toString() + " in " + duration + " ns, isRandom=" + isRandom(uuid));
+
+               begin = System.nanoTime();
+               uuid = timeUUID();
+               duration = System.nanoTime() - begin;
+               System.out.println(uuid.toString() + " in " + duration + " ns, isTimeBasedRandom=" + isTimeBasedRandom(uuid));
+
+               begin = System.nanoTime();
+               uuid = timeUUIDwithRandomNode();
+               duration = System.nanoTime() - begin;
+               System.out.println(uuid.toString() + " in " + duration + " ns, isTimeBasedRandom=" + isTimeBasedRandom(uuid));
+
+               begin = System.nanoTime();
+               uuid = timeBasedUUID();
+               duration = System.nanoTime() - begin;
+               System.out.println(uuid.toString() + " in " + duration + " ns, isTimeBasedRandom=" + isTimeBasedRandom(uuid));
+
+               begin = System.nanoTime();
+               uuid = timeBasedRandomUUID();
+               duration = System.nanoTime() - begin;
+               System.out.println(uuid.toString() + " in " + duration + " ns, isTimeBasedRandom=" + isTimeBasedRandom(uuid));
+//             System.out.println(toBinaryString(uuid, 8, ' '));
+//             System.out.println(toBinaryString(uuid, 16, '\n'));
+       }
+}
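
A minimal usage sketch for the UuidUtils helpers added above (method names are taken from the class as committed; the compact string and printed values are illustrative only):

	import java.util.UUID;

	import org.argeo.util.UuidUtils;

	public class UuidUtilsExample {
		public static void main(String[] args) {
			// time-based (version 1) UUID, using the hardware address when available
			UUID timeUuid = UuidUtils.timeUUID();
			System.out.println(timeUuid + " version=" + timeUuid.version());

			// time-based UUID with a random node id (multicast bit set as random marker)
			UUID randomNode = UuidUtils.timeUUIDwithRandomNode();
			System.out.println("time-based random: " + UuidUtils.isTimeBasedRandom(randomNode));

			// expand a 32-character compact form to the standard dashed representation
			UUID parsed = UuidUtils.compactToUuid("00112233445566778899aabbccddeeff");
			System.out.println(parsed); // 00112233-4455-6677-8899-aabbccddeeff
		}
	}
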
diff --git a/org.argeo.core/src/org/argeo/util/package-info.java b/org.argeo.core/src/org/argeo/util/package-info.java
new file mode 100644 (file)
index 0000000..4354b0a
--- /dev/null
@@ -0,0 +1,2 @@
+/** Generic Java utilities. */
+package org.argeo.util;
\ No newline at end of file
index 98ffe0274c22d1d3e0355b2c415b9d5c7d255ba3..df5b3baa4390c3168d6c62784f236c4853e4c54a 100644 (file)
                        <artifactId>org.argeo.eclipse.ui</artifactId>
                        <version>2.1.89-SNAPSHOT</version>
                </dependency>
-               <dependency>
-                       <groupId>org.argeo.commons</groupId>
-                       <artifactId>org.argeo.util</artifactId>
-                       <version>2.1.89-SNAPSHOT</version>
-               </dependency>
 
                <!-- UI -->
                <dependency>
index cd379dca9e8ad05f7527557658282f3f0740ea44..b82dd50ea701f552cd6e827228934d3279744051 100644 (file)
@@ -1,4 +1,4 @@
 source.. = src/,\
            ext/test/
-additional.bundles = org.junit,\
-                     org.argeo.util
+additional.bundles = org.junit
+
index 9196aa5b478da6edaa7c699004c7e519e45d91b8..a61f69dab33513dfab2e1d35f24b68fb41000dc4 100644 (file)
@@ -9,12 +9,4 @@
        </parent>
        <artifactId>org.argeo.enterprise</artifactId>
        <name>Commons Enterprise</name>
-       <dependencies>
-               <dependency>
-                       <groupId>org.argeo.commons</groupId>
-                       <artifactId>org.argeo.util</artifactId>
-                       <version>2.1.89-SNAPSHOT</version>
-                       <scope>test</scope>
-               </dependency>
-       </dependencies>
 </project>
\ No newline at end of file
index 551686bd0c340307843e4b59d113fdcf500ffa96..4df501710b990c00327684b2974044b3d6707ca4 100644 (file)
@@ -1,11 +1,5 @@
 Provide-Capability:\
 cms.datamodel; name=jcrx; cnd=/org/argeo/jcr/jcrx.cnd; abstract=true
 
-Import-Package: junit.framework;resolution:=optional,\
-org.xml.sax;version="0.0.0",\
-org.apache.jackrabbit.*;resolution:=optional,\
-org.apache.jackrabbit.webdav.jcr;resolution:=optional,\
-org.apache.jackrabbit.webdav.server;resolution:=optional,\
-org.h2;resolution:=optional,\
-org.postgresql;resolution:=optional,\
+Import-Package:\
 *
index 027445903ad09a10ae524a8876353b12a7bf178f..acb5245c8a756eb502876d987558090d78d83194 100644 (file)
@@ -3,26 +3,3 @@ source.. = src/,\
 output.. = bin/
 bin.includes = META-INF/,\
                .
-additional.bundles = org.junit,\
-                     org.apache.jackrabbit.core,\
-                     javax.jcr,\
-                     org.apache.jackrabbit.api,\
-                     org.apache.jackrabbit.data,\
-                     org.apache.jackrabbit.jcr.commons,\
-                     org.apache.jackrabbit.spi,\
-                     org.apache.jackrabbit.spi.commons,\
-                     org.slf4j.api,\
-                     org.slf4j.log4j12,\
-                     org.apache.log4j,\
-                     org.apache.commons.collections,\
-                     EDU.oswego.cs.dl.util.concurrent,\
-                     org.apache.lucene,\
-                     org.apache.tika.core,\
-                     org.apache.commons.dbcp,\
-                     org.apache.commons.pool,\
-                     com.google.guava,\
-                     org.apache.jackrabbit.jcr2spi,\
-                     org.apache.jackrabbit.spi2dav,\
-                     org.apache.httpcomponents.httpclient,\
-                     org.apache.httpcomponents.httpcore,\
-                     org.apache.tika.parsers
diff --git a/org.argeo.jcr/ext/test/log4j.properties b/org.argeo.jcr/ext/test/log4j.properties
deleted file mode 100644 (file)
index 3d75289..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-log4j.rootLogger=WARN, console
-
-## Levels
-log4j.logger.org.argeo=DEBUG
-log4j.logger.org.apache.jackrabbit=OFF
-
-## Appenders
-# console is set to be a ConsoleAppender.
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-
-# console uses PatternLayout.
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-#log4j.appender.console.layout.ConversionPattern= %-5p %d{ISO8601} %m - %c%n
-log4j.appender.console.layout.ConversionPattern=%m%n
diff --git a/org.argeo.jcr/ext/test/org/argeo/fs/FsUtilsTest.java b/org.argeo.jcr/ext/test/org/argeo/fs/FsUtilsTest.java
deleted file mode 100644 (file)
index 793216b..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-package org.argeo.fs;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-
-/** {@link FsUtils} tests. */
-public class FsUtilsTest {
-       final static String FILE00 = "file00";
-       final static String FILE01 = "file01";
-       final static String SUB_DIR = "subDir";
-
-       public void testDelete() throws IOException {
-               Path dir = createDir00();
-               assert Files.exists(dir);
-               FsUtils.delete(dir);
-               assert !Files.exists(dir);
-       }
-
-       public void testSync() throws IOException {
-               Path source = createDir00();
-               Path target = Files.createTempDirectory(getClass().getName());
-               FsUtils.sync(source, target);
-               assert Files.exists(target.resolve(FILE00));
-               assert Files.exists(target.resolve(SUB_DIR));
-               assert Files.exists(target.resolve(SUB_DIR + File.separator + FILE01));
-               FsUtils.delete(source.resolve(SUB_DIR));
-               FsUtils.sync(source, target, true);
-               assert Files.exists(target.resolve(FILE00));
-               assert !Files.exists(target.resolve(SUB_DIR));
-               assert !Files.exists(target.resolve(SUB_DIR + File.separator + FILE01));
-
-               // clean up
-               FsUtils.delete(source);
-               FsUtils.delete(target);
-
-       }
-
-       Path createDir00() throws IOException {
-               Path base = Files.createTempDirectory(getClass().getName());
-               base.toFile().deleteOnExit();
-               Files.createFile(base.resolve(FILE00)).toFile().deleteOnExit();
-               Path subDir = Files.createDirectories(base.resolve(SUB_DIR));
-               subDir.toFile().deleteOnExit();
-               Files.createFile(subDir.resolve(FILE01)).toFile().deleteOnExit();
-               return base;
-       }
-}
diff --git a/org.argeo.jcr/ext/test/org/argeo/jcr/fs/JcrFileSystemTest.java b/org.argeo.jcr/ext/test/org/argeo/jcr/fs/JcrFileSystemTest.java
deleted file mode 100644 (file)
index 2d03b8f..0000000
+++ /dev/null
@@ -1,191 +0,0 @@
-package org.argeo.jcr.fs;
-
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.net.URI;
-import java.nio.file.DirectoryStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.nio.file.attribute.FileTime;
-import java.nio.file.spi.FileSystemProvider;
-import java.util.Arrays;
-import java.util.Map;
-
-import javax.jcr.Property;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.jcr.nodetype.NodeType;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.core.RepositoryImpl;
-import org.argeo.jackrabbit.fs.JackrabbitMemoryFsProvider;
-
-import junit.framework.TestCase;
-
-public class JcrFileSystemTest extends TestCase {
-       private final static Log log = LogFactory.getLog(JcrFileSystemTest.class);
-
-       public void testMounts() throws Exception {
-               JackrabbitMemoryFsProvider fsProvider = new JackrabbitMemoryFsProvider() {
-
-                       @Override
-                       protected void postRepositoryCreation(RepositoryImpl repositoryImpl) throws RepositoryException {
-                               // create workspace
-                               Session session = login();
-                               session.getWorkspace().createWorkspace("test");
-                       }
-
-               };
-
-               Path rootPath = fsProvider.getPath(new URI("jcr+memory:/"));
-               log.debug("Got root " + rootPath);
-               Path testDir = rootPath.resolve("testDir");
-               Files.createDirectory(testDir);
-
-               Path testMount = fsProvider.getPath(new URI("jcr+memory:/test"));
-               log.debug("Test path");
-               assertEquals(rootPath, testMount.getParent());
-               assertEquals(testMount.getFileName(), rootPath.relativize(testMount));
-
-               Path testPath = testMount.resolve("test.txt");
-               log.debug("Create file " + testPath);
-               Files.createFile(testPath);
-               BasicFileAttributes bfa = Files.readAttributes(testPath, BasicFileAttributes.class);
-               FileTime ft = bfa.creationTime();
-               assertNotNull(ft);
-               assertTrue(bfa.isRegularFile());
-               log.debug("Created " + testPath + " (" + ft + ")");
-               Files.delete(testPath);
-               log.debug("Deleted " + testPath);
-
-               // Browse directories from root
-               DirectoryStream<Path> files = Files.newDirectoryStream(rootPath);
-               int directoryCount = 0;
-               for (Path file : files) {
-                       if (Files.isDirectory(file)) {
-                               directoryCount++;
-                       }
-               }
-               assertEquals(2, directoryCount);
-
-               // Browse directories from mount
-               Path mountSubDir = testMount.resolve("mountSubDir");
-               Files.createDirectory(mountSubDir);
-               Path otherSubDir = testMount.resolve("otherSubDir");
-               Files.createDirectory(otherSubDir);
-               testPath = testMount.resolve("test.txt");
-               Files.createFile(testPath);
-               files = Files.newDirectoryStream(testMount);
-               int fileCount = 0;
-               for (Path file : files) {
-                       fileCount++;
-               }
-               assertEquals(3, fileCount);
-
-       }
-
-       public void testSimple() throws Exception {
-               FileSystemProvider fsProvider = new JackrabbitMemoryFsProvider();
-
-               // Simple file
-               Path rootPath = fsProvider.getPath(new URI("jcr+memory:/"));
-               log.debug("Got root " + rootPath);
-               Path testPath = fsProvider.getPath(new URI("jcr+memory:/test.txt"));
-               log.debug("Test path");
-               assertEquals("test.txt", testPath.getFileName().toString());
-               assertEquals(rootPath, testPath.getParent());
-               assertEquals(testPath.getFileName(), rootPath.relativize(testPath));
-               // relativize self should be empty path
-               Path selfRelative = testPath.relativize(testPath);
-               assertEquals("", selfRelative.toString());
-
-               log.debug("Create file " + testPath);
-               Files.createFile(testPath);
-               BasicFileAttributes bfa = Files.readAttributes(testPath, BasicFileAttributes.class);
-               FileTime ft = bfa.creationTime();
-               assertNotNull(ft);
-               assertTrue(bfa.isRegularFile());
-               log.debug("Created " + testPath + " (" + ft + ")");
-               Files.delete(testPath);
-               log.debug("Deleted " + testPath);
-               String txt = "TEST\nTEST2\n";
-               byte[] arr = txt.getBytes();
-               Files.write(testPath, arr);
-               log.debug("Wrote " + testPath);
-               byte[] read = Files.readAllBytes(testPath);
-               assertTrue(Arrays.equals(arr, read));
-               assertEquals(txt, new String(read));
-               log.debug("Read " + testPath);
-               Path testDir = rootPath.resolve("testDir");
-               log.debug("Resolved " + testDir);
-               // Copy
-               Files.createDirectory(testDir);
-               log.debug("Created directory " + testDir);
-               Path subsubdir = Files.createDirectories(testDir.resolve("subdir/subsubdir"));
-               log.debug("Created sub directories " + subsubdir);
-               Path copiedFile = testDir.resolve("copiedFile.txt");
-               log.debug("Resolved " + copiedFile);
-               Path relativeCopiedFile = testDir.relativize(copiedFile);
-               assertEquals(copiedFile.getFileName().toString(), relativeCopiedFile.toString());
-               log.debug("Relative copied file " + relativeCopiedFile);
-               try (OutputStream out = Files.newOutputStream(copiedFile); InputStream in = Files.newInputStream(testPath)) {
-                       IOUtils.copy(in, out);
-               }
-               log.debug("Copied " + testPath + " to " + copiedFile);
-               Files.delete(testPath);
-               log.debug("Deleted " + testPath);
-               byte[] copiedRead = Files.readAllBytes(copiedFile);
-               assertTrue(Arrays.equals(copiedRead, read));
-               log.debug("Read " + copiedFile);
-               // Browse directories
-               DirectoryStream<Path> files = Files.newDirectoryStream(testDir);
-               int fileCount = 0;
-               Path listedFile = null;
-               for (Path file : files) {
-                       fileCount++;
-                       if (!Files.isDirectory(file))
-                               listedFile = file;
-               }
-               assertEquals(2, fileCount);
-               assertEquals(copiedFile, listedFile);
-               assertEquals(copiedFile.toString(), listedFile.toString());
-               log.debug("Listed " + testDir);
-               // Generic attributes
-               Map<String, Object> attrs = Files.readAttributes(copiedFile, "*");
-               assertEquals(3, attrs.size());
-               log.debug("Read attributes of " + copiedFile + ": " + attrs.keySet());
-               // Direct node access
-               NodeFileAttributes nfa = Files.readAttributes(copiedFile, NodeFileAttributes.class);
-               nfa.getNode().addMixin(NodeType.MIX_LANGUAGE);
-               nfa.getNode().getSession().save();
-               log.debug("Add mix:language");
-               Files.setAttribute(copiedFile, Property.JCR_LANGUAGE, "fr");
-               log.debug("Set language");
-               attrs = Files.readAttributes(copiedFile, "*");
-               assertEquals(4, attrs.size());
-               log.debug("Read attributes of " + copiedFile + ": " + attrs.keySet());
-       }
-
-       public void testIllegalCharacters() throws Exception {
-               FileSystemProvider fsProvider = new JackrabbitMemoryFsProvider();
-               String fileName = "tüßçt[1].txt";
-               String pathStr = "/testDir/" + fileName;
-               Path testDir = fsProvider.getPath(new URI("jcr+memory:/testDir"));
-               Files.createDirectory(testDir);
-               Path testPath = testDir.resolve(fileName);
-               assertEquals(pathStr, testPath.toString());
-               Files.createFile(testPath);
-               DirectoryStream<Path> files = Files.newDirectoryStream(testDir);
-               Path listedPath = files.iterator().next();
-               assertEquals(pathStr, listedPath.toString());
-
-               String dirName = "*[~WeirdDir~]*";
-               Path subDir = testDir.resolve(dirName);
-               Files.createDirectory(subDir);
-               subDir = testDir.resolve(dirName);
-               assertEquals(dirName, subDir.getFileName().toString());
-       }
-}
index d8587aa0240b91efce635bd51bba88e472e2b074..60b5daa853771247c4f9cc06c95be9ec186c8003 100644 (file)
@@ -8,11 +8,4 @@
        </parent>
        <artifactId>org.argeo.jcr</artifactId>
        <name>Commons JCR</name>
-       <dependencies>
-               <dependency>
-                       <groupId>org.argeo.commons</groupId>
-                       <artifactId>org.argeo.util</artifactId>
-                       <version>2.1.89-SNAPSHOT</version>
-               </dependency>
-       </dependencies>
 </project>
\ No newline at end of file
diff --git a/org.argeo.jcr/repository.xml b/org.argeo.jcr/repository.xml
deleted file mode 100644 (file)
index 745079e..0000000
+++ /dev/null
@@ -1,152 +0,0 @@
-<?xml version="1.0"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<!DOCTYPE Repository
-          PUBLIC "-//The Apache Software Foundation//DTD Jackrabbit 2.0//EN"
-          "http://jackrabbit.apache.org/dtd/repository-2.0.dtd">
-
-<!-- Example Repository Configuration File
-     Used by
-     - org.apache.jackrabbit.core.config.RepositoryConfigTest.java
-     -
--->
-<Repository>
-    <!--
-        virtual file system where the repository stores global state
-        (e.g. registered namespaces, custom node types, etc.)
-    -->
-    <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
-        <param name="path" value="${rep.home}/repository"/>
-    </FileSystem>
-
-    <!--
-        data store configuration
-    -->
-    <DataStore class="org.apache.jackrabbit.core.data.FileDataStore"/>
-
-    <!--
-        security configuration
-    -->
-    <Security appName="Jackrabbit">
-        <!--
-            security manager:
-            class: FQN of class implementing the JackrabbitSecurityManager interface
-        -->
-        <SecurityManager class="org.apache.jackrabbit.core.DefaultSecurityManager" workspaceName="security">
-            <!--
-            workspace access:
-            class: FQN of class implementing the WorkspaceAccessManager interface
-            -->
-            <!-- <WorkspaceAccessManager class="..."/> -->
-            <!-- <param name="config" value="${rep.home}/security.xml"/> -->
-        </SecurityManager>
-
-        <!--
-            access manager:
-            class: FQN of class implementing the AccessManager interface
-        -->
-        <AccessManager class="org.apache.jackrabbit.core.security.DefaultAccessManager">
-            <!-- <param name="config" value="${rep.home}/access.xml"/> -->
-        </AccessManager>
-
-        <LoginModule class="org.apache.jackrabbit.core.security.authentication.DefaultLoginModule">
-           <!--
-              anonymous user name ('anonymous' is the default value)
-            -->
-           <param name="anonymousId" value="anonymous"/>
-           <!--
-              administrator user id (default value if param is missing is 'admin')
-            -->
-           <param name="adminId" value="admin"/>
-        </LoginModule>
-    </Security>
-
-    <!--
-        location of workspaces root directory and name of default workspace
-    -->
-    <Workspaces rootPath="${rep.home}/workspaces" defaultWorkspace="default"/>
-    <!--
-        workspace configuration template:
-        used to create the initial workspace if there's no workspace yet
-    -->
-    <Workspace name="${wsp.name}">
-        <!--
-            virtual file system of the workspace:
-            class: FQN of class implementing the FileSystem interface
-        -->
-        <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
-            <param name="path" value="${wsp.home}"/>
-        </FileSystem>
-        <!--
-            persistence manager of the workspace:
-            class: FQN of class implementing the PersistenceManager interface
-        -->
-        <PersistenceManager class="org.apache.jackrabbit.core.persistence.pool.DerbyPersistenceManager">
-          <param name="url" value="jdbc:derby:${wsp.home}/db;create=true"/>
-          <param name="schemaObjectPrefix" value="${wsp.name}_"/>
-        </PersistenceManager>
-        <!--
-            Search index and the file system it uses.
-            class: FQN of class implementing the QueryHandler interface
-        -->
-        <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-            <param name="path" value="${wsp.home}/index"/>
-            <param name="supportHighlighting" value="true"/>
-        </SearchIndex>
-    </Workspace>
-
-    <!--
-        Configures the versioning
-    -->
-    <Versioning rootPath="${rep.home}/version">
-        <!--
-            Configures the filesystem to use for versioning for the respective
-            persistence manager
-        -->
-        <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
-            <param name="path" value="${rep.home}/version" />
-        </FileSystem>
-
-        <!--
-            Configures the persistence manager to be used for persisting version state.
-            Please note that the current versioning implementation is based on
-            a 'normal' persistence manager, but this could change in future
-            implementations.
-        -->
-        <PersistenceManager class="org.apache.jackrabbit.core.persistence.pool.DerbyPersistenceManager">
-          <param name="url" value="jdbc:derby:${rep.home}/version/db;create=true"/>
-          <param name="schemaObjectPrefix" value="version_"/>
-        </PersistenceManager>
-    </Versioning>
-
-    <!--
-        Search index for content that is shared repository wide
-        (/jcr:system tree, contains mainly versions)
-    -->
-    <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-        <param name="path" value="${rep.home}/repository/index"/>
-        <param name="supportHighlighting" value="true"/>
-    </SearchIndex>
-
-    <!--
-        Run with a cluster journal
-    -->
-    <Cluster id="node1">
-        <Journal class="org.apache.jackrabbit.core.journal.MemoryJournal"/>
-    </Cluster>
-</Repository>
diff --git a/org.argeo.jcr/repository/repository/meta/rootUUID b/org.argeo.jcr/repository/repository/meta/rootUUID
deleted file mode 100644 (file)
index df09293..0000000
+++ /dev/null
@@ -1 +0,0 @@
-cafebabe-cafe-babe-cafe-babecafebabe
\ No newline at end of file
diff --git a/org.argeo.jcr/repository/repository/namespaces/ns_idx.properties b/org.argeo.jcr/repository/repository/namespaces/ns_idx.properties
deleted file mode 100644 (file)
index 7e757f0..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-#Fri Oct 28 20:14:30 CEST 2016
-http\://www.jcp.org/jcr/1.0=1570322
-internal=16762557
-http\://www.jcp.org/jcr/sv/1.0=16463688
-http\://www.jcp.org/jcr/mix/1.0=14361695
-http\://www.jcp.org/jcr/nt/1.0=5688619
-.empty.key=0
-http\://www.w3.org/XML/1998/namespace=6829023
diff --git a/org.argeo.jcr/repository/repository/namespaces/ns_reg.properties b/org.argeo.jcr/repository/repository/namespaces/ns_reg.properties
deleted file mode 100644 (file)
index f40bf21..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-#Fri Oct 28 20:14:30 CEST 2016
-jcr=http\://www.jcp.org/jcr/1.0
-sv=http\://www.jcp.org/jcr/sv/1.0
-xml=http\://www.w3.org/XML/1998/namespace
-nt=http\://www.jcp.org/jcr/nt/1.0
-mix=http\://www.jcp.org/jcr/mix/1.0
-rep=internal
-.empty.key=
diff --git a/org.argeo.jcr/repository/workspaces/default/workspace.xml b/org.argeo.jcr/repository/workspaces/default/workspace.xml
deleted file mode 100644 (file)
index a32f9c7..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?><Workspace name="default">
-        <!--
-            virtual file system of the workspace:
-            class: FQN of class implementing the FileSystem interface
-        -->
-        <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
-            <param name="path" value="${wsp.home}"/>
-        </FileSystem>
-        <!--
-            persistence manager of the workspace:
-            class: FQN of class implementing the PersistenceManager interface
-        -->
-        <PersistenceManager class="org.apache.jackrabbit.core.persistence.pool.DerbyPersistenceManager">
-          <param name="url" value="jdbc:derby:${wsp.home}/db;create=true"/>
-          <param name="schemaObjectPrefix" value="${wsp.name}_"/>
-        </PersistenceManager>
-        <!--
-            Search index and the file system it uses.
-            class: FQN of class implementing the QueryHandler interface
-        -->
-        <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-            <param name="path" value="${wsp.home}/index"/>
-            <param name="supportHighlighting" value="true"/>
-        </SearchIndex>
-    </Workspace>
diff --git a/org.argeo.jcr/src/org/argeo/fs/BasicSyncFileVisitor.java b/org.argeo.jcr/src/org/argeo/fs/BasicSyncFileVisitor.java
deleted file mode 100644 (file)
index 03bac59..0000000
+++ /dev/null
@@ -1,164 +0,0 @@
-package org.argeo.fs;
-
-import java.io.IOException;
-import java.nio.file.FileVisitResult;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.SimpleFileVisitor;
-import java.nio.file.StandardCopyOption;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.nio.file.attribute.FileTime;
-
-import org.argeo.sync.SyncResult;
-
-/** Synchronises two directory structures. */
-public class BasicSyncFileVisitor extends SimpleFileVisitor<Path> {
-       // TODO make it configurable
-       private boolean trace = false;
-
-       private final Path sourceBasePath;
-       private final Path targetBasePath;
-       private final boolean delete;
-       private final boolean recursive;
-
-       private SyncResult<Path> syncResult = new SyncResult<>();
-
-       public BasicSyncFileVisitor(Path sourceBasePath, Path targetBasePath, boolean delete, boolean recursive) {
-               this.sourceBasePath = sourceBasePath;
-               this.targetBasePath = targetBasePath;
-               this.delete = delete;
-               this.recursive = recursive;
-       }
-
-       @Override
-       public FileVisitResult preVisitDirectory(Path sourceDir, BasicFileAttributes attrs) throws IOException {
-               if (!recursive && !sourceDir.equals(sourceBasePath))
-                       return FileVisitResult.SKIP_SUBTREE;
-               Path targetDir = toTargetPath(sourceDir);
-               Files.createDirectories(targetDir);
-               return FileVisitResult.CONTINUE;
-       }
-
-       @Override
-       public FileVisitResult postVisitDirectory(Path sourceDir, IOException exc) throws IOException {
-               if (delete) {
-                       Path targetDir = toTargetPath(sourceDir);
-                       for (Path targetPath : Files.newDirectoryStream(targetDir)) {
-                               Path sourcePath = sourceDir.resolve(targetPath.getFileName());
-                               if (!Files.exists(sourcePath)) {
-                                       try {
-                                               FsUtils.delete(targetPath);
-                                               deleted(targetPath);
-                                       } catch (Exception e) {
-                                               deleteFailed(targetPath, exc);
-                                       }
-                               }
-                       }
-               }
-               return FileVisitResult.CONTINUE;
-       }
-
-       @Override
-       public FileVisitResult visitFile(Path sourceFile, BasicFileAttributes attrs) throws IOException {
-               Path targetFile = toTargetPath(sourceFile);
-               try {
-                       if (!Files.exists(targetFile)) {
-                               Files.copy(sourceFile, targetFile);
-                               added(sourceFile, targetFile);
-                       } else {
-                               if (shouldOverwrite(sourceFile, targetFile)) {
-                                       Files.copy(sourceFile, targetFile, StandardCopyOption.REPLACE_EXISTING);
-                               }
-                       }
-               } catch (Exception e) {
-                       copyFailed(sourceFile, targetFile, e);
-               }
-               return FileVisitResult.CONTINUE;
-       }
-
-       protected boolean shouldOverwrite(Path sourceFile, Path targetFile) throws IOException {
-               long sourceSize = Files.size(sourceFile);
-               long targetSize = Files.size(targetFile);
-               if (sourceSize != targetSize) {
-                       return true;
-               }
-               FileTime sourceLastModif = Files.getLastModifiedTime(sourceFile);
-               FileTime targetLastModif = Files.getLastModifiedTime(targetFile);
-               if (sourceLastModif.compareTo(targetLastModif) > 0)
-                       return true;
-               return shouldOverwriteLaterSameSize(sourceFile, targetFile);
-       }
-
-       protected boolean shouldOverwriteLaterSameSize(Path sourceFile, Path targetFile) {
-               return false;
-       }
-
-//     @Override
-//     public FileVisitResult visitFileFailed(Path sourceFile, IOException exc) throws IOException {
-//             error("Cannot sync " + sourceFile, exc);
-//             return FileVisitResult.CONTINUE;
-//     }
-
-       private Path toTargetPath(Path sourcePath) {
-               Path relativePath = sourceBasePath.relativize(sourcePath);
-               Path targetPath = targetBasePath.resolve(relativePath.toString());
-               return targetPath;
-       }
-
-       public Path getSourceBasePath() {
-               return sourceBasePath;
-       }
-
-       public Path getTargetBasePath() {
-               return targetBasePath;
-       }
-
-       protected void added(Path sourcePath, Path targetPath) {
-               syncResult.getAdded().add(targetPath);
-               if (isTraceEnabled())
-                       trace("Added " + sourcePath + " as " + targetPath);
-       }
-
-       protected void modified(Path sourcePath, Path targetPath) {
-               syncResult.getModified().add(targetPath);
-               if (isTraceEnabled())
-                       trace("Overwritten from " + sourcePath + " to " + targetPath);
-       }
-
-       protected void copyFailed(Path sourcePath, Path targetPath, Exception e) {
-               syncResult.addError(sourcePath, targetPath, e);
-               if (isTraceEnabled())
-                       error("Cannot copy " + sourcePath + " to " + targetPath, e);
-       }
-
-       protected void deleted(Path targetPath) {
-               syncResult.getDeleted().add(targetPath);
-               if (isTraceEnabled())
-                       trace("Deleted " + targetPath);
-       }
-
-       protected void deleteFailed(Path targetPath, Exception e) {
-               syncResult.addError(null, targetPath, e);
-               if (isTraceEnabled())
-                       error("Cannot delete " + targetPath, e);
-       }
-
-       /** Log error. */
-       protected void error(Object obj, Throwable e) {
-               System.err.println(obj);
-               e.printStackTrace();
-       }
-
-       protected boolean isTraceEnabled() {
-               return trace;
-       }
-
-       protected void trace(Object obj) {
-               System.out.println(obj);
-       }
-
-       public SyncResult<Path> getSyncResult() {
-               return syncResult;
-       }
-
-}
diff --git a/org.argeo.jcr/src/org/argeo/fs/FsUtils.java b/org.argeo.jcr/src/org/argeo/fs/FsUtils.java
deleted file mode 100644 (file)
index c96f56e..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-package org.argeo.fs;
-
-import java.io.IOException;
-import java.nio.file.FileVisitResult;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.SimpleFileVisitor;
-import java.nio.file.attribute.BasicFileAttributes;
-
-/** Utilities around the standard Java file abstractions. */
-public class FsUtils {
-       /** Sync a source path with a target path. */
-       public static void sync(Path sourceBasePath, Path targetBasePath) {
-               sync(sourceBasePath, targetBasePath, false);
-       }
-
-       /** Sync a source path with a target path. */
-       public static void sync(Path sourceBasePath, Path targetBasePath, boolean delete) {
-               sync(new BasicSyncFileVisitor(sourceBasePath, targetBasePath, delete, true));
-       }
-
-       public static void sync(BasicSyncFileVisitor syncFileVisitor) {
-               try {
-                       Files.walkFileTree(syncFileVisitor.getSourceBasePath(), syncFileVisitor);
-               } catch (Exception e) {
-                       throw new RuntimeException("Cannot sync " + syncFileVisitor.getSourceBasePath() + " with "
-                                       + syncFileVisitor.getTargetBasePath(), e);
-               }
-       }
-
-       /** Deletes this path, recursively if needed. */
-       public static void delete(Path path) {
-               try {
-                       Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
-                               @Override
-                               public FileVisitResult postVisitDirectory(Path directory, IOException e) throws IOException {
-                                       if (e != null)
-                                               throw e;
-                                       Files.delete(directory);
-                                       return FileVisitResult.CONTINUE;
-                               }
-
-                               @Override
-                               public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
-                                       Files.delete(file);
-                                       return FileVisitResult.CONTINUE;
-                               }
-                       });
-               } catch (IOException e) {
-                       throw new RuntimeException("Cannot delete " + path, e);
-               }
-       }
-
-       /** Singleton. */
-       private FsUtils() {
-       }
-
-}
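
A short sketch of how the sync and delete helpers in this file are typically used (the directory paths are hypothetical; the overwrite behaviour is the one defined by BasicSyncFileVisitor above):

	import java.nio.file.Path;
	import java.nio.file.Paths;

	import org.argeo.fs.FsUtils;

	public class FsUtilsExample {
		public static void main(String[] args) {
			// hypothetical source and target directories
			Path source = Paths.get("/tmp/source");
			Path target = Paths.get("/tmp/backup");

			// one-way sync: copies missing files, overwrites when the size differs or the source is newer
			FsUtils.sync(source, target);

			// delete=true also removes target entries that no longer exist under source
			FsUtils.sync(source, target, true);

			// recursively delete the target tree
			FsUtils.delete(target);
		}
	}
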
diff --git a/org.argeo.jcr/src/org/argeo/fs/package-info.java b/org.argeo.jcr/src/org/argeo/fs/package-info.java
deleted file mode 100644 (file)
index ea2de9e..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/** Generic file system utilities. */
-package org.argeo.fs;
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/JackrabbitAdminLoginModule.java b/org.argeo.jcr/src/org/argeo/jackrabbit/JackrabbitAdminLoginModule.java
deleted file mode 100644 (file)
index 7396c87..0000000
+++ /dev/null
@@ -1,48 +0,0 @@
-package org.argeo.jackrabbit;
-
-import java.util.Map;
-
-import javax.security.auth.Subject;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.login.LoginException;
-import javax.security.auth.spi.LoginModule;
-
-import org.apache.jackrabbit.core.security.SecurityConstants;
-import org.apache.jackrabbit.core.security.principal.AdminPrincipal;
-
-@Deprecated
-public class JackrabbitAdminLoginModule implements LoginModule {
-       private Subject subject;
-
-       @Override
-       public void initialize(Subject subject, CallbackHandler callbackHandler,
-                       Map<String, ?> sharedState, Map<String, ?> options) {
-               this.subject = subject;
-       }
-
-       @Override
-       public boolean login() throws LoginException {
-               // TODO check permission?
-               return true;
-       }
-
-       @Override
-       public boolean commit() throws LoginException {
-               subject.getPrincipals().add(
-                               new AdminPrincipal(SecurityConstants.ADMIN_ID));
-               return true;
-       }
-
-       @Override
-       public boolean abort() throws LoginException {
-               return true;
-       }
-
-       @Override
-       public boolean logout() throws LoginException {
-               subject.getPrincipals().removeAll(
-                               subject.getPrincipals(AdminPrincipal.class));
-               return true;
-       }
-
-}
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/JackrabbitDataModelMigration.java b/org.argeo.jcr/src/org/argeo/jackrabbit/JackrabbitDataModelMigration.java
deleted file mode 100644 (file)
index 838446d..0000000
+++ /dev/null
@@ -1,166 +0,0 @@
-package org.argeo.jackrabbit;
-
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.net.URL;
-
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.jackrabbit.commons.cnd.CndImporter;
-import org.apache.jackrabbit.commons.cnd.ParseException;
-import org.apache.jackrabbit.core.config.RepositoryConfig;
-import org.argeo.jcr.JcrException;
-import org.argeo.jcr.JcrCallback;
-import org.argeo.jcr.JcrUtils;
-
-/** Migrate the data in a Jackrabbit repository. */
-@Deprecated
-public class JackrabbitDataModelMigration implements Comparable<JackrabbitDataModelMigration> {
-//     private final static Log log = LogFactory.getLog(JackrabbitDataModelMigration.class);
-
-       private String dataModelNodePath;
-       private String targetVersion;
-       private URL migrationCnd;
-       private JcrCallback dataModification;
-
-       /**
-        * Expects an already started repository with the old data model to migrate.
-        * Expects to be run with admin rights (Repository.login() will be used).
-        * 
-        * @return true if a migration was performed and the repository needs to be
-        *         restarted and its caches cleared.
-        */
-       public Boolean migrate(Session session) {
-               long begin = System.currentTimeMillis();
-               Reader reader = null;
-               try {
-                       // check if already migrated
-                       if (!session.itemExists(dataModelNodePath)) {
-//                             log.warn("Node " + dataModelNodePath + " does not exist: nothing to migrate.");
-                               return false;
-                       }
-//                     Node dataModelNode = session.getNode(dataModelNodePath);
-//                     if (dataModelNode.hasProperty(ArgeoNames.ARGEO_DATA_MODEL_VERSION)) {
-//                             String currentVersion = dataModelNode.getProperty(
-//                                             ArgeoNames.ARGEO_DATA_MODEL_VERSION).getString();
-//                             if (compareVersions(currentVersion, targetVersion) >= 0) {
-//                                     log.info("Data model at version " + currentVersion
-//                                                     + ", no need to migrate.");
-//                                     return false;
-//                             }
-//                     }
-
-                       // apply transitional CND
-                       if (migrationCnd != null) {
-                               reader = new InputStreamReader(migrationCnd.openStream());
-                               CndImporter.registerNodeTypes(reader, session, true);
-                               session.save();
-//                             log.info("Registered migration node types from " + migrationCnd);
-                       }
-
-                       // modify data
-                       dataModification.execute(session);
-
-                       // apply changes
-                       session.save();
-
-                       long duration = System.currentTimeMillis() - begin;
-//                     log.info("Migration of data model " + dataModelNodePath + " to " + targetVersion + " performed in "
-//                                     + duration + "ms");
-                       return true;
-               } catch (RepositoryException e) {
-                       JcrUtils.discardQuietly(session);
-                       throw new JcrException(
-                                       "Migration of data model " + dataModelNodePath + " to " + targetVersion + " failed.", e);
-               } catch (ParseException | IOException e) {
-                       JcrUtils.discardQuietly(session);
-                       throw new RuntimeException(
-                                       "Migration of data model " + dataModelNodePath + " to " + targetVersion + " failed.", e);
-               } finally {
-                       JcrUtils.logoutQuietly(session);
-                       IOUtils.closeQuietly(reader);
-               }
-       }
-
-       protected static int compareVersions(String version1, String version2) {
-               // TODO do a proper version analysis and comparison
-               return version1.compareTo(version2);
-       }
-
-       /** To be called on a stopped repository. */
-       public static void clearRepositoryCaches(RepositoryConfig repositoryConfig) {
-               try {
-                       String customeNodeTypesPath = "/nodetypes/custom_nodetypes.xml";
-                       // FIXME causes weird error in Eclipse
-//                      repositoryConfig.getFileSystem().deleteFile(customeNodeTypesPath);
-//                     if (log.isDebugEnabled())
-//                             log.debug("Cleared " + customeNodeTypesPath);
-               } catch (RuntimeException e) {
-                       throw e;
-               }
-
-               // File customNodeTypes = new File(home.getPath()
-               // + "/repository/nodetypes/custom_nodetypes.xml");
-               // if (customNodeTypes.exists()) {
-               // customNodeTypes.delete();
-               // if (log.isDebugEnabled())
-               // log.debug("Cleared " + customNodeTypes);
-               // } else {
-               // log.warn("File " + customNodeTypes + " not found.");
-               // }
-       }
-
-       /*
-        * FOR USE IN (SORTED) SETS
-        */
-
-       public int compareTo(JackrabbitDataModelMigration dataModelMigration) {
-               // TODO make ordering smarter
-               if (dataModelNodePath.equals(dataModelMigration.dataModelNodePath))
-                       return compareVersions(targetVersion, dataModelMigration.targetVersion);
-               else
-                       return dataModelNodePath.compareTo(dataModelMigration.dataModelNodePath);
-       }
-
-       @Override
-       public boolean equals(Object obj) {
-               if (!(obj instanceof JackrabbitDataModelMigration))
-                       return false;
-               JackrabbitDataModelMigration dataModelMigration = (JackrabbitDataModelMigration) obj;
-               return dataModelNodePath.equals(dataModelMigration.dataModelNodePath)
-                               && targetVersion.equals(dataModelMigration.targetVersion);
-       }
-
-       @Override
-       public int hashCode() {
-               return targetVersion.hashCode();
-       }
-
-       public void setDataModelNodePath(String dataModelNodePath) {
-               this.dataModelNodePath = dataModelNodePath;
-       }
-
-       public void setTargetVersion(String targetVersion) {
-               this.targetVersion = targetVersion;
-       }
-
-       public void setMigrationCnd(URL migrationCnd) {
-               this.migrationCnd = migrationCnd;
-       }
-
-       public void setDataModification(JcrCallback dataModification) {
-               this.dataModification = dataModification;
-       }
-
-       public String getDataModelNodePath() {
-               return dataModelNodePath;
-       }
-
-       public String getTargetVersion() {
-               return targetVersion;
-       }
-
-}
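
The setters above are typically used to wire a migration as a bean. A configuration sketch with illustrative values; the CND resource (migrationCnd) and the JcrCallback applying the data changes (dataModification) are assumed to be defined elsewhere:

	JackrabbitDataModelMigration migration = new JackrabbitDataModelMigration();
	migration.setDataModelNodePath("/example/dataModel"); // node tracking the data model version (illustrative)
	migration.setTargetVersion("2.0");                    // compared via compareVersions() above
	migration.setMigrationCnd(migrationCnd);              // optional CND registered before the data modification
	migration.setDataModification(dataModification);      // JcrCallback performing the actual changes
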
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/client/ClientDavexRepositoryFactory.java b/org.argeo.jcr/src/org/argeo/jackrabbit/client/ClientDavexRepositoryFactory.java
deleted file mode 100644 (file)
index 77ad527..0000000
+++ /dev/null
@@ -1,26 +0,0 @@
-package org.argeo.jackrabbit.client;
-
-import java.util.Map;
-
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.RepositoryFactory;
-
-import org.apache.jackrabbit.jcr2spi.Jcr2spiRepositoryFactory;
-import org.apache.jackrabbit.jcr2spi.RepositoryImpl;
-import org.apache.jackrabbit.spi.RepositoryServiceFactory;
-
-/** A customised {@link RepositoryFactory} accessing a remote DAVEX service. */
-public class ClientDavexRepositoryFactory implements RepositoryFactory {
-       public final static String JACKRABBIT_DAVEX_URI = ClientDavexRepositoryServiceFactory.PARAM_REPOSITORY_URI;
-       public final static String JACKRABBIT_REMOTE_DEFAULT_WORKSPACE = ClientDavexRepositoryServiceFactory.PARAM_WORKSPACE_NAME_DEFAULT;
-
-       @SuppressWarnings("rawtypes")
-       @Override
-       public Repository getRepository(Map parameters) throws RepositoryException {
-               RepositoryServiceFactory repositoryServiceFactory = new ClientDavexRepositoryServiceFactory();
-               return RepositoryImpl
-                               .create(new Jcr2spiRepositoryFactory.RepositoryConfigImpl(repositoryServiceFactory, parameters));
-       }
-
-}
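
A usage sketch for the factory above, following the same pattern as JackrabbitClient and DavexFsProvider below; the repository URI and workspace names are placeholders:

	import java.util.HashMap;
	import java.util.Map;

	import javax.jcr.Repository;
	import javax.jcr.Session;

	import org.argeo.jackrabbit.client.ClientDavexRepositoryFactory;

	/** Sketch: connecting through ClientDavexRepositoryFactory; URI and workspaces are placeholders. */
	public class ClientDavexExample {
		public static void main(String[] args) throws Exception {
			Map<String, String> parameters = new HashMap<>();
			// DAVEX endpoint of the remote JCR server
			parameters.put(ClientDavexRepositoryFactory.JACKRABBIT_DAVEX_URI, "http://localhost:7070/jcr/ego");
			// default workspace, needed with older JCR servers (see the JCR-4120 note in the service factory)
			parameters.put(ClientDavexRepositoryFactory.JACKRABBIT_REMOTE_DEFAULT_WORKSPACE, "sys");
			Repository repository = new ClientDavexRepositoryFactory().getRepository(parameters);
			Session session = repository.login("home");
			System.out.println("Connected, root node is " + session.getRootNode().getPath());
			session.logout();
		}
	}
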
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/client/ClientDavexRepositoryService.java b/org.argeo.jcr/src/org/argeo/jackrabbit/client/ClientDavexRepositoryService.java
deleted file mode 100644 (file)
index 0f9db87..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-package org.argeo.jackrabbit.client;
-
-import javax.jcr.RepositoryException;
-
-import org.apache.http.client.protocol.HttpClientContext;
-import org.apache.http.protocol.HttpContext;
-import org.apache.jackrabbit.spi.SessionInfo;
-import org.apache.jackrabbit.spi2davex.BatchReadConfig;
-import org.apache.jackrabbit.spi2davex.RepositoryServiceImpl;
-
-/**
- * Wrapper for {@link RepositoryServiceImpl} in order to access the underlying
- * {@link HttpClientContext}.
- */
-public class ClientDavexRepositoryService extends RepositoryServiceImpl {
-
-       public ClientDavexRepositoryService(String jcrServerURI, BatchReadConfig batchReadConfig)
-                       throws RepositoryException {
-               super(jcrServerURI, batchReadConfig);
-       }
-
-       public ClientDavexRepositoryService(String jcrServerURI, String defaultWorkspaceName,
-                       BatchReadConfig batchReadConfig, int itemInfoCacheSize, int maximumHttpConnections)
-                       throws RepositoryException {
-               super(jcrServerURI, defaultWorkspaceName, batchReadConfig, itemInfoCacheSize, maximumHttpConnections);
-       }
-
-       public ClientDavexRepositoryService(String jcrServerURI, String defaultWorkspaceName,
-                       BatchReadConfig batchReadConfig, int itemInfoCacheSize) throws RepositoryException {
-               super(jcrServerURI, defaultWorkspaceName, batchReadConfig, itemInfoCacheSize);
-       }
-
-       @Override
-       protected HttpContext getContext(SessionInfo sessionInfo) throws RepositoryException {
-               HttpClientContext result = HttpClientContext.create();
-               result.setAuthCache(new NonSerialBasicAuthCache());
-               return result;
-       }
-
-}
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/client/ClientDavexRepositoryServiceFactory.java b/org.argeo.jcr/src/org/argeo/jackrabbit/client/ClientDavexRepositoryServiceFactory.java
deleted file mode 100644 (file)
index 4b240f0..0000000
+++ /dev/null
@@ -1,82 +0,0 @@
-package org.argeo.jackrabbit.client;
-
-import java.util.Map;
-
-import javax.jcr.RepositoryException;
-
-import org.apache.jackrabbit.spi.RepositoryService;
-import org.apache.jackrabbit.spi.commons.ItemInfoCacheImpl;
-import org.apache.jackrabbit.spi2davex.BatchReadConfig;
-import org.apache.jackrabbit.spi2davex.Spi2davexRepositoryServiceFactory;
-
-/**
- * Wrapper for {@link Spi2davexRepositoryServiceFactory} in order to create a
- * {@link ClientDavexRepositoryService}.
- */
-public class ClientDavexRepositoryServiceFactory extends Spi2davexRepositoryServiceFactory {
-       @Override
-       public RepositoryService createRepositoryService(Map<?, ?> parameters) throws RepositoryException {
-               // retrieve the repository uri
-               String uri;
-               if (parameters == null) {
-                       uri = System.getProperty(PARAM_REPOSITORY_URI);
-               } else {
-                       Object repoUri = parameters.get(PARAM_REPOSITORY_URI);
-                       uri = (repoUri == null) ? null : repoUri.toString();
-               }
-               if (uri == null) {
-                       uri = DEFAULT_REPOSITORY_URI;
-               }
-
-               // load other optional configuration parameters
-               BatchReadConfig brc = null;
-               int itemInfoCacheSize = ItemInfoCacheImpl.DEFAULT_CACHE_SIZE;
-               int maximumHttpConnections = 0;
-
-               // since JCR-4120 the default workspace name is no longer set to 'default'
-               // note: if running with JCR Server < 1.5 a default workspace name must
-               // therefore be configured
-               String workspaceNameDefault = null;
-
-               if (parameters != null) {
-                       // batchRead config
-                       Object param = parameters.get(PARAM_BATCHREAD_CONFIG);
-                       if (param != null && param instanceof BatchReadConfig) {
-                               brc = (BatchReadConfig) param;
-                       }
-
-                       // itemCache size config
-                       param = parameters.get(PARAM_ITEMINFO_CACHE_SIZE);
-                       if (param != null) {
-                               try {
-                                       itemInfoCacheSize = Integer.parseInt(param.toString());
-                               } catch (NumberFormatException e) {
-                                       // ignore, use default
-                               }
-                       }
-
-                       // max connections config
-                       param = parameters.get(PARAM_MAX_CONNECTIONS);
-                       if (param != null) {
-                               try {
-                                       maximumHttpConnections = Integer.parseInt(param.toString());
-                               } catch (NumberFormatException e) {
-                                       // using default
-                               }
-                       }
-
-                       param = parameters.get(PARAM_WORKSPACE_NAME_DEFAULT);
-                       if (param != null) {
-                               workspaceNameDefault = param.toString();
-                       }
-               }
-
-               if (maximumHttpConnections > 0) {
-                       return new ClientDavexRepositoryService(uri, workspaceNameDefault, brc, itemInfoCacheSize,
-                                       maximumHttpConnections);
-               } else {
-                       return new ClientDavexRepositoryService(uri, workspaceNameDefault, brc, itemInfoCacheSize);
-               }
-       }
-
-}
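
The optional parameters handled above can be passed alongside the repository URI. A sketch with placeholder values; the PARAM_* keys are the constants inherited from Spi2davexRepositoryServiceFactory and referenced in the code above:

	Map<String, String> parameters = new HashMap<>();
	parameters.put(ClientDavexRepositoryServiceFactory.PARAM_REPOSITORY_URI, "http://localhost:7070/jcr/ego");
	parameters.put(ClientDavexRepositoryServiceFactory.PARAM_WORKSPACE_NAME_DEFAULT, "sys");
	// both values are parsed as integers above; invalid values silently fall back to the defaults
	parameters.put(ClientDavexRepositoryServiceFactory.PARAM_ITEMINFO_CACHE_SIZE, "50000");
	parameters.put(ClientDavexRepositoryServiceFactory.PARAM_MAX_CONNECTIONS, "20");
	RepositoryService repositoryService = new ClientDavexRepositoryServiceFactory().createRepositoryService(parameters);
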
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/client/JackrabbitClient.java b/org.argeo.jcr/src/org/argeo/jackrabbit/client/JackrabbitClient.java
deleted file mode 100644 (file)
index e08f4d6..0000000
+++ /dev/null
@@ -1,125 +0,0 @@
-package org.argeo.jackrabbit.client;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.jcr.Node;
-import javax.jcr.NodeIterator;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.RepositoryFactory;
-import javax.jcr.Session;
-
-import org.apache.http.client.protocol.HttpClientContext;
-import org.apache.http.protocol.HttpContext;
-import org.apache.jackrabbit.jcr2dav.Jcr2davRepositoryFactory;
-import org.apache.jackrabbit.jcr2spi.Jcr2spiRepositoryFactory;
-import org.apache.jackrabbit.jcr2spi.RepositoryImpl;
-import org.apache.jackrabbit.spi.RepositoryService;
-import org.apache.jackrabbit.spi.RepositoryServiceFactory;
-import org.apache.jackrabbit.spi.SessionInfo;
-import org.apache.jackrabbit.spi.commons.ItemInfoCacheImpl;
-import org.apache.jackrabbit.spi2davex.BatchReadConfig;
-import org.apache.jackrabbit.spi2davex.RepositoryServiceImpl;
-import org.apache.jackrabbit.spi2davex.Spi2davexRepositoryServiceFactory;
-import org.argeo.jcr.JcrUtils;
-
-/** Minimal client to test JCR DAVEX connectivity. */
-public class JackrabbitClient {
-       final static String JACKRABBIT_REPOSITORY_URI = "org.apache.jackrabbit.repository.uri";
-       final static String JACKRABBIT_DAVEX_URI = "org.apache.jackrabbit.spi2davex.uri";
-       final static String JACKRABBIT_REMOTE_DEFAULT_WORKSPACE = "org.apache.jackrabbit.spi2davex.WorkspaceNameDefault";
-
-       public static void main(String[] args) {
-               String repoUri = args.length == 0 ? "http://root:demo@localhost:7070/jcr/ego" : args[0];
-               String workspace = args.length < 2 ? "home" : args[1];
-
-               Repository repository = null;
-               Session session = null;
-
-               URI uri;
-               try {
-                       uri = new URI(repoUri);
-               } catch (URISyntaxException e1) {
-                       throw new IllegalArgumentException(e1);
-               }
-
-               if (uri.getScheme().equals("http") || uri.getScheme().equals("https")) {
-
-                       RepositoryFactory repositoryFactory = new Jcr2davRepositoryFactory() {
-                               @SuppressWarnings("rawtypes")
-                               public Repository getRepository(Map parameters) throws RepositoryException {
-                                       RepositoryServiceFactory repositoryServiceFactory = new Spi2davexRepositoryServiceFactory() {
-
-                                               @Override
-                                               public RepositoryService createRepositoryService(Map<?, ?> parameters)
-                                                               throws RepositoryException {
-                                                       Object uri = parameters.get(JACKRABBIT_DAVEX_URI);
-                                                       Object defaultWorkspace = parameters.get(JACKRABBIT_REMOTE_DEFAULT_WORKSPACE);
-                                                       BatchReadConfig brc = null;
-                                                       return new RepositoryServiceImpl(uri.toString(), defaultWorkspace.toString(), brc,
-                                                                       ItemInfoCacheImpl.DEFAULT_CACHE_SIZE) {
-
-                                                               @Override
-                                                               protected HttpContext getContext(SessionInfo sessionInfo) throws RepositoryException {
-                                                                       HttpClientContext result = HttpClientContext.create();
-                                                                       result.setAuthCache(new NonSerialBasicAuthCache());
-                                                                       return result;
-                                                               }
-
-                                                       };
-                                               }
-                                       };
-                                       return RepositoryImpl.create(
-                                                       new Jcr2spiRepositoryFactory.RepositoryConfigImpl(repositoryServiceFactory, parameters));
-                               }
-                       };
-                       Map<String, String> params = new HashMap<String, String>();
-                       params.put(JACKRABBIT_DAVEX_URI, repoUri.toString());
-                       // FIXME make it configurable
-                       params.put(JACKRABBIT_REMOTE_DEFAULT_WORKSPACE, "sys");
-
-                       try {
-                               repository = repositoryFactory.getRepository(params);
-                               if (repository != null)
-                                       session = repository.login(workspace);
-                               else
-                                       throw new IllegalArgumentException("Repository " + repoUri + " not found");
-                       } catch (RepositoryException e) {
-                               e.printStackTrace();
-                       }
-
-               } else {
-                       Path path = Paths.get(uri.getPath());
-               }
-
-               try {
-                       Node rootNode = session.getRootNode();
-                       NodeIterator nit = rootNode.getNodes();
-                       while (nit.hasNext()) {
-                               System.out.println(nit.nextNode().getPath());
-                       }
-
-                       Node newNode = JcrUtils.mkdirs(rootNode, "dir/subdir");
-                       System.out.println("Created folder " + newNode.getPath());
-                       Node newFile = JcrUtils.copyBytesAsFile(newNode, "test.txt", "TEST".getBytes());
-                       System.out.println("Created file " + newFile.getPath());
-                       try (BufferedReader reader = new BufferedReader(new InputStreamReader(JcrUtils.getFileAsStream(newFile)))) {
-                               System.out.println("Read " + reader.readLine());
-                       } catch (IOException e) {
-                               e.printStackTrace();
-                       }
-                       newNode.getParent().remove();
-                       System.out.println("Removed new nodes");
-               } catch (RepositoryException e) {
-                       e.printStackTrace();
-               }
-       }
-}
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/client/NonSerialBasicAuthCache.java b/org.argeo.jcr/src/org/argeo/jackrabbit/client/NonSerialBasicAuthCache.java
deleted file mode 100644 (file)
index 3fb0db9..0000000
+++ /dev/null
@@ -1,41 +0,0 @@
-package org.argeo.jackrabbit.client;
-
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.http.HttpHost;
-import org.apache.http.auth.AuthScheme;
-import org.apache.http.client.AuthCache;
-
-/**
- * Implementation of {@link AuthCache} which doesn't use serialization, as it is
- * not supported by GraalVM at this stage.
- */
-public class NonSerialBasicAuthCache implements AuthCache {
-       private final Map<HttpHost, AuthScheme> cache;
-
-       public NonSerialBasicAuthCache() {
-               cache = new ConcurrentHashMap<HttpHost, AuthScheme>();
-       }
-
-       @Override
-       public void put(HttpHost host, AuthScheme authScheme) {
-               cache.put(host, authScheme);
-       }
-
-       @Override
-       public AuthScheme get(HttpHost host) {
-               return cache.get(host);
-       }
-
-       @Override
-       public void remove(HttpHost host) {
-               cache.remove(host);
-       }
-
-       @Override
-       public void clear() {
-               cache.clear();
-       }
-
-}
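
The cache is meant to be attached to a fresh HttpClientContext per session, exactly as done in ClientDavexRepositoryService#getContext above; the same two lines suffice wherever an AuthCache is needed without serialization:

	HttpClientContext context = HttpClientContext.create();
	context.setAuthCache(new NonSerialBasicAuthCache());
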
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/fs/AbstractJackrabbitFsProvider.java b/org.argeo.jcr/src/org/argeo/jackrabbit/fs/AbstractJackrabbitFsProvider.java
deleted file mode 100644 (file)
index a2eb983..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-package org.argeo.jackrabbit.fs;
-
-import org.argeo.jcr.fs.JcrFileSystemProvider;
-
-public abstract class AbstractJackrabbitFsProvider extends JcrFileSystemProvider {
-
-}
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/fs/DavexFsProvider.java b/org.argeo.jcr/src/org/argeo/jackrabbit/fs/DavexFsProvider.java
deleted file mode 100644 (file)
index 57d348b..0000000
+++ /dev/null
@@ -1,144 +0,0 @@
-package org.argeo.jackrabbit.fs;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.nio.file.DirectoryStream;
-import java.nio.file.FileSystem;
-import java.nio.file.FileSystemAlreadyExistsException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.jcr.Repository;
-import javax.jcr.RepositoryFactory;
-import javax.jcr.Session;
-
-import org.argeo.jackrabbit.client.ClientDavexRepositoryFactory;
-import org.argeo.jcr.fs.JcrFileSystem;
-import org.argeo.jcr.fs.JcrFsException;
-
-/**
- * A file system provider based on a JCR repository remotely accessed via the
- * DAVEX protocol.
- */
-public class DavexFsProvider extends AbstractJackrabbitFsProvider {
-//     final static String JACKRABBIT_REPOSITORY_URI = "org.apache.jackrabbit.repository.uri";
-//     final static String JACKRABBIT_REMOTE_DEFAULT_WORKSPACE = "org.apache.jackrabbit.spi2davex.WorkspaceNameDefault";
-
-       private Map<String, JcrFileSystem> fileSystems = new HashMap<>();
-
-       @Override
-       public String getScheme() {
-               return "davex";
-       }
-
-       @Override
-       public FileSystem newFileSystem(URI uri, Map<String, ?> env) throws IOException {
-               if (uri.getHost() == null)
-                       throw new IllegalArgumentException("A host should be provided");
-               try {
-                       URI repoUri = new URI("http", uri.getUserInfo(), uri.getHost(), uri.getPort(), uri.getPath(), null, null);
-                       String repoKey = repoUri.toString();
-                       if (fileSystems.containsKey(repoKey))
-                               throw new FileSystemAlreadyExistsException("CMS file system already exists for " + repoKey);
-                       RepositoryFactory repositoryFactory = new ClientDavexRepositoryFactory();
-                       return tryGetRepo(repositoryFactory, repoUri, "home");
-               } catch (URISyntaxException e) {
-                       throw new IllegalArgumentException("Cannot open file system " + uri, e);
-               }
-       }
-
-       private JcrFileSystem tryGetRepo(RepositoryFactory repositoryFactory, URI repoUri, String workspace)
-                       throws IOException {
-               Map<String, String> params = new HashMap<String, String>();
-               params.put(ClientDavexRepositoryFactory.JACKRABBIT_DAVEX_URI, repoUri.toString());
-               // FIXME make it configurable
-               params.put(ClientDavexRepositoryFactory.JACKRABBIT_REMOTE_DEFAULT_WORKSPACE, "sys");
-               Repository repository = null;
-               Session session = null;
-               try {
-                       repository = repositoryFactory.getRepository(params);
-                       if (repository != null)
-                               session = repository.login(workspace);
-               } catch (Exception e) {
-                       // silent
-               }
-
-               if (session == null) {
-                       if (repoUri.getPath() == null || repoUri.getPath().equals("/"))
-                               return null;
-                       String repoUriStr = repoUri.toString();
-                       if (repoUriStr.endsWith("/"))
-                               repoUriStr = repoUriStr.substring(0, repoUriStr.length() - 1);
-                       String nextRepoUriStr = repoUriStr.substring(0, repoUriStr.lastIndexOf('/'));
-                       String nextWorkspace = repoUriStr.substring(repoUriStr.lastIndexOf('/') + 1);
-                       URI nextUri;
-                       try {
-                               nextUri = new URI(nextRepoUriStr);
-                       } catch (URISyntaxException e) {
-                               throw new IllegalArgumentException("Badly formatted URI", e);
-                       }
-                       return tryGetRepo(repositoryFactory, nextUri, nextWorkspace);
-               } else {
-                       JcrFileSystem fileSystem = new JcrFileSystem(this, repository);
-                       fileSystems.put(repoUri.toString() + "/" + workspace, fileSystem);
-                       return fileSystem;
-               }
-       }
-
-       @Override
-       public FileSystem getFileSystem(URI uri) {
-               return currentUserFileSystem(uri);
-       }
-
-       @Override
-       public Path getPath(URI uri) {
-               JcrFileSystem fileSystem = currentUserFileSystem(uri);
-               if (fileSystem == null)
-                       try {
-                               fileSystem = (JcrFileSystem) newFileSystem(uri, new HashMap<String, Object>());
-                       } catch (IOException e) {
-                               throw new JcrFsException("Could not autocreate file system", e);
-                       }
-               URI repoUri = null;
-               try {
-                       repoUri = new URI("http", uri.getUserInfo(), uri.getHost(), uri.getPort(), uri.getPath(), null, null);
-               } catch (URISyntaxException e) {
-                       throw new IllegalArgumentException(e);
-               }
-               String uriStr = repoUri.toString();
-               String localPath = null;
-               for (String key : fileSystems.keySet()) {
-                       if (uriStr.startsWith(key)) {
-                               localPath = uriStr.toString().substring(key.length());
-                       }
-               }
-               if ("".equals(localPath))
-                       localPath = "/";
-               return fileSystem.getPath(localPath);
-       }
-
-       private JcrFileSystem currentUserFileSystem(URI uri) {
-               for (String key : fileSystems.keySet()) {
-                       if (uri.toString().startsWith(key))
-                               return fileSystems.get(key);
-               }
-               return null;
-       }
-
-       public static void main(String args[]) {
-               try {
-                       DavexFsProvider fsProvider = new DavexFsProvider();
-                       Path path = fsProvider.getPath(new URI("davex://root:demo@localhost:7070/jcr/ego/"));
-                       System.out.println(path);
-                       DirectoryStream<Path> ds = Files.newDirectoryStream(path);
-                       for (Path p : ds) {
-                               System.out.println("- " + p);
-                       }
-               } catch (Exception e) {
-                       e.printStackTrace();
-               }
-       }
-}
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/fs/JackrabbitMemoryFsProvider.java b/org.argeo.jcr/src/org/argeo/jackrabbit/fs/JackrabbitMemoryFsProvider.java
deleted file mode 100644 (file)
index e3a70d0..0000000
+++ /dev/null
@@ -1,87 +0,0 @@
-package org.argeo.jackrabbit.fs;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.nio.file.FileSystem;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.HashMap;
-import java.util.Map;
-
-import javax.jcr.Credentials;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.jcr.SimpleCredentials;
-
-import org.apache.jackrabbit.core.RepositoryImpl;
-import org.apache.jackrabbit.core.config.RepositoryConfig;
-import org.argeo.jcr.fs.JcrFileSystem;
-import org.argeo.jcr.fs.JcrFsException;
-
-public class JackrabbitMemoryFsProvider extends AbstractJackrabbitFsProvider {
-       private RepositoryImpl repository;
-       private JcrFileSystem fileSystem;
-
-       private Credentials credentials;
-
-       public JackrabbitMemoryFsProvider() {
-               String username = System.getProperty("user.name");
-               credentials = new SimpleCredentials(username, username.toCharArray());
-       }
-
-       @Override
-       public String getScheme() {
-               return "jcr+memory";
-       }
-
-       @Override
-       public FileSystem newFileSystem(URI uri, Map<String, ?> env) throws IOException {
-               try {
-                       Path tempDir = Files.createTempDirectory("fs-memory");
-                       URL confUrl = JackrabbitMemoryFsProvider.class.getResource("fs-memory.xml");
-                       RepositoryConfig repositoryConfig = RepositoryConfig.create(confUrl.toURI(), tempDir.toString());
-                       repository = RepositoryImpl.create(repositoryConfig);
-                       postRepositoryCreation(repository);
-                       fileSystem = new JcrFileSystem(this, repository, credentials);
-                       return fileSystem;
-               } catch (RepositoryException | URISyntaxException e) {
-                       throw new IOException("Cannot login to repository", e);
-               }
-       }
-
-       @Override
-       public FileSystem getFileSystem(URI uri) {
-               return fileSystem;
-       }
-
-       @Override
-       public Path getPath(URI uri) {
-               String path = uri.getPath();
-               if (fileSystem == null)
-                       try {
-                               newFileSystem(uri, new HashMap<String, Object>());
-                       } catch (IOException e) {
-                               throw new JcrFsException("Could not autocreate file system", e);
-                       }
-               return fileSystem.getPath(path);
-       }
-
-       public Repository getRepository() {
-               return repository;
-       }
-
-       public Session login() throws RepositoryException {
-               return getRepository().login(credentials);
-       }
-
-       /**
-        * Called after the repository has been created and before the file system is
-        * created.
-        */
-       protected void postRepositoryCreation(RepositoryImpl repositoryImpl) throws RepositoryException {
-
-       }
-}
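
A usage sketch for the in-memory provider above; the path is illustrative, and it assumes the directory-creation operation is supported by the underlying JcrFileSystemProvider:

	import java.net.URI;
	import java.nio.file.Files;
	import java.nio.file.Path;

	import javax.jcr.Session;

	import org.argeo.jackrabbit.fs.JackrabbitMemoryFsProvider;

	/** Sketch: using the "jcr+memory" NIO provider; the path is illustrative. */
	public class MemoryFsExample {
		public static void main(String[] args) throws Exception {
			JackrabbitMemoryFsProvider fsProvider = new JackrabbitMemoryFsProvider();
			// getPath() lazily creates the file system, backed by fs-memory.xml in a temporary directory
			Path dir = fsProvider.getPath(new URI("jcr+memory:///app/data"));
			Files.createDirectories(dir);
			System.out.println("Created " + dir);
			// a plain JCR session on the same in-memory repository
			Session session = fsProvider.login();
			System.out.println("Logged in as " + session.getUserID());
			session.logout();
		}
	}
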
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/fs/fs-memory.xml b/org.argeo.jcr/src/org/argeo/jackrabbit/fs/fs-memory.xml
deleted file mode 100644 (file)
index f2541fb..0000000
+++ /dev/null
@@ -1,57 +0,0 @@
-<?xml version="1.0"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
-       <!-- File system and datastore -->
-       <FileSystem
-               class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-
-       <!-- Workspace templates -->
-       <Workspaces rootPath="${rep.home}/workspaces"
-               defaultWorkspace="main" configRootPath="/workspaces" />
-       <Workspace name="${wsp.name}">
-               <FileSystem
-                       class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
-               </PersistenceManager>
-               <SearchIndex
-                       class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-                       <param name="path" value="${wsp.home}/index" />
-                       <param name="directoryManagerClass"
-                               value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
-                       <param name="extractorPoolSize" value="0" />
-                       <FileSystem
-                               class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-               </SearchIndex>
-       </Workspace>
-
-       <!-- Versioning -->
-       <Versioning rootPath="${rep.home}/version">
-               <FileSystem
-                       class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
-               </PersistenceManager>
-       </Versioning>
-
-       <!-- Indexing -->
-       <SearchIndex
-               class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-               <param name="path" value="${rep.home}/index" />
-               <param name="directoryManagerClass"
-                       value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
-               <param name="extractorPoolSize" value="0" />
-               <FileSystem
-                       class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-       </SearchIndex>
-
-       <!-- Security -->
-       <Security appName="Jackrabbit">
-               <LoginModule
-                       class="org.apache.jackrabbit.core.security.SimpleLoginModule" />
-               <!-- <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager" -->
-               <!-- workspaceName="security" /> -->
-               <!-- <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" 
-                       /> -->
-       </Security>
-</Repository>
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/fs/package-info.java b/org.argeo.jcr/src/org/argeo/jackrabbit/fs/package-info.java
deleted file mode 100644 (file)
index c9ec2c3..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/** Java NIO file system implementation based on Jackrabbit. */
-package org.argeo.jackrabbit.fs;
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/package-info.java b/org.argeo.jcr/src/org/argeo/jackrabbit/package-info.java
deleted file mode 100644 (file)
index 17497d6..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/** Generic Jackrabbit utilities. */
-package org.argeo.jackrabbit;
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/repository-h2.xml b/org.argeo.jcr/src/org/argeo/jackrabbit/repository-h2.xml
deleted file mode 100644 (file)
index 0526762..0000000
+++ /dev/null
@@ -1,82 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
-       <!-- Shared datasource -->
-       <DataSources>
-               <DataSource name="dataSource">
-                       <param name="driver" value="org.h2.Driver" />
-                       <param name="url" value="${dburl}" />
-                       <param name="user" value="${dbuser}" />
-                       <param name="password" value="${dbpassword}" />
-                       <param name="databaseType" value="h2" />
-                       <param name="maxPoolSize" value="${maxPoolSize}" />
-               </DataSource>
-       </DataSources>
-
-       <!-- File system and datastore -->
-       <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-               <param name="dataSourceName" value="dataSource" />
-               <param name="schema" value="default" />
-               <param name="schemaObjectPrefix" value="fs_" />
-       </FileSystem>
-       <DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
-               <param name="path" value="${rep.home}/datastore" />
-       </DataStore>
-
-       <!-- Workspace templates -->
-       <Workspaces rootPath="${rep.home}/workspaces"
-               defaultWorkspace="${defaultWorkspace}" />
-       <Workspace name="${wsp.name}">
-               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schema" value="default" />
-                       <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
-               </FileSystem>
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
-                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
-               </PersistenceManager>
-               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-                       <param name="path" value="${wsp.home}/index" />
-                       <param name="extractorPoolSize" value="${extractorPoolSize}" />
-                       <param name="cacheSize" value="${searchCacheSize}" />
-                       <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-               </SearchIndex>
-               <WorkspaceSecurity>
-                       <AccessControlProvider
-                               class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
-               </WorkspaceSecurity>
-       </Workspace>
-
-       <!-- Versioning -->
-       <Versioning rootPath="${rep.home}/version">
-               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schema" value="default" />
-                       <param name="schemaObjectPrefix" value="fs_ver_" />
-               </FileSystem>
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schemaObjectPrefix" value="pm_ver_" />
-                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
-               </PersistenceManager>
-       </Versioning>
-
-       <!-- Indexing -->
-       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-               <param name="path" value="${rep.home}/index" />
-               <param name="extractorPoolSize" value="${extractorPoolSize}" />
-               <param name="cacheSize" value="${searchCacheSize}" />
-               <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-       </SearchIndex>
-
-       <!-- Security -->
-       <Security appName="Jackrabbit">
-               <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
-                       workspaceName="security" />
-               <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
-       </Security>
-</Repository>
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/repository-localfs.xml b/org.argeo.jcr/src/org/argeo/jackrabbit/repository-localfs.xml
deleted file mode 100644 (file)
index 3d24708..0000000
+++ /dev/null
@@ -1,60 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
-       <!-- File system and datastore -->
-       <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
-               <param name="path" value="${rep.home}/repository" />
-       </FileSystem>
-       <DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
-               <param name="path" value="${rep.home}/datastore" />
-       </DataStore>
-
-       <!-- Workspace templates -->
-       <Workspaces rootPath="${rep.home}/workspaces"
-               defaultWorkspace="${defaultWorkspace}" />
-       <Workspace name="${wsp.name}">
-               <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
-                       <param name="path" value="${wsp.home}" />
-               </FileSystem>
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
-                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
-               </PersistenceManager>
-               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-                       <param name="path" value="${wsp.home}/index" />
-                       <param name="extractorPoolSize" value="${extractorPoolSize}" />
-                       <param name="cacheSize" value="${searchCacheSize}" />
-                       <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-               </SearchIndex>
-               <WorkspaceSecurity>
-                       <AccessControlProvider
-                               class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
-               </WorkspaceSecurity>
-       </Workspace>
-
-       <!-- Versioning -->
-       <Versioning rootPath="${rep.home}/version">
-               <FileSystem class="org.apache.jackrabbit.core.fs.local.LocalFileSystem">
-                       <param name="path" value="${rep.home}/version" />
-               </FileSystem>
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
-                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
-               </PersistenceManager>
-       </Versioning>
-
-       <!-- Indexing -->
-       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-               <param name="path" value="${rep.home}/index" />
-               <param name="extractorPoolSize" value="${extractorPoolSize}" />
-               <param name="cacheSize" value="${searchCacheSize}" />
-               <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-       </SearchIndex>
-
-       <!-- Security -->
-       <Security appName="Jackrabbit">
-               <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
-                       workspaceName="security" />
-               <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
-       </Security>
-</Repository>
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/repository-memory.xml b/org.argeo.jcr/src/org/argeo/jackrabbit/repository-memory.xml
deleted file mode 100644 (file)
index ecee5bd..0000000
+++ /dev/null
@@ -1,55 +0,0 @@
-<?xml version="1.0"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
-       <!-- File system and datastore -->
-       <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-
-       <!-- Workspace templates -->
-       <Workspaces rootPath="${rep.home}/workspaces"
-               defaultWorkspace="${defaultWorkspace}" configRootPath="/workspaces" />
-       <Workspace name="${wsp.name}">
-               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
-                       <param name="blobFSBlockSize" value="1" />
-                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
-               </PersistenceManager>
-               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-                       <param name="path" value="${wsp.home}/index" />
-                       <param name="directoryManagerClass"
-                               value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
-                       <param name="extractorPoolSize" value="${extractorPoolSize}" />
-                       <param name="cacheSize" value="${searchCacheSize}" />
-                       <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-                       <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-               </SearchIndex>
-       </Workspace>
-
-       <!-- Versioning -->
-       <Versioning rootPath="${rep.home}/version">
-               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
-                       <param name="blobFSBlockSize" value="1" />
-                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
-               </PersistenceManager>
-       </Versioning>
-
-       <!-- Indexing -->
-       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-               <param name="path" value="${rep.home}/index" />
-               <param name="directoryManagerClass"
-                       value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
-               <param name="extractorPoolSize" value="${extractorPoolSize}" />
-               <param name="cacheSize" value="${searchCacheSize}" />
-               <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-       </SearchIndex>
-
-       <!-- Security -->
-       <Security appName="Jackrabbit">
-               <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
-                       workspaceName="security" />
-               <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
-       </Security>
-</Repository>
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/repository-postgresql-ds.xml b/org.argeo.jcr/src/org/argeo/jackrabbit/repository-postgresql-ds.xml
deleted file mode 100644 (file)
index 07a0d04..0000000
+++ /dev/null
@@ -1,82 +0,0 @@
-<?xml version="1.0"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
-       <!-- Shared datasource -->
-       <DataSources>
-               <DataSource name="dataSource">
-                       <param name="driver" value="org.postgresql.Driver" />
-                       <param name="url" value="${dburl}" />
-                       <param name="user" value="${dbuser}" />
-                       <param name="password" value="${dbpassword}" />
-                       <param name="databaseType" value="postgresql" />
-                       <param name="maxPoolSize" value="${maxPoolSize}" />
-               </DataSource>
-       </DataSources>
-
-       <!-- File system and datastore -->
-       <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-               <param name="dataSourceName" value="dataSource" />
-               <param name="schema" value="postgresql" />
-               <param name="schemaObjectPrefix" value="fs_" />
-       </FileSystem>
-       <DataStore class="org.apache.jackrabbit.core.data.FileDataStore">
-               <param name="path" value="${rep.home}/datastore" />
-       </DataStore>
-
-       <!-- Workspace templates -->
-       <Workspaces rootPath="${rep.home}/workspaces"
-               defaultWorkspace="${defaultWorkspace}" />
-       <Workspace name="${wsp.name}">
-               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schema" value="postgresql" />
-                       <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
-               </FileSystem>
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
-                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
-               </PersistenceManager>
-               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-                       <param name="path" value="${wsp.home}/index" />
-                       <param name="extractorPoolSize" value="${extractorPoolSize}" />
-                       <param name="cacheSize" value="${searchCacheSize}" />
-                       <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-               </SearchIndex>
-               <WorkspaceSecurity>
-                       <AccessControlProvider
-                               class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
-               </WorkspaceSecurity>
-       </Workspace>
-
-       <!-- Versioning -->
-       <Versioning rootPath="${rep.home}/version">
-               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schema" value="postgresql" />
-                       <param name="schemaObjectPrefix" value="fs_ver_" />
-               </FileSystem>
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schemaObjectPrefix" value="pm_ver_" />
-                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
-               </PersistenceManager>
-       </Versioning>
-
-       <!-- Indexing -->
-       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-               <param name="path" value="${rep.home}/index" />
-               <param name="extractorPoolSize" value="${extractorPoolSize}" />
-               <param name="cacheSize" value="${searchCacheSize}" />
-               <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-       </SearchIndex>
-
-       <!-- Security -->
-       <Security appName="Jackrabbit">
-               <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
-                       workspaceName="security" />
-               <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
-       </Security>
-</Repository>
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/repository-postgresql.xml b/org.argeo.jcr/src/org/argeo/jackrabbit/repository-postgresql.xml
deleted file mode 100644 (file)
index 9677828..0000000
+++ /dev/null
@@ -1,79 +0,0 @@
-<?xml version="1.0"?>
-<!DOCTYPE Repository PUBLIC "Jackrabbit 2.6" "http://jackrabbit.apache.org/dtd/repository-2.6.dtd">
-<Repository>
-       <!-- Shared datasource -->
-       <DataSources>
-               <DataSource name="dataSource">
-                       <param name="driver" value="org.postgresql.Driver" />
-                       <param name="url" value="${dburl}" />
-                       <param name="user" value="${dbuser}" />
-                       <param name="password" value="${dbpassword}" />
-                       <param name="databaseType" value="postgresql" />
-                       <param name="maxPoolSize" value="${maxPoolSize}" />
-               </DataSource>
-       </DataSources>
-
-       <!-- File system and datastore -->
-       <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-               <param name="dataSourceName" value="dataSource" />
-               <param name="schema" value="postgresql" />
-               <param name="schemaObjectPrefix" value="fs_" />
-       </FileSystem>
-
-       <!-- Workspace templates -->
-       <Workspaces rootPath="${rep.home}/workspaces"
-               defaultWorkspace="${defaultWorkspace}" />
-       <Workspace name="${wsp.name}">
-               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schema" value="postgresql" />
-                       <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
-               </FileSystem>
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
-                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
-               </PersistenceManager>
-               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-                       <param name="path" value="${wsp.home}/index" />
-                       <param name="extractorPoolSize" value="${extractorPoolSize}" />
-                       <param name="cacheSize" value="${searchCacheSize}" />
-                       <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-               </SearchIndex>
-               <WorkspaceSecurity>
-                       <AccessControlProvider
-                               class="org.argeo.security.jackrabbit.ArgeoAccessControlProvider" />
-               </WorkspaceSecurity>
-       </Workspace>
-
-       <!-- Versioning -->
-       <Versioning rootPath="${rep.home}/version">
-               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schema" value="postgresql" />
-                       <param name="schemaObjectPrefix" value="fs_ver_" />
-               </FileSystem>
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.pool.PostgreSQLPersistenceManager">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schemaObjectPrefix" value="pm_ver_" />
-                       <param name="bundleCacheSize" value="${bundleCacheMB}" />
-               </PersistenceManager>
-       </Versioning>
-
-       <!-- Indexing -->
-       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-               <param name="path" value="${rep.home}/index" />
-               <param name="extractorPoolSize" value="${extractorPoolSize}" />
-               <param name="cacheSize" value="${searchCacheSize}" />
-               <param name="maxVolatileIndexSize" value="${maxVolatileIndexSize}" />
-       </SearchIndex>
-
-       <!-- Security -->
-       <Security appName="Jackrabbit">
-               <SecurityManager class="org.argeo.security.jackrabbit.ArgeoSecurityManager"
-                       workspaceName="security" />
-               <AccessManager class="org.argeo.security.jackrabbit.ArgeoAccessManager" />
-       </Security>
-</Repository>
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/security/JackrabbitSecurityUtils.java b/org.argeo.jcr/src/org/argeo/jackrabbit/security/JackrabbitSecurityUtils.java
deleted file mode 100644 (file)
index a75c795..0000000
+++ /dev/null
@@ -1,80 +0,0 @@
-package org.argeo.jackrabbit.security;
-
-import java.security.Principal;
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.jcr.security.Privilege;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.jackrabbit.api.security.JackrabbitAccessControlList;
-import org.apache.jackrabbit.api.security.JackrabbitAccessControlManager;
-import org.argeo.jcr.JcrUtils;
-
-/** Utilities around Jackrabbit security extensions. */
-public class JackrabbitSecurityUtils {
-       private final static Log log = LogFactory.getLog(JackrabbitSecurityUtils.class);
-
-       /**
-        * Convenience method for denying a single privilege to a principal (user or
-        * role), typically jcr:all
-        */
-       public synchronized static void denyPrivilege(Session session, String path, String principal, String privilege)
-                       throws RepositoryException {
-               List<Privilege> privileges = new ArrayList<Privilege>();
-               privileges.add(session.getAccessControlManager().privilegeFromName(privilege));
-               denyPrivileges(session, path, () -> principal, privileges);
-       }
-
-       /**
-        * Deny privileges on a path to a {@link Principal}. The path must already
-        * exist. Session is saved. Synchronized to prevent concurrent modifications of
-        * the same node.
-        */
-       public synchronized static Boolean denyPrivileges(Session session, String path, Principal principal,
-                       List<Privilege> privs) throws RepositoryException {
-               // make sure the session is in line with the persisted state
-               session.refresh(false);
-               JackrabbitAccessControlManager acm = (JackrabbitAccessControlManager) session.getAccessControlManager();
-               JackrabbitAccessControlList acl = (JackrabbitAccessControlList) JcrUtils.getAccessControlList(acm, path);
-
-//             accessControlEntries: for (AccessControlEntry ace : acl.getAccessControlEntries()) {
-//                     Principal currentPrincipal = ace.getPrincipal();
-//                     if (currentPrincipal.getName().equals(principal.getName())) {
-//                             Privilege[] currentPrivileges = ace.getPrivileges();
-//                             if (currentPrivileges.length != privs.size())
-//                                     break accessControlEntries;
-//                             for (int i = 0; i < currentPrivileges.length; i++) {
-//                                     Privilege currP = currentPrivileges[i];
-//                                     Privilege p = privs.get(i);
-//                                     if (!currP.getName().equals(p.getName())) {
-//                                             break accessControlEntries;
-//                                     }
-//                             }
-//                             return false;
-//                     }
-//             }
-
-               Privilege[] privileges = privs.toArray(new Privilege[privs.size()]);
-               acl.addEntry(principal, privileges, false);
-               acm.setPolicy(path, acl);
-               if (log.isDebugEnabled()) {
-                       StringBuffer privBuf = new StringBuffer();
-                       for (Privilege priv : privs)
-                               privBuf.append(priv.getName());
-                       log.debug("Denied privileges " + privBuf + " to " + principal.getName() + " on " + path + " in '"
-                                       + session.getWorkspace().getName() + "'");
-               }
-               session.refresh(true);
-               session.save();
-               return true;
-       }
-
-       /** Singleton. */
-       private JackrabbitSecurityUtils() {
-
-       }
-}
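
A sketch of the single-privilege variant above, with placeholder path and principal name; the node must already exist, and the session is saved by the call:

	// deny everything to "anonymous" below an (illustrative) private area
	JackrabbitSecurityUtils.denyPrivilege(session, "/home/private", "anonymous", "jcr:all");
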
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/security/package-info.java b/org.argeo.jcr/src/org/argeo/jackrabbit/security/package-info.java
deleted file mode 100644 (file)
index f3a282c..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/** Generic Jackrabbit security utilities. */
-package org.argeo.jackrabbit.security;
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/unit/AbstractJackrabbitTestCase.java b/org.argeo.jcr/src/org/argeo/jackrabbit/unit/AbstractJackrabbitTestCase.java
deleted file mode 100644 (file)
index f65432e..0000000
+++ /dev/null
@@ -1,51 +0,0 @@
-package org.argeo.jackrabbit.unit;
-
-import java.net.URL;
-
-import javax.jcr.Repository;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.jackrabbit.core.RepositoryImpl;
-import org.apache.jackrabbit.core.config.RepositoryConfig;
-import org.argeo.jcr.unit.AbstractJcrTestCase;
-
-/** Factorizes the configuration of an in-memory transient repository. */
-public abstract class AbstractJackrabbitTestCase extends AbstractJcrTestCase {
-       protected RepositoryImpl repositoryImpl;
-
-       // protected File getRepositoryFile() throws Exception {
-       // Resource res = new ClassPathResource(
-       // "org/argeo/jackrabbit/unit/repository-memory.xml");
-       // return res.getFile();
-       // }
-
-       public AbstractJackrabbitTestCase() {
-               URL url = AbstractJackrabbitTestCase.class.getResource("jaas.config");
-               assert url != null;
-               System.setProperty("java.security.auth.login.config", url.toString());
-       }
-
-       protected Repository createRepository() throws Exception {
-               // Repository repository = new TransientRepository(getRepositoryFile(),
-               // getHomeDir());
-               RepositoryConfig repositoryConfig = RepositoryConfig.create(
-                               AbstractJackrabbitTestCase.class
-                                               .getResourceAsStream(getRepositoryConfigResource()),
-                               getHomeDir().getAbsolutePath());
-               RepositoryImpl repositoryImpl = RepositoryImpl.create(repositoryConfig);
-               return repositoryImpl;
-       }
-
-       protected String getRepositoryConfigResource() {
-               return "repository-memory.xml";
-       }
-
-       @Override
-       protected void clearRepository(Repository repository) throws Exception {
-               RepositoryImpl repositoryImpl = (RepositoryImpl) repository;
-               if (repositoryImpl != null)
-                       repositoryImpl.shutdown();
-               FileUtils.deleteDirectory(getHomeDir());
-       }
-
-}
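For reference, a hypothetical concrete subclass of the test case removed above (illustrative only; it relies on the in-memory repository configuration and the demo credentials accepted by the simple security setup):

    package org.argeo.jackrabbit.unit;

    import javax.jcr.Node;
    import javax.jcr.Session;

    /** Hypothetical test, for illustration only. */
    public class MemoryRepositorySketchTest extends AbstractJackrabbitTestCase {
        public void testAddNode() throws Exception {
            Session session = session();
            Node node = session.getRootNode().addNode("test");
            session.save();
            assertNotNull(node);
        }
    }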
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/unit/jaas.config b/org.argeo.jcr/src/org/argeo/jackrabbit/unit/jaas.config
deleted file mode 100644 (file)
index 0313f91..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-TEST_JACKRABBIT_ADMIN {
-   org.argeo.cms.auth.DataAdminLoginModule requisite;
-};
-
-Jackrabbit {
-   org.argeo.security.jackrabbit.SystemJackrabbitLoginModule requisite;
-};
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/unit/package-info.java b/org.argeo.jcr/src/org/argeo/jackrabbit/unit/package-info.java
deleted file mode 100644 (file)
index 3b6143b..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/** Helpers for unit tests with Jackrabbit repositories. */
-package org.argeo.jackrabbit.unit;
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/unit/repository-h2.xml b/org.argeo.jcr/src/org/argeo/jackrabbit/unit/repository-h2.xml
deleted file mode 100644 (file)
index 348dc28..0000000
+++ /dev/null
@@ -1,81 +0,0 @@
-<?xml version="1.0"?>
-<!DOCTYPE Repository PUBLIC "-//The Apache Software Foundation//DTD Jackrabbit 1.6//EN"
-                            "http://jackrabbit.apache.org/dtd/repository-2.0.dtd">
-<Repository>
-       <!-- Shared datasource -->
-       <DataSources>
-               <DataSource name="dataSource">
-                       <param name="driver" value="org.h2.Driver" />
-                       <param name="url" value="jdbc:h2:mem:jackrabbit" />
-                       <param name="user" value="sa" />
-                       <param name="password" value="" />
-                       <param name="databaseType" value="h2" />
-                       <param name="maxPoolSize" value="10" />
-               </DataSource>
-       </DataSources>
-
-       <!-- File system and datastore -->
-       <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-               <param name="dataSourceName" value="dataSource" />
-               <param name="schema" value="default" />
-               <param name="schemaObjectPrefix" value="fs_" />
-       </FileSystem>
-       <DataStore class="org.apache.jackrabbit.core.data.db.DbDataStore">
-               <param name="dataSourceName" value="dataSource" />
-               <param name="schemaObjectPrefix" value="ds_" />
-       </DataStore>
-
-       <!-- Workspace templates -->
-       <Workspaces rootPath="${rep.home}/workspaces"
-               defaultWorkspace="dev" />
-       <Workspace name="${wsp.name}">
-               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schema" value="default" />
-                       <param name="schemaObjectPrefix" value="${wsp.name}_fs_" />
-               </FileSystem>
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schemaObjectPrefix" value="${wsp.name}_pm_" />
-               </PersistenceManager>
-               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-                       <param name="path" value="${wsp.home}/index" />
-               </SearchIndex>
-       </Workspace>
-
-       <!-- Versioning -->
-       <Versioning rootPath="${rep.home}/version">
-               <FileSystem class="org.apache.jackrabbit.core.fs.db.DbFileSystem">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schema" value="default" />
-                       <param name="schemaObjectPrefix" value="fs_ver_" />
-               </FileSystem>
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.pool.H2PersistenceManager">
-                       <param name="dataSourceName" value="dataSource" />
-                       <param name="schemaObjectPrefix" value="pm_ver_" />
-               </PersistenceManager>
-       </Versioning>
-
-       <!-- Indexing -->
-       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-               <param name="path" value="${rep.home}/repository/index" />
-               <param name="extractorPoolSize" value="2" />
-               <param name="supportHighlighting" value="true" />
-       </SearchIndex>
-
-       <!-- Security -->
-       <Security appName="Jackrabbit">
-               <SecurityManager
-                       class="org.apache.jackrabbit.core.security.simple.SimpleSecurityManager"
-                       workspaceName="security" />
-               <AccessManager
-                       class="org.apache.jackrabbit.core.security.simple.SimpleAccessManager" />
-               <LoginModule
-                       class="org.apache.jackrabbit.core.security.simple.SimpleLoginModule">
-                       <param name="anonymousId" value="anonymous" />
-                       <param name="adminId" value="admin" />
-               </LoginModule>
-       </Security>
-</Repository>
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jackrabbit/unit/repository-memory.xml b/org.argeo.jcr/src/org/argeo/jackrabbit/unit/repository-memory.xml
deleted file mode 100644 (file)
index 8395424..0000000
+++ /dev/null
@@ -1,72 +0,0 @@
-<?xml version="1.0"?>
-<!--
-
-    Copyright (C) 2007-2012 Argeo GmbH
-
-    Licensed under the Apache License, Version 2.0 (the "License");
-    you may not use this file except in compliance with the License.
-    You may obtain a copy of the License at
-
-            http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS,
-    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    See the License for the specific language governing permissions and
-    limitations under the License.
-
--->
-<!DOCTYPE Repository PUBLIC "-//The Apache Software Foundation//DTD Jackrabbit 1.6//EN"
-                            "http://jackrabbit.apache.org/dtd/repository-2.0.dtd">
-<Repository>
-       <!-- File system and datastore -->
-       <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-
-       <!-- Workspace templates -->
-       <Workspaces rootPath="${rep.home}/workspaces"
-               defaultWorkspace="main" configRootPath="/workspaces" />
-       <Workspace name="${wsp.name}">
-               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
-                       <param name="blobFSBlockSize" value="1" />
-               </PersistenceManager>
-               <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-                       <param name="path" value="${rep.home}/repository/index" />
-                       <param name="directoryManagerClass"
-                               value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
-                       <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-               </SearchIndex>
-       </Workspace>
-
-       <!-- Versioning -->
-       <Versioning rootPath="${rep.home}/version">
-               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-               <PersistenceManager
-                       class="org.apache.jackrabbit.core.persistence.bundle.BundleFsPersistenceManager">
-                       <param name="blobFSBlockSize" value="1" />
-               </PersistenceManager>
-       </Versioning>
-
-       <!-- Indexing -->
-       <SearchIndex class="org.apache.jackrabbit.core.query.lucene.SearchIndex">
-               <param name="path" value="${rep.home}/repository/index" />
-               <param name="directoryManagerClass"
-                       value="org.apache.jackrabbit.core.query.lucene.directory.RAMDirectoryManager" />
-               <FileSystem class="org.apache.jackrabbit.core.fs.mem.MemoryFileSystem" />
-       </SearchIndex>
-
-       <!-- Security -->
-       <Security appName="Jackrabbit">
-               <SecurityManager
-                       class="org.apache.jackrabbit.core.security.simple.SimpleSecurityManager"
-                       workspaceName="security" />
-               <AccessManager
-                       class="org.apache.jackrabbit.core.security.simple.SimpleAccessManager" />
-               <LoginModule
-                       class="org.apache.jackrabbit.core.security.simple.SimpleLoginModule">
-                       <param name="anonymousId" value="anonymous" />
-                       <param name="adminId" value="admin" />
-               </LoginModule>
-       </Security>
-</Repository>
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jcr/proxy/AbstractUrlProxy.java b/org.argeo.jcr/src/org/argeo/jcr/proxy/AbstractUrlProxy.java
deleted file mode 100644 (file)
index 0984276..0000000
+++ /dev/null
@@ -1,154 +0,0 @@
-package org.argeo.jcr.proxy;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.net.URL;
-
-import javax.jcr.Binary;
-import javax.jcr.Node;
-import javax.jcr.Property;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.jcr.nodetype.NodeType;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.argeo.jcr.JcrException;
-import org.argeo.jcr.JcrUtils;
-
-/** Base class for URL-based proxies. */
-public abstract class AbstractUrlProxy implements ResourceProxy {
-       private final static Log log = LogFactory.getLog(AbstractUrlProxy.class);
-
-       private Repository jcrRepository;
-       private Session jcrAdminSession;
-       private String proxyWorkspace = "proxy";
-
-       protected abstract Node retrieve(Session session, String path);
-
-       void init() {
-               try {
-                       jcrAdminSession = JcrUtils.loginOrCreateWorkspace(jcrRepository, proxyWorkspace);
-                       beforeInitSessionSave(jcrAdminSession);
-                       if (jcrAdminSession.hasPendingChanges())
-                               jcrAdminSession.save();
-               } catch (RepositoryException e) {
-                       JcrUtils.discardQuietly(jcrAdminSession);
-                       throw new JcrException("Cannot initialize URL proxy", e);
-               }
-       }
-
-       /**
-        * Called before the (admin) session is saved at the end of the initialization.
-        * Does nothing by default, to be overridden.
-        */
-       protected void beforeInitSessionSave(Session session) throws RepositoryException {
-       }
-
-       void destroy() {
-               JcrUtils.logoutQuietly(jcrAdminSession);
-       }
-
-       /**
-        * Called before the (admin) session is logged out when resources are released.
-        * Does nothing by default, to be overridden.
-        */
-       protected void beforeDestroySessionLogout() throws RepositoryException {
-       }
-
-       public Node proxy(String path) {
-               // We open a JCR session with client credentials so as not to use the
-               // admin session in multiple threads or turn it into a bottleneck.
-               Node nodeAdmin = null;
-               Node nodeClient = null;
-               Session clientSession = null;
-               try {
-                       clientSession = jcrRepository.login(proxyWorkspace);
-                       if (!clientSession.itemExists(path) || shouldUpdate(clientSession, path)) {
-                               nodeAdmin = retrieveAndSave(path);
-                               if (nodeAdmin != null)
-                                       nodeClient = clientSession.getNode(path);
-                       } else
-                               nodeClient = clientSession.getNode(path);
-                       return nodeClient;
-               } catch (RepositoryException e) {
-                       throw new JcrException("Cannot proxy " + path, e);
-               } finally {
-                       if (nodeClient == null)
-                               JcrUtils.logoutQuietly(clientSession);
-               }
-       }
-
-       protected synchronized Node retrieveAndSave(String path) {
-               try {
-                       Node node = retrieve(jcrAdminSession, path);
-                       if (node == null)
-                               return null;
-                       jcrAdminSession.save();
-                       return node;
-               } catch (RepositoryException e) {
-                       JcrUtils.discardQuietly(jcrAdminSession);
-                       throw new JcrException("Cannot retrieve and save " + path, e);
-               } finally {
-                       notifyAll();
-               }
-       }
-
-       /** The session is not saved. */
-       protected synchronized Node proxyUrl(Session session, String remoteUrl, String path) throws RepositoryException {
-               Node node = null;
-               if (session.itemExists(path)) {
-                       // throw new ArgeoJcrException("Node " + path + " already exists");
-               }
-               try (InputStream in = new URL(remoteUrl).openStream()) {
-                       // URL u = new URL(remoteUrl);
-                       // in = u.openStream();
-                       node = importFile(session, path, in);
-               } catch (IOException e) {
-                       if (log.isDebugEnabled()) {
-                               log.debug("Cannot read " + remoteUrl + ", skipping... " + e.getMessage());
-                               // log.trace("Cannot read because of ", e);
-                       }
-                       JcrUtils.discardQuietly(session);
-                       // } finally {
-                       // IOUtils.closeQuietly(in);
-               }
-               return node;
-       }
-
-       protected synchronized Node importFile(Session session, String path, InputStream in) throws RepositoryException {
-               Binary binary = null;
-               try {
-                       Node content = null;
-                       Node node = null;
-                       if (!session.itemExists(path)) {
-                               node = JcrUtils.mkdirs(session, path, NodeType.NT_FILE, NodeType.NT_FOLDER, false);
-                               content = node.addNode(Node.JCR_CONTENT, NodeType.NT_UNSTRUCTURED);
-                       } else {
-                               node = session.getNode(path);
-                               content = node.getNode(Node.JCR_CONTENT);
-                       }
-                       binary = session.getValueFactory().createBinary(in);
-                       content.setProperty(Property.JCR_DATA, binary);
-                       JcrUtils.updateLastModifiedAndParents(node, null, true);
-                       return node;
-               } finally {
-                       JcrUtils.closeQuietly(binary);
-               }
-       }
-
-       /** Whether the file should be updated. */
-       protected Boolean shouldUpdate(Session clientSession, String nodePath) {
-               return false;
-       }
-
-       public void setJcrRepository(Repository jcrRepository) {
-               this.jcrRepository = jcrRepository;
-       }
-
-       public void setProxyWorkspace(String localWorkspace) {
-               this.proxyWorkspace = localWorkspace;
-       }
-
-}
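For reference, a hypothetical subclass of the base class removed above, showing its intended extension point (the base URL and class name are illustrative, not part of the original code):

    package org.argeo.jcr.proxy;

    import javax.jcr.Node;
    import javax.jcr.RepositoryException;
    import javax.jcr.Session;

    /** Hypothetical proxy mirroring a remote HTTP repository, for illustration only. */
    public class HttpMirrorProxy extends AbstractUrlProxy {
        private String baseUrl = "https://repo.example.org";

        @Override
        protected Node retrieve(Session session, String path) {
            try {
                // proxyUrl() imports the remote file under 'path'; the caller saves the session
                return proxyUrl(session, baseUrl + path, path);
            } catch (RepositoryException e) {
                throw new IllegalStateException("Cannot retrieve " + path, e);
            }
        }
    }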
diff --git a/org.argeo.jcr/src/org/argeo/jcr/proxy/ResourceProxy.java b/org.argeo.jcr/src/org/argeo/jcr/proxy/ResourceProxy.java
deleted file mode 100644 (file)
index 84eea1f..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-package org.argeo.jcr.proxy;
-
-import javax.jcr.Node;
-
-/** A proxy which knows how to resolve and synchronize relative URLs. */
-public interface ResourceProxy {
-       /**
-        * Proxies the file referenced by this relative path in the underlying
-        * repository. A new session is created for each call, so the underlying
-        * session of the returned node must be closed by the caller.
-        * 
-        * @return the proxied Node, or <code>null</code> if the resource was not found
-        *         (e.g. HTTP 404)
-        */
-       public Node proxy(String relativePath);
-}
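For reference, a hypothetical caller of this interface (illustrative only), emphasizing that the session underlying the returned node must be closed by the caller:

    import javax.jcr.Node;
    import javax.jcr.RepositoryException;

    import org.argeo.jcr.JcrUtils;
    import org.argeo.jcr.proxy.ResourceProxy;

    class ResourceProxyUsageSketch {
        static String resolve(ResourceProxy proxy, String relativePath) throws RepositoryException {
            Node node = proxy.proxy(relativePath);
            if (node == null)
                return null; // resource not found (e.g. HTTP 404)
            try {
                return node.getPath();
            } finally {
                // proxy() opened a dedicated session, which the caller must close
                JcrUtils.logoutQuietly(node.getSession());
            }
        }
    }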
diff --git a/org.argeo.jcr/src/org/argeo/jcr/proxy/ResourceProxyServlet.java b/org.argeo.jcr/src/org/argeo/jcr/proxy/ResourceProxyServlet.java
deleted file mode 100644 (file)
index d77bd49..0000000
+++ /dev/null
@@ -1,116 +0,0 @@
-package org.argeo.jcr.proxy;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import javax.jcr.Node;
-import javax.jcr.Property;
-import javax.jcr.RepositoryException;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.argeo.jcr.Bin;
-import org.argeo.jcr.JcrException;
-import org.argeo.jcr.JcrUtils;
-
-/** Exposes a {@link ResourceProxy} over HTTP. */
-public class ResourceProxyServlet extends HttpServlet {
-       private static final long serialVersionUID = -8886549549223155801L;
-
-       private final static Log log = LogFactory
-                       .getLog(ResourceProxyServlet.class);
-
-       private ResourceProxy proxy;
-
-       private String contentTypeCharset = "UTF-8";
-
-       @Override
-       protected void doGet(HttpServletRequest request,
-                       HttpServletResponse response) throws ServletException, IOException {
-               String path = request.getPathInfo();
-
-               if (log.isTraceEnabled()) {
-                       log.trace("path=" + path);
-                       log.trace("UserPrincipal = " + request.getUserPrincipal().getName());
-                       log.trace("SessionID = " + request.getSession(false).getId());
-                       log.trace("ContextPath = " + request.getContextPath());
-                       log.trace("ServletPath = " + request.getServletPath());
-                       log.trace("PathInfo = " + request.getPathInfo());
-                       log.trace("Method = " + request.getMethod());
-                       log.trace("User-Agent = " + request.getHeader("User-Agent"));
-               }
-
-               Node node = null;
-               try {
-                       node = proxy.proxy(path);
-                       if (node == null)
-                               response.sendError(404);
-                       else
-                               processResponse(node, response);
-               } finally {
-                       if (node != null)
-                               try {
-                                       JcrUtils.logoutQuietly(node.getSession());
-                               } catch (RepositoryException e) {
-                                       // silent
-                               }
-               }
-
-       }
-
-       /** Retrieve the content of the node. */
-       protected void processResponse(Node node, HttpServletResponse response) {
-//             Binary binary = null;
-//             InputStream in = null;
-               try (Bin binary = new Bin(node.getNode(Property.JCR_CONTENT).getProperty(Property.JCR_DATA));
-                               InputStream in = binary.getStream()) {
-                       String fileName = node.getName();
-                       String ext = FilenameUtils.getExtension(fileName);
-
-                       // TODO use a more generic / standard approach
-                       // see http://svn.apache.org/viewvc/tomcat/trunk/conf/web.xml
-                       String contentType;
-                       if ("xml".equals(ext))
-                               contentType = "text/xml;charset=" + contentTypeCharset;
-                       else if ("jar".equals(ext))
-                               contentType = "application/java-archive";
-                       else if ("zip".equals(ext))
-                               contentType = "application/zip";
-                       else if ("gz".equals(ext))
-                               contentType = "application/x-gzip";
-                       else if ("bz2".equals(ext))
-                               contentType = "application/x-bzip2";
-                       else if ("tar".equals(ext))
-                               contentType = "application/x-tar";
-                       else if ("rpm".equals(ext))
-                               contentType = "application/x-redhat-package-manager";
-                       else
-                               contentType = "application/octet-stream";
-                       contentType = contentType + ";name=\"" + fileName + "\"";
-                       response.setHeader("Content-Disposition", "attachment; filename=\""
-                                       + fileName + "\"");
-                       response.setHeader("Expires", "0");
-                       response.setHeader("Cache-Control", "no-cache, must-revalidate");
-                       response.setHeader("Pragma", "no-cache");
-
-                       response.setContentType(contentType);
-
-                       IOUtils.copy(in, response.getOutputStream());
-               } catch (RepositoryException e) {
-                       throw new JcrException("Cannot download " + node, e);
-               } catch (IOException e) {
-                       throw new RuntimeException("Cannot download " + node, e);
-               }
-       }
-
-       public void setProxy(ResourceProxy resourceProxy) {
-               this.proxy = resourceProxy;
-       }
-
-}
diff --git a/org.argeo.jcr/src/org/argeo/jcr/proxy/package-info.java b/org.argeo.jcr/src/org/argeo/jcr/proxy/package-info.java
deleted file mode 100644 (file)
index a578c45..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/** Components to build proxies based on JCR. */
-package org.argeo.jcr.proxy;
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/jcr/unit/AbstractJcrTestCase.java b/org.argeo.jcr/src/org/argeo/jcr/unit/AbstractJcrTestCase.java
deleted file mode 100644 (file)
index dc2963a..0000000
+++ /dev/null
@@ -1,116 +0,0 @@
-package org.argeo.jcr.unit;
-
-import java.io.File;
-import java.security.AccessController;
-import java.security.PrivilegedAction;
-
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.jcr.SimpleCredentials;
-import javax.security.auth.Subject;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.argeo.jcr.JcrException;
-
-import junit.framework.TestCase;
-
-/** Base for unit tests with a JCR repository. */
-public abstract class AbstractJcrTestCase extends TestCase {
-       private final static Log log = LogFactory.getLog(AbstractJcrTestCase.class);
-
-       private Repository repository;
-       private Session session = null;
-
-       public final static String LOGIN_CONTEXT_TEST_SYSTEM = "TEST_JACKRABBIT_ADMIN";
-
-       // protected abstract File getRepositoryFile() throws Exception;
-
-       protected abstract Repository createRepository() throws Exception;
-
-       protected abstract void clearRepository(Repository repository) throws Exception;
-
-       @Override
-       protected void setUp() throws Exception {
-               File homeDir = getHomeDir();
-               FileUtils.deleteDirectory(homeDir);
-               repository = createRepository();
-       }
-
-       @Override
-       protected void tearDown() throws Exception {
-               if (session != null) {
-                       session.logout();
-                       if (log.isTraceEnabled())
-                               log.trace("Logout session");
-               }
-               clearRepository(repository);
-       }
-
-       protected Session session() {
-               if (session != null && session.isLive())
-                       return session;
-               Session session;
-               if (getLoginContext() != null) {
-                       LoginContext lc;
-                       try {
-                               lc = new LoginContext(getLoginContext());
-                               lc.login();
-                       } catch (LoginException e) {
-                               throw new IllegalStateException("JAAS login failed", e);
-                       }
-                       session = Subject.doAs(lc.getSubject(), new PrivilegedAction<Session>() {
-
-                               @Override
-                               public Session run() {
-                                       return login();
-                               }
-
-                       });
-               } else
-                       session = login();
-               this.session = session;
-               return this.session;
-       }
-
-       protected String getLoginContext() {
-               return null;
-       }
-
-       protected Session login() {
-               try {
-                       if (log.isTraceEnabled())
-                               log.trace("Login session");
-                       Subject subject = Subject.getSubject(AccessController.getContext());
-                       if (subject != null)
-                               return getRepository().login();
-                       else
-                               return getRepository().login(new SimpleCredentials("demo", "demo".toCharArray()));
-               } catch (RepositoryException e) {
-                       throw new JcrException("Cannot login to repository", e);
-               }
-       }
-
-       protected Repository getRepository() {
-               return repository;
-       }
-
-       /**
-        * Enables child classes to set an existing repository, in case it is not
-        * deleted on startup, e.g. in order to test migration.
-        */
-       public void setRepository(Repository repository) {
-               this.repository = repository;
-       }
-
-       protected File getHomeDir() {
-               File homeDir = new File(System.getProperty("java.io.tmpdir"),
-                               AbstractJcrTestCase.class.getSimpleName() + "-" + System.getProperty("user.name"));
-               return homeDir;
-       }
-
-}
diff --git a/org.argeo.jcr/src/org/argeo/jcr/unit/package-info.java b/org.argeo.jcr/src/org/argeo/jcr/unit/package-info.java
deleted file mode 100644 (file)
index c6e7415..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/** Helpers for unit tests with JCR repositories. */
-package org.argeo.jcr.unit;
\ No newline at end of file
diff --git a/org.argeo.jcr/src/org/argeo/sync/SyncException.java b/org.argeo.jcr/src/org/argeo/sync/SyncException.java
deleted file mode 100644 (file)
index 89bf869..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-package org.argeo.sync;
-
-/** Common exception for synchronization operations. */
-public class SyncException extends RuntimeException {
-       private static final long serialVersionUID = -3371314343580218538L;
-
-       public SyncException(String message) {
-               super(message);
-       }
-
-       public SyncException(String message, Throwable cause) {
-               super(message, cause);
-       }
-
-       public SyncException(Object source, Object target, Throwable cause) {
-               super("Cannot sync from " + source + " to " + target, cause);
-       }
-}
diff --git a/org.argeo.jcr/src/org/argeo/sync/SyncResult.java b/org.argeo.jcr/src/org/argeo/sync/SyncResult.java
deleted file mode 100644 (file)
index 6d12ada..0000000
+++ /dev/null
@@ -1,101 +0,0 @@
-package org.argeo.sync;
-
-import java.time.Instant;
-import java.util.Set;
-import java.util.TreeSet;
-
-/** Describes what happened during a sync operation. */
-public class SyncResult<T> {
-       private final Set<T> added = new TreeSet<>();
-       private final Set<T> modified = new TreeSet<>();
-       private final Set<T> deleted = new TreeSet<>();
-       private final Set<Error> errors = new TreeSet<>();
-
-       public Set<T> getAdded() {
-               return added;
-       }
-
-       public Set<T> getModified() {
-               return modified;
-       }
-
-       public Set<T> getDeleted() {
-               return deleted;
-       }
-
-       public Set<Error> getErrors() {
-               return errors;
-       }
-
-       public void addError(T sourcePath, T targetPath, Exception e) {
-               Error error = new Error(sourcePath, targetPath, e);
-               errors.add(error);
-       }
-
-       public boolean noModification() {
-               return modified.isEmpty() && deleted.isEmpty() && added.isEmpty();
-       }
-
-       @Override
-       public String toString() {
-               if (noModification())
-                       return "No modification.";
-               StringBuffer sb = new StringBuffer();
-               for (T p : modified)
-                       sb.append("MOD ").append(p).append('\n');
-               for (T p : deleted)
-                       sb.append("DEL ").append(p).append('\n');
-               for (T p : added)
-                       sb.append("ADD ").append(p).append('\n');
-               for (Error error : errors)
-                       sb.append(error).append('\n');
-               return sb.toString();
-       }
-
-       public class Error implements Comparable<Error> {
-               private final T sourcePath;// if null this is a failed delete
-               private final T targetPath;
-               private final Exception exception;
-               private final Instant timestamp = Instant.now();
-
-               public Error(T sourcePath, T targetPath, Exception e) {
-                       super();
-                       this.sourcePath = sourcePath;
-                       this.targetPath = targetPath;
-                       this.exception = e;
-               }
-
-               public T getSourcePath() {
-                       return sourcePath;
-               }
-
-               public T getTargetPath() {
-                       return targetPath;
-               }
-
-               public Exception getException() {
-                       return exception;
-               }
-
-               public Instant getTimestamp() {
-                       return timestamp;
-               }
-
-               @Override
-               public int compareTo(Error o) {
-                       return timestamp.compareTo(o.timestamp);
-               }
-
-               @Override
-               public int hashCode() {
-                       return timestamp.hashCode();
-               }
-
-               @Override
-               public String toString() {
-                       return "ERR " + timestamp + " " + (sourcePath == null ? "Deletion failed" : "Copy failed " + sourcePath)
-                                       + " " + targetPath + " " + exception.getMessage();
-               }
-
-       }
-}
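For reference, a minimal usage sketch of this result holder (the paths are illustrative, assuming a file-based sync):

    import java.nio.file.Path;
    import java.nio.file.Paths;

    import org.argeo.sync.SyncResult;

    class SyncResultSketch {
        public static void main(String[] args) {
            SyncResult<Path> result = new SyncResult<>();
            result.getAdded().add(Paths.get("/data/new.txt"));
            result.getModified().add(Paths.get("/data/changed.txt"));
            if (!result.noModification())
                System.out.println(result); // one ADD / MOD / DEL / ERR line per entry
        }
    }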
diff --git a/org.argeo.jcr/src/org/argeo/sync/package-info.java b/org.argeo.jcr/src/org/argeo/sync/package-info.java
deleted file mode 100644 (file)
index c5e9da0..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/** Synchronisation-related utilities. */
-package org.argeo.sync;
\ No newline at end of file
diff --git a/org.argeo.util/.classpath b/org.argeo.util/.classpath
deleted file mode 100644 (file)
index 4e5da1d..0000000
+++ /dev/null
@@ -1,10 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-       <classpathentry kind="src" path="src" />
-       <classpathentry kind="src" path="ext/test" />
-       <classpathentry kind="con"
-               path="org.eclipse.pde.core.requiredPlugins" />
-       <classpathentry kind="con"
-               path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8" />
-       <classpathentry kind="output" path="bin" />
-</classpath>
diff --git a/org.argeo.util/.gitignore b/org.argeo.util/.gitignore
deleted file mode 100644 (file)
index 09e3bc9..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/bin/
-/target/
diff --git a/org.argeo.util/.project b/org.argeo.util/.project
deleted file mode 100644 (file)
index 171ff88..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-       <name>org.argeo.util</name>
-       <comment></comment>
-       <projects>
-       </projects>
-       <buildSpec>
-               <buildCommand>
-                       <name>org.eclipse.jdt.core.javabuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-               <buildCommand>
-                       <name>org.eclipse.pde.ManifestBuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-               <buildCommand>
-                       <name>org.eclipse.pde.SchemaBuilder</name>
-                       <arguments>
-                       </arguments>
-               </buildCommand>
-       </buildSpec>
-       <natures>
-               <nature>org.eclipse.jdt.core.javanature</nature>
-               <nature>org.eclipse.pde.PluginNature</nature>
-       </natures>
-</projectDescription>
diff --git a/org.argeo.util/META-INF/.gitignore b/org.argeo.util/META-INF/.gitignore
deleted file mode 100644 (file)
index 4854a41..0000000
+++ /dev/null
@@ -1 +0,0 @@
-/MANIFEST.MF
diff --git a/org.argeo.util/bnd.bnd b/org.argeo.util/bnd.bnd
deleted file mode 100644 (file)
index e69de29..0000000
diff --git a/org.argeo.util/build.properties b/org.argeo.util/build.properties
deleted file mode 100644 (file)
index d34068d..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-source.. = src/,\
-           ext/test/
-output.. = bin/
diff --git a/org.argeo.util/ext/test/org/argeo/util/CsvParserEncodingTest.java b/org.argeo.util/ext/test/org/argeo/util/CsvParserEncodingTest.java
deleted file mode 100644 (file)
index 09443c2..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-package org.argeo.util;
-
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.util.List;
-
-/** Tests that {@link CsvParser} can deal properly with encodings. */
-public class CsvParserEncodingTest {
-
-       private String iso = "ISO-8859-1";
-       private String utf8 = "UTF-8";
-
-       public void testParse() throws Exception {
-
-               String xml = new String("áéíóúñ,éééé");
-               byte[] utfBytes = xml.getBytes(utf8);
-               byte[] isoBytes = xml.getBytes(iso);
-
-               InputStream inUtf = new ByteArrayInputStream(utfBytes);
-               InputStream inIso = new ByteArrayInputStream(isoBytes);
-
-               CsvParser csvParser = new CsvParser() {
-                       protected void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
-                               assert header.size() == tokens.size();
-                               assert 2 == tokens.size();
-                               assert "áéíóúñ".equals(tokens.get(0));
-                               assert "éééé".equals(tokens.get(1));
-                       }
-               };
-
-               csvParser.parse(inUtf, utf8);
-               inUtf.close();
-               csvParser.parse(inIso, iso);
-               inIso.close();
-       }
-}
diff --git a/org.argeo.util/ext/test/org/argeo/util/CsvParserParseFileTest.java b/org.argeo.util/ext/test/org/argeo/util/CsvParserParseFileTest.java
deleted file mode 100644 (file)
index 5a92c68..0000000
+++ /dev/null
@@ -1,25 +0,0 @@
-package org.argeo.util;
-
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.Map;
-
-/** Tests that {@link CsvParser} can properly parse a CSV file. */
-public class CsvParserParseFileTest {
-       public void testParse() throws Exception {
-
-               final Map<Integer, Map<String, String>> lines = new HashMap<Integer, Map<String, String>>();
-               InputStream in = getClass().getResourceAsStream("/org/argeo/util/ReferenceFile.csv");
-               CsvParserWithLinesAsMap parser = new CsvParserWithLinesAsMap() {
-                       protected void processLine(Integer lineNumber, Map<String, String> line) {
-                               lines.put(lineNumber, line);
-                       }
-               };
-
-               parser.parse(in);
-               in.close();
-
-               assert 5 == lines.size();
-       }
-
-}
diff --git a/org.argeo.util/ext/test/org/argeo/util/CsvParserTest.java b/org.argeo.util/ext/test/org/argeo/util/CsvParserTest.java
deleted file mode 100644 (file)
index e59dbd1..0000000
+++ /dev/null
@@ -1,30 +0,0 @@
-package org.argeo.util;
-
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.util.List;
-
-/** {@link CsvParser} tests. */
-public class CsvParserTest {
-       public void testParse() throws Exception {
-               String toParse = "Header1,\"Header\n2\",Header3,\"Header4\"\n" + "Col1,\"Col\n2\",Col3,\"\"\"Col4\"\"\"\n"
-                               + "Col1,\"Col\n2\",Col3,\"\"\"Col4\"\"\"\n" + "Col1,\"Col\n2\",Col3,\"\"\"Col4\"\"\"\n";
-
-               InputStream in = new ByteArrayInputStream(toParse.getBytes());
-
-               CsvParser csvParser = new CsvParser() {
-                       protected void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
-                               assert header.size() == tokens.size();
-                               assert 4 == tokens.size();
-                               assert "Col1".equals(tokens.get(0));
-                               assert "Col\n2".equals(tokens.get(1));
-                               assert "Col3".equals(tokens.get(2));
-                               assert "\"Col4\"".equals(tokens.get(3));
-                       }
-               };
-
-               csvParser.parse(in);
-               in.close();
-       }
-
-}
diff --git a/org.argeo.util/ext/test/org/argeo/util/CsvParserWithQuotedSeparatorTest.java b/org.argeo.util/ext/test/org/argeo/util/CsvParserWithQuotedSeparatorTest.java
deleted file mode 100644 (file)
index 67ba346..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-package org.argeo.util;
-
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/** Tests that {@link CsvParser} deals properly with "" quotes. */
-public class CsvParserWithQuotedSeparatorTest {
-       public void testSimpleParse() throws Exception {
-               String toParse = "Header1,\"Header2\",Header3,\"Header4\"\n"
-                               + "\"Col1, Col2\",\"Col\n2\",Col3,\"\"\"Col4\"\"\"\n";
-
-               InputStream in = new ByteArrayInputStream(toParse.getBytes());
-
-               CsvParser csvParser = new CsvParser() {
-                       protected void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
-                               assert header.size() == tokens.size();
-                               assert 4 == tokens.size();
-                               assert "Col1, Col2".equals(tokens.get(0));
-                       }
-               };
-               // System.out.println(toParse);
-               csvParser.parse(in);
-               in.close();
-
-       }
-
-       public void testParseFile() throws Exception {
-
-               final Map<Integer, Map<String, String>> lines = new HashMap<Integer, Map<String, String>>();
-               InputStream in = getClass().getResourceAsStream("/org/argeo/util/ReferenceFile.csv");
-
-               CsvParserWithLinesAsMap parser = new CsvParserWithLinesAsMap() {
-                       protected void processLine(Integer lineNumber, Map<String, String> line) {
-                               // System.out.println("processing line #" + lineNumber);
-                               lines.put(lineNumber, line);
-                       }
-               };
-
-               parser.parse(in);
-               in.close();
-
-               Map<String, String> line = lines.get(2);
-               assert ",,,,".equals(line.get("Coma testing"));
-               line = lines.get(3);
-               assert ",, ,,".equals(line.get("Coma testing"));
-               line = lines.get(4);
-               assert "module1, module2".equals(line.get("Coma testing"));
-               line = lines.get(5);
-               assert "module1,module2".equals(line.get("Coma testing"));
-               line = lines.get(6);
-               assert ",module1,module2, \nmodule3, module4".equals(line.get("Coma testing"));
-               assert 5 == lines.size();
-
-       }
-}
diff --git a/org.argeo.util/ext/test/org/argeo/util/CsvWriterTest.java b/org.argeo.util/ext/test/org/argeo/util/CsvWriterTest.java
deleted file mode 100644 (file)
index ff5dcc5..0000000
+++ /dev/null
@@ -1,47 +0,0 @@
-package org.argeo.util;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/** {@link CsvWriter} tests. */
-public class CsvWriterTest {
-       public void testWrite() throws Exception {
-               ByteArrayOutputStream out = new ByteArrayOutputStream();
-               final CsvWriter csvWriter = new CsvWriter(out);
-
-               String[] header = { "Header1", "Header 2", "Header,3", "Header\n4", "Header\"5\"" };
-               String[] line1 = { "Value1", "Value 2", "Value,3", "Value\n4", "Value\"5\"" };
-               csvWriter.writeLine(Arrays.asList(header));
-               csvWriter.writeLine(Arrays.asList(line1));
-
-               String reference = "Header1,Header 2,\"Header,3\",\"Header\n4\",\"Header\"\"5\"\"\"\n"
-                               + "Value1,Value 2,\"Value,3\",\"Value\n4\",\"Value\"\"5\"\"\"\n";
-               String written = new String(out.toByteArray());
-               assert reference.equals(written);
-               out.close();
-               System.out.println(written);
-
-               final List<String> allTokens = new ArrayList<String>();
-               CsvParser csvParser = new CsvParser() {
-                       protected void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
-                               if (lineNumber == 2)
-                                       allTokens.addAll(header);
-                               allTokens.addAll(tokens);
-                       }
-               };
-               ByteArrayInputStream in = new ByteArrayInputStream(written.getBytes());
-               csvParser.parse(in);
-               in.close();
-               List<String> allTokensRef = new ArrayList<String>();
-               allTokensRef.addAll(Arrays.asList(header));
-               allTokensRef.addAll(Arrays.asList(line1));
-
-               assert allTokensRef.size() == allTokens.size();
-               for (int i = 0; i < allTokensRef.size(); i++)
-                       assert allTokensRef.get(i).equals(allTokens.get(i));
-       }
-
-}
diff --git a/org.argeo.util/ext/test/org/argeo/util/ReferenceFile.csv b/org.argeo.util/ext/test/org/argeo/util/ReferenceFile.csv
deleted file mode 100644 (file)
index 351453d..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-"ID","A long Text","Name","Other","Number","Reference","Target","Date","Update","Language","ID Ref","Weird chars","line feeds","after line feed","Empty column","Status comment","Comments","Empty","Coma testing"
-"AK251","Everything & with some line feed 
- more “some” quote","Marge S.",,78.6,"A1155222221111268515131",,12/12/12,03/12/08,,9821308500721,"%%%ùù","ao","Nothing special",,,"Some very usefull comment",,",,,,"
-"AG254","same","Roger “wallace” Big","15 – JI",78.5,"A1155222221111268515131","next milestone",12/12/12,03/12/08,"_fr (French - France)",9812309500953,"***µ”","a
-
-
-
-
-o","after line feed",,"Do the job",,,",, ,,"
-"FG211","Very long text with some bullets.
-1 first
-2 second
-3. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long","Father & Son","15 – JI",15.4,"A1155222221111268515131","next milestone",12/12/12,03/12/08,"_fr (French - France)",9812309500952,"///","a
-
-
-
-
-
-
-o","module1,module2",,"Be fast",,,"module1, module2"
-"RRT152","Very long text with some bullets.
-1 first
-2 second
-3. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long. some more very very very long","Another $$","15 – JI",12.3,"A1155222221111268515131","next milestone",12/12/12,03/12/08,"_fr (French - France)",9812309500950,"---","a
-
-o
-
-
-","module1,module2",,,,,"module1,module2"
-"YU121","Another use case : “blank line”
-
-After the blank.","nothing with brackets( )","15 – JI",15.2,"A1155222221111268515131",,12/12/12,03/12/08,"_fr (French - France)",9812309500925,",;:?./","ao","
-
-
-
-After line feed again",,,,,",module1,module2, 
-module3, module4"
diff --git a/org.argeo.util/ext/test/org/argeo/util/TestParse-ISO.csv b/org.argeo.util/ext/test/org/argeo/util/TestParse-ISO.csv
deleted file mode 100644 (file)
index 0bec611..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-"Date d'imputation","N° de compte","Code journal","Pièce interne","Pièce externe","Libellé d'écriture","Débit","Crédit","Lettrage","Quantité","Code analytique","Date d'échéance","Date d'imputation origine","Code journal origine","Mode de règlement","Date début de période","Date fin de période"
-26.01.2010,"101300","BQ","BQ01.10",,"Depot société en formation",,"3.000,00",,,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"101300","BQ","BQ01.10",,"Depot société en formation",,"7.000,00",,,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"411OPEN","BQ","BQ01.10",,"Vir Client ",,"2.508,00","A",,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"455100","BQ","BQ01.10",,"Bankomat Raiffeise","250,00",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"512101","BQ","BQ01.10",,"Extrait bancaire 01.10","12.250,55",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"627800","BQ","BQ01.10",,"Envoi de chequier","2,30",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"627800","BQ","BQ01.10",,"Frais d'expedition","5,15",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
diff --git a/org.argeo.util/ext/test/org/argeo/util/TestParse-UTF-8.csv b/org.argeo.util/ext/test/org/argeo/util/TestParse-UTF-8.csv
deleted file mode 100644 (file)
index 0bec611..0000000
+++ /dev/null
@@ -1,8 +0,0 @@
-"Date d'imputation","N° de compte","Code journal","Pièce interne","Pièce externe","Libellé d'écriture","Débit","Crédit","Lettrage","Quantité","Code analytique","Date d'échéance","Date d'imputation origine","Code journal origine","Mode de règlement","Date début de période","Date fin de période"
-26.01.2010,"101300","BQ","BQ01.10",,"Depot société en formation",,"3.000,00",,,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"101300","BQ","BQ01.10",,"Depot société en formation",,"7.000,00",,,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"411OPEN","BQ","BQ01.10",,"Vir Client ",,"2.508,00","A",,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"455100","BQ","BQ01.10",,"Bankomat Raiffeise","250,00",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"512101","BQ","BQ01.10",,"Extrait bancaire 01.10","12.250,55",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"627800","BQ","BQ01.10",,"Envoi de chequier","2,30",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
-26.01.2010,"627800","BQ","BQ01.10",,"Frais d'expedition","5,15",,,,,"          ",26.01.2010,"BQ","    ","          ","          "
diff --git a/org.argeo.util/ext/test/org/argeo/util/ThroughputTest.java b/org.argeo.util/ext/test/org/argeo/util/ThroughputTest.java
deleted file mode 100644 (file)
index d62f55c..0000000
+++ /dev/null
@@ -1,17 +0,0 @@
-package org.argeo.util;
-
-public class ThroughputTest {
-       public void testParse() throws Exception {
-//             assert 0 == 1;
-
-               Throughput t;
-               t = new Throughput("3.54/s");
-               assert 3.54d == t.getValue();
-               assert Throughput.Unit.s.equals(t.getUnit());
-               assert 282l == (long) t.asMsPeriod();
-
-               t = new Throughput("35698.2569/h");
-               assert Throughput.Unit.h.equals(t.getUnit());
-               assert 101l == (long) t.asMsPeriod();
-       }
-}
diff --git a/org.argeo.util/pom.xml b/org.argeo.util/pom.xml
deleted file mode 100644 (file)
index 51a0e9c..0000000
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-       <modelVersion>4.0.0</modelVersion>
-       <parent>
-               <groupId>org.argeo.commons</groupId>
-               <artifactId>argeo-commons</artifactId>
-               <version>2.1.89-SNAPSHOT</version>
-               <relativePath>..</relativePath>
-       </parent>
-       <artifactId>org.argeo.util</artifactId>
-       <name>Commons Utilities</name>
-</project>
\ No newline at end of file
diff --git a/org.argeo.util/src/org/argeo/util/CsvParser.java b/org.argeo.util/src/org/argeo/util/CsvParser.java
deleted file mode 100644 (file)
index b903f77..0000000
+++ /dev/null
@@ -1,242 +0,0 @@
-package org.argeo.util;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.io.UnsupportedEncodingException;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-/**
- * Parses a CSV file interpreting the first line as a header. The
- * {@link #parse(InputStream)} method and the setters are synchronized so that
- * the object cannot be modified while parsing.
- */
-public abstract class CsvParser {
-       private char separator = ',';
-       private char quote = '\"';
-
-       private Boolean noHeader = false;
-       private Boolean strictLineAsLongAsHeader = true;
-
-       /**
-        * Actually process a parsed line. If
-        * {@link #setStrictLineAsLongAsHeader(Boolean)} is true (default) the header
-        * and the tokens are guaranteed to have the same size.
-        * 
-        * @param lineNumber the current line number, starting at 1 (the header line if
-        *                   header processing is enabled, the first data line otherwise)
-        * @param header     the read-only header or null if
-        *                   {@link #setNoHeader(Boolean)} is true (default is false)
-        * @param tokens     the parsed tokens
-        */
-       protected abstract void processLine(Integer lineNumber, List<String> header, List<String> tokens);
-
-       /**
-        * Parses the CSV file (stream is closed at the end)
-        * 
-        * @param in the stream to parse
-        * 
-        * @deprecated Use {@link #parse(InputStream, Charset)} instead.
-        */
-       @Deprecated
-       public synchronized void parse(InputStream in) {
-               parse(in, (Charset) null);
-       }
-
-       /**
-        * Parses the CSV file (stream is closed at the end)
-        * 
-        * @param in       the stream to parse
-        * @param encoding the encoding to use.
-        * 
-        * @deprecated Use {@link #parse(InputStream, Charset)} instead.
-        */
-       @Deprecated
-       public synchronized void parse(InputStream in, String encoding) {
-               Reader reader;
-               if (encoding == null)
-                       reader = new InputStreamReader(in);
-               else
-                       try {
-                               reader = new InputStreamReader(in, encoding);
-                       } catch (UnsupportedEncodingException e) {
-                               throw new IllegalArgumentException(e);
-                       }
-               parse(reader);
-       }
-
-       /**
-        * Parses the CSV file (stream is closed at the end)
-        * 
-        * @param in      the stream to parse
-        * @param charset the charset to use
-        */
-       public synchronized void parse(InputStream in, Charset charset) {
-               Reader reader;
-               if (charset == null)
-                       reader = new InputStreamReader(in);
-               else
-                       reader = new InputStreamReader(in, charset);
-               parse(reader);
-       }
-
-       /**
-        * Parses the CSV file (stream is closed at the end)
-        * 
-        * @param reader the reader to use (it will be buffered)
-        */
-       public synchronized void parse(Reader reader) {
-               Integer lineCount = 0;
-               try (BufferedReader bufferedReader = new BufferedReader(reader)) {
-                       List<String> header = null;
-                       if (!noHeader) {
-                               String headerStr = bufferedReader.readLine();
-                               if (headerStr == null)// empty file
-                                       return;
-                               lineCount++;
-                               header = new ArrayList<String>();
-                               StringBuffer currStr = new StringBuffer("");
-                               Boolean wasInquote = false;
-                               while (parseLine(headerStr, header, currStr, wasInquote)) {
-                                       headerStr = bufferedReader.readLine();
-                                       if (headerStr == null)
-                                               break;
-                                       wasInquote = true;
-                               }
-                               header = Collections.unmodifiableList(header);
-                       }
-
-                       String line = null;
-                       lines: while ((line = bufferedReader.readLine()) != null) {
-                               line = preProcessLine(line);
-                               if (line == null) {
-                                       // skip line
-                                       continue lines;
-                               }
-                               lineCount++;
-                               List<String> tokens = new ArrayList<String>();
-                               StringBuffer currStr = new StringBuffer("");
-                               Boolean wasInquote = false;
-                               sublines: while (parseLine(line, tokens, currStr, wasInquote)) {
-                                       line = bufferedReader.readLine();
-                                       if (line == null)
-                                               break sublines;
-                                       wasInquote = true;
-                               }
-                               if (!noHeader && strictLineAsLongAsHeader) {
-                                       int headerSize = header.size();
-                                       int tokenSize = tokens.size();
-                                       if (tokenSize == 1 && line.trim().equals(""))
-                                               continue lines;// empty line
-                                       if (headerSize != tokenSize) {
-                                               throw new IllegalStateException("Token size " + tokenSize + " is different from header size "
-                                                               + headerSize + " at line " + lineCount + ", line: " + line + ", header: " + header
-                                                               + ", tokens: " + tokens);
-                                       }
-                               }
-                               processLine(lineCount, header, tokens);
-                       }
-               } catch (IOException e) {
-                       throw new RuntimeException("Cannot parse CSV file (line: " + lineCount + ")", e);
-               }
-       }
-
-       /**
-        * Called before each (logical) line is processed, giving a chance to modify it
-        * (typically for cleaning dirty files). Meant to be overridden; returns the
-        * line unchanged by default. The line is skipped if 'null' is returned.
-        */
-       protected String preProcessLine(String line) {
-               return line;
-       }
-
-       /**
-        * Parses a line character by character for performance purposes.
-        * 
-        * @return whether to continue parsing this line
-        */
-       protected Boolean parseLine(String str, List<String> tokens, StringBuffer currStr, Boolean wasInquote) {
-               if (wasInquote)
-                       currStr.append('\n');
-
-               char[] arr = str.toCharArray();
-               boolean inQuote = wasInquote;
-               for (int i = 0; i < arr.length; i++) {
-                       char c = arr[i];
-                       if (c == separator) {
-                               if (!inQuote) {
-                                       tokens.add(currStr.toString());
-//                                     currStr.delete(0, currStr.length());
-                                       currStr.setLength(0);
-                                       currStr.trimToSize();
-                               } else {
-                                       // we don't remove separators that are inside a quoted substring
-                                       // System.out
-                                       // .println("IN QUOTE, got a separator: [" + c + "]");
-                                       currStr.append(c);
-                               }
-                       } else if (c == quote) {
-                               if (inQuote && (i + 1) < arr.length && arr[i + 1] == quote) {
-                                       // case of double quote
-                                       currStr.append(quote);
-                                       i++;
-                               } else {// standard
-                                       inQuote = inQuote ? false : true;
-                               }
-                       } else {
-                               currStr.append(c);
-                       }
-               }
-
-               if (!inQuote) {
-                       tokens.add(currStr.toString());
-                       // System.out.println("# TOKEN: " + currStr);
-               }
-               // if (inQuote)
-               // throw new ArgeoException("Missing quote at the end of the line "
-               // + str + " (parsed: " + tokens + ")");
-               if (inQuote)
-                       return true;
-               else
-                       return false;
-               // return tokens;
-       }
-
-       public char getSeparator() {
-               return separator;
-       }
-
-       public synchronized void setSeparator(char separator) {
-               this.separator = separator;
-       }
-
-       public char getQuote() {
-               return quote;
-       }
-
-       public synchronized void setQuote(char quote) {
-               this.quote = quote;
-       }
-
-       public Boolean getNoHeader() {
-               return noHeader;
-       }
-
-       public synchronized void setNoHeader(Boolean noHeader) {
-               this.noHeader = noHeader;
-       }
-
-       public Boolean getStrictLineAsLongAsHeader() {
-               return strictLineAsLongAsHeader;
-       }
-
-       public synchronized void setStrictLineAsLongAsHeader(Boolean strictLineAsLongAsHeader) {
-               this.strictLineAsLongAsHeader = strictLineAsLongAsHeader;
-       }
-
-}
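
For reference, a minimal usage sketch of the CsvParser shown above (class and data names are illustrative, assuming the class remains available on the classpath): subclass it and implement processLine(); separators inside quoted tokens are preserved.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.argeo.util.CsvParser;

/** Prints each parsed CSV line with its line number. */
public class PrintingCsvParser extends CsvParser {
	@Override
	protected void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
		System.out.println(lineNumber + " " + header + " -> " + tokens);
	}

	public static void main(String[] args) {
		String csv = "id,name\n1,\"Doe, John\"\n2,Jane\n";
		InputStream in = new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8));
		// the separator inside the quoted token "Doe, John" is preserved
		new PrintingCsvParser().parse(in, StandardCharsets.UTF_8);
	}
}
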
diff --git a/org.argeo.util/src/org/argeo/util/CsvParserWithLinesAsMap.java b/org.argeo.util/src/org/argeo/util/CsvParserWithLinesAsMap.java
deleted file mode 100644 (file)
index 8eb6e94..0000000
+++ /dev/null
@@ -1,36 +0,0 @@
-package org.argeo.util;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * CSV parser that processes each line as a map whose keys are the header
- * fields.
- */
-public abstract class CsvParserWithLinesAsMap extends CsvParser {
-
-       /**
-        * Actually processes a line.
-        * 
-        * @param lineNumber the current line number, starts at 1 (the header, if header
-        *                   processing is enabled, the first line otherwise)
-        * @param line       the parsed tokens as a map whose keys are the header fields
-        */
-       protected abstract void processLine(Integer lineNumber, Map<String, String> line);
-
-       protected final void processLine(Integer lineNumber, List<String> header, List<String> tokens) {
-               if (header == null)
-                       throw new IllegalArgumentException("Only CSV with header is supported");
-               Map<String, String> line = new HashMap<String, String>();
-               for (int i = 0; i < header.size(); i++) {
-                       String key = header.get(i);
-                       String value = null;
-                       if (i < tokens.size())
-                               value = tokens.get(i);
-                       line.put(key, value);
-               }
-               processLine(lineNumber, line);
-       }
-
-}
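
A minimal sketch of the map-based variant above (illustrative names, assuming the class is on the classpath): each data line arrives as a map keyed by the header fields, so a header line is required.

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;

import org.argeo.util.CsvParserWithLinesAsMap;

/** Prints selected columns of each line, looked up by header name. */
public class MapPrintingCsvParser extends CsvParserWithLinesAsMap {
	@Override
	protected void processLine(Integer lineNumber, Map<String, String> line) {
		System.out.println(line.get("id") + " -> " + line.get("name"));
	}

	public static void main(String[] args) {
		String csv = "id,name\n1,John\n2,Jane\n";
		new MapPrintingCsvParser().parse(new ByteArrayInputStream(csv.getBytes(StandardCharsets.UTF_8)),
				StandardCharsets.UTF_8);
	}
}
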
diff --git a/org.argeo.util/src/org/argeo/util/CsvWriter.java b/org.argeo.util/src/org/argeo/util/CsvWriter.java
deleted file mode 100644 (file)
index 41ea65d..0000000
+++ /dev/null
@@ -1,142 +0,0 @@
-package org.argeo.util;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.UnsupportedEncodingException;
-import java.io.Writer;
-import java.nio.charset.Charset;
-import java.util.Iterator;
-import java.util.List;
-
-/** Write in CSV format. */
-public class CsvWriter {
-       private final Writer out;
-
-       private char separator = ',';
-       private char quote = '\"';
-
-       /**
-        * Creates a CSV writer.
-        * 
-        * @param out the stream to write to. Caller is responsible for closing it.
-        * 
-        * @deprecated Use {@link #CsvWriter(OutputStream, Charset)} instead.
-        * 
-        */
-       @Deprecated
-       public CsvWriter(OutputStream out) {
-               this.out = new OutputStreamWriter(out);
-       }
-
-       /**
-        * Creates a CSV writer.
-        * 
-        * @param out      the stream to write to. Caller is responsible for closing it.
-        * @param encoding the encoding to use.
-        * 
-        * @deprecated Use {@link #CsvWriter(OutputStream, Charset)} instead.
-        */
-       @Deprecated
-       public CsvWriter(OutputStream out, String encoding) {
-               try {
-                       this.out = new OutputStreamWriter(out, encoding);
-               } catch (UnsupportedEncodingException e) {
-                       throw new IllegalArgumentException(e);
-               }
-       }
-
-       /**
-        * Creates a CSV writer.
-        * 
-        * @param out     the stream to write to. Caller is responsible for closing it.
-        * @param charset the charset to use
-        */
-       public CsvWriter(OutputStream out, Charset charset) {
-               this.out = new OutputStreamWriter(out, charset);
-       }
-
-       /**
-        * Writes a CSV line. Can also be used to write a header (the writer does not
-        * treat it specially): simply call it first, before writing the data lines.
-        */
-       public void writeLine(List<?> tokens) {
-               try {
-                       Iterator<?> it = tokens.iterator();
-                       while (it.hasNext()) {
-                               writeToken(it.next().toString());
-                               if (it.hasNext())
-                                       out.write(separator);
-                       }
-                       out.write('\n');
-                       out.flush();
-               } catch (IOException e) {
-                       throw new RuntimeException("Could not write " + tokens, e);
-               }
-       }
-
-       /**
-        * Writes a CSV line. Can also be used to write a header (the writer does not
-        * treat it specially): simply call it first, before writing the data lines.
-        */
-       public void writeLine(Object[] tokens) {
-               try {
-                       for (int i = 0; i < tokens.length; i++) {
-                               if (tokens[i] == null) {
-                                       // TODO configure how to deal with null
-                                       writeToken("");
-                               } else {
-                                       writeToken(tokens[i].toString());
-                               }
-                               if (i != (tokens.length - 1))
-                                       out.write(separator);
-                       }
-                       out.write('\n');
-                       out.flush();
-               } catch (IOException e) {
-                       throw new RuntimeException("Could not write " + tokens, e);
-               }
-       }
-
-       protected void writeToken(String token) throws IOException {
-               // +2 for possible quotes, another +2 assuming there would be an already
-               // quoted string where quotes need to be duplicated
-               // another +2 for safety
-               // we don't want to increase buffer size while writing
-               StringBuffer buf = new StringBuffer(token.length() + 6);
-               char[] arr = token.toCharArray();
-               boolean shouldQuote = false;
-               for (char c : arr) {
-                       if (!shouldQuote) {
-                               if (c == separator)
-                                       shouldQuote = true;
-                               if (c == '\n')
-                                       shouldQuote = true;
-                       }
-
-                       if (c == quote) {
-                               shouldQuote = true;
-                               // duplicate quote
-                               buf.append(quote);
-                       }
-
-                       // generic case
-                       buf.append(c);
-               }
-
-               if (shouldQuote == true)
-                       out.write(quote);
-               out.write(buf.toString());
-               if (shouldQuote == true)
-                       out.write(quote);
-       }
-
-       public void setSeparator(char separator) {
-               this.separator = separator;
-       }
-
-       public void setQuote(char quote) {
-               this.quote = quote;
-       }
-
-}
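
A minimal sketch of writing with the CsvWriter above (illustrative file name, assuming the class is on the classpath): the caller owns the output stream, and tokens containing the separator, a quote or a newline are quoted automatically.

import java.io.FileOutputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import org.argeo.util.CsvWriter;

public class CsvWriterExample {
	public static void main(String[] args) throws Exception {
		try (OutputStream out = new FileOutputStream("example.csv")) {
			CsvWriter writer = new CsvWriter(out, StandardCharsets.UTF_8);
			// the first line written simply serves as the header
			writer.writeLine(Arrays.asList("id", "name"));
			// the embedded comma forces the token to be quoted
			writer.writeLine(new Object[] { 1, "Doe, John" });
		}
	}
}
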
diff --git a/org.argeo.util/src/org/argeo/util/DictionaryKeys.java b/org.argeo.util/src/org/argeo/util/DictionaryKeys.java
deleted file mode 100644 (file)
index d17c86f..0000000
+++ /dev/null
@@ -1,42 +0,0 @@
-package org.argeo.util;
-
-import java.util.Dictionary;
-import java.util.Enumeration;
-import java.util.Iterator;
-
-/**
- * Access the keys of a {@link String}-keyed {@link Dictionary} (common throughout
- * the OSGi APIs) as an {@link Iterable} so that they are easily usable in
- * for-each loops.
- */
-class DictionaryKeys implements Iterable<String> {
-       private final Dictionary<String, ?> dictionary;
-
-       public DictionaryKeys(Dictionary<String, ?> dictionary) {
-               this.dictionary = dictionary;
-       }
-
-       @Override
-       public Iterator<String> iterator() {
-               return new KeyIterator(dictionary.keys());
-       }
-
-       private static class KeyIterator implements Iterator<String> {
-               private final Enumeration<String> keys;
-
-               KeyIterator(Enumeration<String> keys) {
-                       this.keys = keys;
-               }
-
-               @Override
-               public boolean hasNext() {
-                       return keys.hasMoreElements();
-               }
-
-               @Override
-               public String next() {
-                       return keys.nextElement();
-               }
-
-       }
-}
diff --git a/org.argeo.util/src/org/argeo/util/DigestUtils.java b/org.argeo.util/src/org/argeo/util/DigestUtils.java
deleted file mode 100644 (file)
index ce01800..0000000
+++ /dev/null
@@ -1,201 +0,0 @@
-package org.argeo.util;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.channels.FileChannel.MapMode;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-
-/** Utilities around cryptographic digests */
-public class DigestUtils {
-       public final static String MD5 = "MD5";
-       public final static String SHA1 = "SHA1";
-       public final static String SHA256 = "SHA-256";
-       public final static String SHA512 = "SHA-512";
-
-       private static Boolean debug = false;
-       // TODO: make it configurable
-       private final static Integer byteBufferCapacity = 100 * 1024;// 100 KB
-
-       public static byte[] sha1(byte[] bytes) {
-               try {
-                       MessageDigest digest = MessageDigest.getInstance(SHA1);
-                       digest.update(bytes);
-                       byte[] checksum = digest.digest();
-                       return checksum;
-               } catch (NoSuchAlgorithmException e) {
-                       throw new IllegalArgumentException(e);
-               }
-       }
-
-       public static String digest(String algorithm, byte[] bytes) {
-               try {
-                       MessageDigest digest = MessageDigest.getInstance(algorithm);
-                       digest.update(bytes);
-                       byte[] checksum = digest.digest();
-                       String res = encodeHexString(checksum);
-                       return res;
-               } catch (NoSuchAlgorithmException e) {
-                       throw new IllegalArgumentException("Cannot digest with algorithm " + algorithm, e);
-               }
-       }
-
-       public static String digest(String algorithm, InputStream in) {
-               try {
-                       MessageDigest digest = MessageDigest.getInstance(algorithm);
-                       // ReadableByteChannel channel = Channels.newChannel(in);
-                       // ByteBuffer bb = ByteBuffer.allocateDirect(byteBufferCapacity);
-                       // while (channel.read(bb) > 0)
-                       // digest.update(bb);
-                       byte[] buffer = new byte[byteBufferCapacity];
-                       int read = 0;
-                       while ((read = in.read(buffer)) > 0) {
-                               digest.update(buffer, 0, read);
-                       }
-
-                       byte[] checksum = digest.digest();
-                       String res = encodeHexString(checksum);
-                       return res;
-               } catch (NoSuchAlgorithmException e) {
-                       throw new IllegalArgumentException("Cannot digest with algorithm " + algorithm, e);
-               } catch (IOException e) {
-                       throw new RuntimeException(e);
-               } finally {
-                       StreamUtils.closeQuietly(in);
-               }
-       }
-
-       public static String digest(String algorithm, File file) {
-               FileInputStream fis = null;
-               FileChannel fc = null;
-               try {
-                       fis = new FileInputStream(file);
-                       fc = fis.getChannel();
-
-                       // Get the file's size and then map it into memory
-                       int sz = (int) fc.size();
-                       ByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, sz);
-                       return digest(algorithm, bb);
-               } catch (IOException e) {
-                       throw new IllegalArgumentException("Cannot digest " + file + " with algorithm " + algorithm, e);
-               } finally {
-                       StreamUtils.closeQuietly(fis);
-                       if (fc.isOpen())
-                               try {
-                                       fc.close();
-                               } catch (IOException e) {
-                                       // silent
-                               }
-               }
-       }
-
-       protected static String digest(String algorithm, ByteBuffer bb) {
-               long begin = System.currentTimeMillis();
-               try {
-                       MessageDigest digest = MessageDigest.getInstance(algorithm);
-                       digest.update(bb);
-                       byte[] checksum = digest.digest();
-                       String res = encodeHexString(checksum);
-                       long end = System.currentTimeMillis();
-                       if (debug)
-                               System.out.println((end - begin) + " ms / " + ((end - begin) / 1000) + " s");
-                       return res;
-               } catch (NoSuchAlgorithmException e) {
-                       throw new IllegalArgumentException("Cannot digest with algorithm " + algorithm, e);
-               }
-       }
-
-       public static String sha1hex(Path path) {
-               return digest(SHA1, path, byteBufferCapacity);
-       }
-
-       public static String digest(String algorithm, Path path, long bufferSize) {
-               byte[] digest = digestRaw(algorithm, path, bufferSize);
-               return encodeHexString(digest);
-       }
-
-       public static byte[] digestRaw(String algorithm, Path file, long bufferSize) {
-               long begin = System.currentTimeMillis();
-               try {
-                       MessageDigest md = MessageDigest.getInstance(algorithm);
-                       FileChannel fc = FileChannel.open(file);
-                       long fileSize = Files.size(file);
-                       if (fileSize <= bufferSize) {
-                               ByteBuffer bb = fc.map(MapMode.READ_ONLY, 0, fileSize);
-                               md.update(bb);
-                       } else {
-                               long lastCycle = (fileSize / bufferSize) - 1;
-                               long position = 0;
-                               for (int i = 0; i <= lastCycle; i++) {
-                                       ByteBuffer bb;
-                                       if (i != lastCycle) {
-                                               bb = fc.map(MapMode.READ_ONLY, position, bufferSize);
-                                               position = position + bufferSize;
-                                       } else {
-                                               bb = fc.map(MapMode.READ_ONLY, position, fileSize - position);
-                                               position = fileSize;
-                                       }
-                                       md.update(bb);
-                               }
-                       }
-                       long end = System.currentTimeMillis();
-                       if (debug)
-                               System.out.println((end - begin) + " ms / " + ((end - begin) / 1000) + " s");
-                       return md.digest();
-               } catch (NoSuchAlgorithmException e) {
-                       throw new IllegalArgumentException("Cannot digest " + file + "  with algorithm " + algorithm, e);
-               } catch (IOException e) {
-                       throw new RuntimeException("Cannot digest " + file + "  with algorithm " + algorithm, e);
-               }
-       }
-
-       public static void main(String[] args) {
-               File file;
-               if (args.length > 0)
-                       file = new File(args[0]);
-               else {
-                       System.err.println("Usage: <file> [<algorithm>]" + " (see http://java.sun.com/j2se/1.5.0/"
-                                       + "docs/guide/security/CryptoSpec.html#AppA)");
-                       return;
-               }
-
-               if (args.length > 1) {
-                       String algorithm = args[1];
-                       System.out.println(digest(algorithm, file));
-               } else {
-                       String algorithm = "MD5";
-                       System.out.println(algorithm + ": " + digest(algorithm, file));
-                       algorithm = "SHA";
-                       System.out.println(algorithm + ": " + digest(algorithm, file));
-                       System.out.println(algorithm + ": " + sha1hex(file.toPath()));
-                       algorithm = "SHA-256";
-                       System.out.println(algorithm + ": " + digest(algorithm, file));
-                       algorithm = "SHA-512";
-                       System.out.println(algorithm + ": " + digest(algorithm, file));
-               }
-       }
-
-       final private static char[] hexArray = "0123456789abcdef".toCharArray();
-
-       /**
-        * From http://stackoverflow.com/questions/9655181/how-to-convert-a-byte-array-to-a-hex-string-in-java
-        */
-       public static String encodeHexString(byte[] bytes) {
-               char[] hexChars = new char[bytes.length * 2];
-               for (int j = 0; j < bytes.length; j++) {
-                       int v = bytes[j] & 0xFF;
-                       hexChars[j * 2] = hexArray[v >>> 4];
-                       hexChars[j * 2 + 1] = hexArray[v & 0x0F];
-               }
-               return new String(hexChars);
-       }
-
-}
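
A minimal sketch of the DigestUtils API above (illustrative, assuming the class is on the classpath): a hex digest of an in-memory byte array and of a file passed as first argument, using the same 100 KB buffer size as sha1hex().

import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;

import org.argeo.util.DigestUtils;

public class DigestExample {
	public static void main(String[] args) {
		// SHA-256 of an in-memory byte array, returned as a lowercase hex string
		byte[] data = "hello".getBytes(StandardCharsets.UTF_8);
		System.out.println(DigestUtils.digest(DigestUtils.SHA256, data));
		// memory-mapped SHA1 digest of a file, 100 KB buffer as in sha1hex()
		System.out.println(DigestUtils.digest(DigestUtils.SHA1, Paths.get(args[0]), 100 * 1024));
	}
}
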
diff --git a/org.argeo.util/src/org/argeo/util/DirH.java b/org.argeo.util/src/org/argeo/util/DirH.java
deleted file mode 100644 (file)
index b6d962f..0000000
+++ /dev/null
@@ -1,116 +0,0 @@
-package org.argeo.util;
-
-import java.io.IOException;
-import java.io.PrintStream;
-import java.nio.charset.Charset;
-import java.nio.file.DirectoryStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/** Hashes the hashes of the files in a directory. */
-public class DirH {
-
-       private final static Charset charset = Charset.forName("UTF-16");
-       private final static long bufferSize = 200 * 1024 * 1024;
-       private final static String algorithm = "SHA";
-
-       private final static byte EOL = (byte) '\n';
-       private final static byte SPACE = (byte) ' ';
-
-       private final int hashSize;
-
-       private final byte[][] hashes;
-       private final byte[][] fileNames;
-       private final byte[] digest;
-       private final byte[] dirName;
-
-       /**
-        * @param dirName can be null or empty
-        */
-       private DirH(byte[][] hashes, byte[][] fileNames, byte[] dirName) {
-               if (hashes.length != fileNames.length)
-                       throw new IllegalArgumentException(hashes.length + " hashes and " + fileNames.length + " file names");
-               this.hashes = hashes;
-               this.fileNames = fileNames;
-               this.dirName = dirName == null ? new byte[0] : dirName;
-               if (hashes.length == 0) {// empty dir
-                       hashSize = 20;
-                       // FIXME what is the digest of an empty dir?
-                       digest = new byte[hashSize];
-                       Arrays.fill(digest, SPACE);
-                       return;
-               }
-               hashSize = hashes[0].length;
-               for (int i = 0; i < hashes.length; i++) {
-                       if (hashes[i].length != hashSize)
-                               throw new IllegalArgumentException(
-                                               "Hash size for " + new String(fileNames[i], charset) + " is " + hashes[i].length);
-               }
-
-               try {
-                       MessageDigest md = MessageDigest.getInstance(algorithm);
-                       for (int i = 0; i < hashes.length; i++) {
-                               md.update(this.hashes[i]);
-                               md.update(SPACE);
-                               md.update(this.fileNames[i]);
-                               md.update(EOL);
-                       }
-                       digest = md.digest();
-               } catch (NoSuchAlgorithmException e) {
-                       throw new IllegalArgumentException("Cannot digest", e);
-               }
-       }
-
-       public void print(PrintStream out) {
-               out.print(DigestUtils.encodeHexString(digest));
-               if (dirName.length > 0) {
-                       out.print(' ');
-                       out.print(new String(dirName, charset));
-               }
-               out.print('\n');
-               for (int i = 0; i < hashes.length; i++) {
-                       out.print(DigestUtils.encodeHexString(hashes[i]));
-                       out.print(' ');
-                       out.print(new String(fileNames[i], charset));
-                       out.print('\n');
-               }
-       }
-
-       public static DirH digest(Path dir) {
-               try (DirectoryStream<Path> files = Files.newDirectoryStream(dir)) {
-                       List<byte[]> hs = new ArrayList<byte[]>();
-                       List<String> fNames = new ArrayList<>();
-                       for (Path file : files) {
-                               if (!Files.isDirectory(file)) {
-                                       byte[] digest = DigestUtils.digestRaw(algorithm, file, bufferSize);
-                                       hs.add(digest);
-                                       fNames.add(file.getFileName().toString());
-                               }
-                       }
-
-                       byte[][] fileNames = new byte[fNames.size()][];
-                       for (int i = 0; i < fNames.size(); i++) {
-                               fileNames[i] = fNames.get(i).getBytes(charset);
-                       }
-                       byte[][] hashes = hs.toArray(new byte[hs.size()][]);
-                       return new DirH(hashes, fileNames, dir.toString().getBytes(charset));
-               } catch (IOException e) {
-                       throw new RuntimeException("Cannot digest " + dir, e);
-               }
-       }
-
-       public static void main(String[] args) {
-               try {
-                       DirH dirH = DirH.digest(Paths.get("/home/mbaudier/tmp/"));
-                       dirH.print(System.out);
-               } catch (Exception e) {
-                       e.printStackTrace();
-               }
-       }
-}
diff --git a/org.argeo.util/src/org/argeo/util/LangUtils.java b/org.argeo.util/src/org/argeo/util/LangUtils.java
deleted file mode 100644 (file)
index 7824d12..0000000
+++ /dev/null
@@ -1,253 +0,0 @@
-package org.argeo.util;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Writer;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardOpenOption;
-import java.time.ZonedDateTime;
-import java.time.temporal.ChronoUnit;
-import java.time.temporal.Temporal;
-import java.util.Dictionary;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.Hashtable;
-import java.util.Map;
-import java.util.Properties;
-
-import javax.naming.InvalidNameException;
-import javax.naming.ldap.LdapName;
-
-/** Utilities around Java basic features. */
-public class LangUtils {
-       /*
-        * NON-API OSGi
-        */
-       /**
-        * Returns an array with the names of the provided classes. Useful when
-        * registering services with multiple interfaces in OSGi.
-        */
-       public static String[] names(Class<?>... clzz) {
-               String[] res = new String[clzz.length];
-               for (int i = 0; i < clzz.length; i++)
-                       res[i] = clzz[i].getName();
-               return res;
-       }
-
-       /*
-        * MAP
-        */
-       /**
-        * Creates a new {@link Map} with one key-value pair. Key should not be
-        * null, but if the value is null, it returns an empty {@link Map}.
-        */
-       public static Map<String, Object> map(String key, Object value) {
-               assert key != null;
-               HashMap<String, Object> props = new HashMap<>();
-               if (value != null)
-                       props.put(key, value);
-               return props;
-       }
-
-       /*
-        * DICTIONARY
-        */
-
-       /**
-        * Creates a new {@link Dictionary} with one key-value pair. Key should not be
-        * null, but if the value is null, it returns an empty {@link Dictionary}.
-        */
-       public static Dictionary<String, Object> dict(String key, Object value) {
-               assert key != null;
-               Hashtable<String, Object> props = new Hashtable<>();
-               if (value != null)
-                       props.put(key, value);
-               return props;
-       }
-
-       /** @deprecated Use {@link #dict(String, Object)} instead. */
-       @Deprecated
-       public static Dictionary<String, Object> dico(String key, Object value) {
-               return dict(key, value);
-       }
-
-       /** Converts a {@link Dictionary} to a {@link Map} of strings. */
-       public static Map<String, String> dictToStringMap(Dictionary<String, ?> properties) {
-               if (properties == null) {
-                       return null;
-               }
-               Map<String, String> res = new HashMap<>(properties.size());
-               Enumeration<String> keys = properties.keys();
-               while (keys.hasMoreElements()) {
-                       String key = keys.nextElement();
-                       res.put(key, properties.get(key).toString());
-               }
-               return res;
-       }
-
-       /**
-        * Gets a string property from this map, or <code>null</code> if it is not
-        * found.
-        */
-       public static String get(Map<String, ?> map, String key) {
-               Object res = map.get(key);
-               if (res == null)
-                       return null;
-               return res.toString();
-       }
-
-       /**
-        * Get a string property from this map, expecting to find it.
-        * 
-        * @throws IllegalArgumentException if the key was not found
-        */
-       public static String getNotNull(Map<String, ?> map, String key) {
-               Object res = map.get(key);
-               if (res == null)
-                       throw new IllegalArgumentException("Map " + map + " should contain key " + key);
-               return res.toString();
-       }
-
-       /**
-        * Wraps the keys of the provided {@link Dictionary} as an {@link Iterable}.
-        */
-       public static Iterable<String> keys(Dictionary<String, ?> props) {
-               assert props != null;
-               return new DictionaryKeys(props);
-       }
-
-       static String toJson(Dictionary<String, ?> props) {
-               return toJson(props, false);
-       }
-
-       static String toJson(Dictionary<String, ?> props, boolean pretty) {
-               StringBuilder sb = new StringBuilder();
-               sb.append('{');
-               if (pretty)
-                       sb.append('\n');
-               Enumeration<String> keys = props.keys();
-               while (keys.hasMoreElements()) {
-                       String key = keys.nextElement();
-                       if (pretty)
-                               sb.append(' ');
-                       sb.append('\"').append(key).append('\"');
-                       if (pretty)
-                               sb.append(" : ");
-                       else
-                               sb.append(':');
-                       sb.append('\"').append(props.get(key)).append('\"');
-                       if (keys.hasMoreElements())
-                               sb.append(", ");
-                       if (pretty)
-                               sb.append('\n');
-               }
-               sb.append('}');
-               return sb.toString();
-       }
-
-       static void storeAsProperties(Dictionary<String, Object> props, Path path) throws IOException {
-               if (props == null)
-                       throw new IllegalArgumentException("Props cannot be null");
-               Properties toStore = new Properties();
-               for (Enumeration<String> keys = props.keys(); keys.hasMoreElements();) {
-                       String key = keys.nextElement();
-                       toStore.setProperty(key, props.get(key).toString());
-               }
-               try (OutputStream out = Files.newOutputStream(path)) {
-                       toStore.store(out, null);
-               }
-       }
-
-       static void appendAsLdif(String dnBase, String dnKey, Dictionary<String, Object> props, Path path)
-                       throws IOException {
-               if (props == null)
-                       throw new IllegalArgumentException("Props cannot be null");
-               Object dnValue = props.get(dnKey);
-               String dnStr = dnKey + '=' + dnValue + ',' + dnBase;
-               LdapName dn;
-               try {
-                       dn = new LdapName(dnStr);
-               } catch (InvalidNameException e) {
-                       throw new IllegalArgumentException("Cannot interpret DN " + dnStr, e);
-               }
-               if (dnValue == null)
-                       throw new IllegalArgumentException("DN key " + dnKey + " must have a value");
-               try (Writer writer = Files.newBufferedWriter(path, StandardOpenOption.APPEND, StandardOpenOption.CREATE)) {
-                       writer.append("\ndn: ");
-                       writer.append(dn.toString());
-                       writer.append('\n');
-                       for (Enumeration<String> keys = props.keys(); keys.hasMoreElements();) {
-                               String key = keys.nextElement();
-                               Object value = props.get(key);
-                               writer.append(key);
-                               writer.append(": ");
-                               // FIXME deal with binary and multiple values
-                               writer.append(value.toString());
-                               writer.append('\n');
-                       }
-               }
-       }
-
-       static Dictionary<String, Object> loadFromProperties(Path path) throws IOException {
-               Properties toLoad = new Properties();
-               try (InputStream in = Files.newInputStream(path)) {
-                       toLoad.load(in);
-               }
-               Dictionary<String, Object> res = new Hashtable<String, Object>();
-               for (Object key : toLoad.keySet())
-                       res.put(key.toString(), toLoad.get(key));
-               return res;
-       }
-
-       /*
-        * EXCEPTIONS
-        */
-       /**
-        * Chains the messages of all causes (one per line, <b>starting with a line
-        * return</b>) without the full stack trace.
-        */
-       public static String chainCausesMessages(Throwable t) {
-               StringBuffer buf = new StringBuffer();
-               chainCauseMessage(buf, t);
-               return buf.toString();
-       }
-
-       /** Recursive chaining of messages */
-       private static void chainCauseMessage(StringBuffer buf, Throwable t) {
-               buf.append('\n').append(' ').append(t.getClass().getCanonicalName()).append(": ").append(t.getMessage());
-               if (t.getCause() != null)
-                       chainCauseMessage(buf, t.getCause());
-       }
-
-       /*
-        * TIME
-        */
-       /** Formats time elapsed since start. */
-       public static String since(ZonedDateTime start) {
-               ZonedDateTime now = ZonedDateTime.now();
-               return duration(start, now);
-       }
-
-       /** Formats a duration. */
-       public static String duration(Temporal start, Temporal end) {
-               long count = ChronoUnit.DAYS.between(start, end);
-               if (count != 0)
-                       return count > 1 ? count + " days" : count + " day";
-               count = ChronoUnit.HOURS.between(start, end);
-               if (count != 0)
-                       return count > 1 ? count + " hours" : count + " hour";
-               count = ChronoUnit.MINUTES.between(start, end);
-               if (count != 0)
-                       return count > 1 ? count + " minutes" : count + " minute";
-               count = ChronoUnit.SECONDS.between(start, end);
-               return count > 1 ? count + " seconds" : count + " second";
-       }
-
-       /** Singleton constructor. */
-       private LangUtils() {
-
-       }
-
-}
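
A minimal sketch of the LangUtils helpers above (illustrative property names, assuming the class is on the classpath): build a single-entry Dictionary, iterate its keys through the Iterable wrapper backed by DictionaryKeys, and format an elapsed duration.

import java.time.ZonedDateTime;
import java.util.Dictionary;

import org.argeo.util.LangUtils;

public class LangUtilsExample {
	public static void main(String[] args) {
		Dictionary<String, Object> props = LangUtils.dict("service.pid", "org.example.service");
		// keys() wraps the Dictionary keys as an Iterable (see DictionaryKeys above)
		for (String key : LangUtils.keys(props))
			System.out.println(key + " = " + props.get(key));
		// formats the time elapsed since a start instant, e.g. "5 minutes"
		System.out.println(LangUtils.since(ZonedDateTime.now().minusMinutes(5)));
	}
}
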
diff --git a/org.argeo.util/src/org/argeo/util/OS.java b/org.argeo.util/src/org/argeo/util/OS.java
deleted file mode 100644 (file)
index d8127b6..0000000
+++ /dev/null
@@ -1,56 +0,0 @@
-package org.argeo.util;
-
-import java.io.File;
-import java.lang.management.ManagementFactory;
-
-/** When OS-specific information is needed. */
-public class OS {
-       public final static OS LOCAL = new OS();
-
-       private final String arch, name, version;
-
-       /** The OS of the running JVM */
-       protected OS() {
-               arch = System.getProperty("os.arch");
-               name = System.getProperty("os.name");
-               version = System.getProperty("os.version");
-       }
-
-       public String getArch() {
-               return arch;
-       }
-
-       public String getName() {
-               return name;
-       }
-
-       public String getVersion() {
-               return version;
-       }
-
-       public boolean isMSWindows() {
-               // only MS Windows would use such a horrendous separator...
-               return File.separatorChar == '\\';
-       }
-
-       public String[] getDefaultShellCommand() {
-               if (!isMSWindows())
-                       return new String[] { "/bin/sh", "-l", "-i" };
-               else
-                       return new String[] { "cmd.exe", "/C" };
-       }
-
-       public static Integer getJvmPid() {
-               /*
-                * This method works on most platforms (including Linux). Although when Java 9
-                * comes along, there is a better way: long pid =
-                * ProcessHandle.current().getPid();
-                *
-                * See:
-                * http://stackoverflow.com/questions/35842/how-can-a-java-program-get-its-own-
-                * process-id
-                */
-               String pidAndHost = ManagementFactory.getRuntimeMXBean().getName();
-               return Integer.parseInt(pidAndHost.substring(0, pidAndHost.indexOf('@')));
-       }
-}
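
A minimal sketch of the OS helper above (assuming the class is on the classpath): the LOCAL singleton describes the operating system of the running JVM.

import org.argeo.util.OS;

public class OsExample {
	public static void main(String[] args) {
		OS os = OS.LOCAL;
		System.out.println(os.getName() + " " + os.getVersion() + " (" + os.getArch() + ")");
		System.out.println("MS Windows: " + os.isMSWindows());
		System.out.println("Default shell: " + String.join(" ", os.getDefaultShellCommand()));
		System.out.println("JVM PID: " + OS.getJvmPid());
	}
}
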
diff --git a/org.argeo.util/src/org/argeo/util/PasswordEncryption.java b/org.argeo.util/src/org/argeo/util/PasswordEncryption.java
deleted file mode 100644 (file)
index c95c787..0000000
+++ /dev/null
@@ -1,216 +0,0 @@
-package org.argeo.util;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.charset.Charset;
-import java.nio.charset.StandardCharsets;
-import java.security.GeneralSecurityException;
-import java.security.InvalidKeyException;
-import java.security.Key;
-
-import javax.crypto.Cipher;
-import javax.crypto.CipherInputStream;
-import javax.crypto.CipherOutputStream;
-import javax.crypto.SecretKey;
-import javax.crypto.SecretKeyFactory;
-import javax.crypto.spec.IvParameterSpec;
-import javax.crypto.spec.PBEKeySpec;
-import javax.crypto.spec.SecretKeySpec;
-
-public class PasswordEncryption {
-       public final static Integer DEFAULT_ITERATION_COUNT = 1024;
-       /** Stronger with 256, but causes problems with the Oracle JVM */
-       public final static Integer DEFAULT_SECRETE_KEY_LENGTH = 256;
-       public final static Integer DEFAULT_SECRETE_KEY_LENGTH_RESTRICTED = 128;
-       public final static String DEFAULT_SECRETE_KEY_FACTORY = "PBKDF2WithHmacSHA1";
-       public final static String DEFAULT_SECRETE_KEY_ENCRYPTION = "AES";
-       public final static String DEFAULT_CIPHER_NAME = "AES/CBC/PKCS5Padding";
-//     public final static String DEFAULT_CHARSET = "UTF-8";
-       public final static Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
-
-       private Integer iterationCount = DEFAULT_ITERATION_COUNT;
-       private Integer secreteKeyLength = DEFAULT_SECRETE_KEY_LENGTH;
-       private String secreteKeyFactoryName = DEFAULT_SECRETE_KEY_FACTORY;
-       private String secreteKeyEncryption = DEFAULT_SECRETE_KEY_ENCRYPTION;
-       private String cipherName = DEFAULT_CIPHER_NAME;
-
-       private static byte[] DEFAULT_SALT_8 = { (byte) 0xA9, (byte) 0x9B, (byte) 0xC8, (byte) 0x32, (byte) 0x56,
-                       (byte) 0x35, (byte) 0xE3, (byte) 0x03 };
-       private static byte[] DEFAULT_IV_16 = { (byte) 0xA9, (byte) 0x9B, (byte) 0xC8, (byte) 0x32, (byte) 0x56,
-                       (byte) 0x35, (byte) 0xE3, (byte) 0x03, (byte) 0xA9, (byte) 0x9B, (byte) 0xC8, (byte) 0x32, (byte) 0x56,
-                       (byte) 0x35, (byte) 0xE3, (byte) 0x03 };
-
-       private Key key;
-       private Cipher ecipher;
-       private Cipher dcipher;
-
-       private String securityProviderName = null;
-
-       /**
-        * It is up to the caller to clear the passed array. Neither a copy of nor a
-        * reference to the passed array is kept.
-        */
-       public PasswordEncryption(char[] password) {
-               this(password, DEFAULT_SALT_8, DEFAULT_IV_16);
-       }
-
-       /**
-        * It is up to the caller to clear the passed arrays. Neither copies of nor
-        * references to the passed arrays are kept.
-        */
-       public PasswordEncryption(char[] password, byte[] passwordSalt, byte[] initializationVector) {
-               try {
-                       initKeyAndCiphers(password, passwordSalt, initializationVector);
-               } catch (InvalidKeyException e) {
-                       Integer previousSecreteKeyLength = secreteKeyLength;
-                       secreteKeyLength = DEFAULT_SECRETE_KEY_LENGTH_RESTRICTED;
-                       System.err.println("'" + e.getMessage() + "', will use " + secreteKeyLength
-                                       + " secret key length instead of " + previousSecreteKeyLength);
-                       try {
-                               initKeyAndCiphers(password, passwordSalt, initializationVector);
-                       } catch (GeneralSecurityException e1) {
-                               throw new IllegalStateException("Cannot get secret key (with restricted length)", e1);
-                       }
-               } catch (GeneralSecurityException e) {
-                       throw new IllegalStateException("Cannot get secret key", e);
-               }
-       }
-
-       protected void initKeyAndCiphers(char[] password, byte[] passwordSalt, byte[] initializationVector)
-                       throws GeneralSecurityException {
-               byte[] salt = new byte[8];
-               System.arraycopy(passwordSalt, 0, salt, 0, salt.length);
-               // for (int i = 0; i < password.length && i < salt.length; i++)
-               // salt[i] = (byte) password[i];
-               byte[] iv = new byte[16];
-               System.arraycopy(initializationVector, 0, iv, 0, iv.length);
-
-               SecretKeyFactory keyFac = SecretKeyFactory.getInstance(getSecretKeyFactoryName());
-               PBEKeySpec keySpec = new PBEKeySpec(password, salt, getIterationCount(), getKeyLength());
-               String secKeyEncryption = getSecretKeyEncryption();
-               if (secKeyEncryption != null) {
-                       SecretKey tmp = keyFac.generateSecret(keySpec);
-                       key = new SecretKeySpec(tmp.getEncoded(), getSecretKeyEncryption());
-               } else {
-                       key = keyFac.generateSecret(keySpec);
-               }
-               if (securityProviderName != null)
-                       ecipher = Cipher.getInstance(getCipherName(), securityProviderName);
-               else
-                       ecipher = Cipher.getInstance(getCipherName());
-               ecipher.init(Cipher.ENCRYPT_MODE, key, new IvParameterSpec(iv));
-               dcipher = Cipher.getInstance(getCipherName());
-               dcipher.init(Cipher.DECRYPT_MODE, key, new IvParameterSpec(iv));
-       }
-
-       public void encrypt(InputStream decryptedIn, OutputStream encryptedOut) throws IOException {
-               try {
-                       CipherOutputStream out = new CipherOutputStream(encryptedOut, ecipher);
-                       StreamUtils.copy(decryptedIn, out);
-                       StreamUtils.closeQuietly(out);
-               } catch (IOException e) {
-                       throw e;
-               } finally {
-                       StreamUtils.closeQuietly(decryptedIn);
-               }
-       }
-
-       public void decrypt(InputStream encryptedIn, OutputStream decryptedOut) throws IOException {
-               try {
-                       CipherInputStream decryptedIn = new CipherInputStream(encryptedIn, dcipher);
-                       StreamUtils.copy(decryptedIn, decryptedOut);
-               } catch (IOException e) {
-                       throw e;
-               } finally {
-                       StreamUtils.closeQuietly(encryptedIn);
-               }
-       }
-
-       public byte[] encryptString(String str) {
-               ByteArrayOutputStream out = null;
-               ByteArrayInputStream in = null;
-               try {
-                       out = new ByteArrayOutputStream();
-                       in = new ByteArrayInputStream(str.getBytes(DEFAULT_CHARSET));
-                       encrypt(in, out);
-                       return out.toByteArray();
-               } catch (IOException e) {
-                       throw new RuntimeException(e);
-               } finally {
-                       StreamUtils.closeQuietly(out);
-               }
-       }
-
-       /** Closes the input stream */
-       public String decryptAsString(InputStream in) {
-               ByteArrayOutputStream out = null;
-               try {
-                       out = new ByteArrayOutputStream();
-                       decrypt(in, out);
-                       return new String(out.toByteArray(), DEFAULT_CHARSET);
-               } catch (IOException e) {
-                       throw new RuntimeException(e);
-               } finally {
-                       StreamUtils.closeQuietly(out);
-               }
-       }
-
-       protected Key getKey() {
-               return key;
-       }
-
-       protected Cipher getEcipher() {
-               return ecipher;
-       }
-
-       protected Cipher getDcipher() {
-               return dcipher;
-       }
-
-       protected Integer getIterationCount() {
-               return iterationCount;
-       }
-
-       protected Integer getKeyLength() {
-               return secreteKeyLength;
-       }
-
-       protected String getSecretKeyFactoryName() {
-               return secreteKeyFactoryName;
-       }
-
-       protected String getSecretKeyEncryption() {
-               return secreteKeyEncryption;
-       }
-
-       protected String getCipherName() {
-               return cipherName;
-       }
-
-       public void setIterationCount(Integer iterationCount) {
-               this.iterationCount = iterationCount;
-       }
-
-       public void setSecreteKeyLength(Integer keyLength) {
-               this.secreteKeyLength = keyLength;
-       }
-
-       public void setSecreteKeyFactoryName(String secreteKeyFactoryName) {
-               this.secreteKeyFactoryName = secreteKeyFactoryName;
-       }
-
-       public void setSecreteKeyEncryption(String secreteKeyEncryption) {
-               this.secreteKeyEncryption = secreteKeyEncryption;
-       }
-
-       public void setCipherName(String cipherName) {
-               this.cipherName = cipherName;
-       }
-
-       public void setSecurityProviderName(String securityProviderName) {
-               this.securityProviderName = securityProviderName;
-       }
-}
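
A minimal sketch of the PasswordEncryption class above (illustrative password and plain text, assuming the class is on the classpath): encrypting and then decrypting round-trips a string through the password-derived AES key.

import java.io.ByteArrayInputStream;

import org.argeo.util.PasswordEncryption;

public class PasswordEncryptionExample {
	public static void main(String[] args) {
		char[] password = "demo-password".toCharArray();
		PasswordEncryption encryption = new PasswordEncryption(password);
		byte[] encrypted = encryption.encryptString("some secret");
		// decryptAsString() closes the stream it is given
		String decrypted = encryption.decryptAsString(new ByteArrayInputStream(encrypted));
		System.out.println(decrypted); // prints "some secret"
	}
}
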
diff --git a/org.argeo.util/src/org/argeo/util/ServiceChannel.java b/org.argeo.util/src/org/argeo/util/ServiceChannel.java
deleted file mode 100644 (file)
index 7997384..0000000
+++ /dev/null
@@ -1,78 +0,0 @@
-package org.argeo.util;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.channels.AsynchronousByteChannel;
-import java.nio.channels.CompletionHandler;
-import java.nio.channels.ReadableByteChannel;
-import java.nio.channels.WritableByteChannel;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
-
-/** An {@link AsynchronousByteChannel} based on an {@link ExecutorService}. */
-public class ServiceChannel implements AsynchronousByteChannel {
-       private final ReadableByteChannel in;
-       private final WritableByteChannel out;
-
-       private boolean open = true;
-
-       private ExecutorService executor;
-
-       public ServiceChannel(ReadableByteChannel in, WritableByteChannel out, ExecutorService executor) {
-               this.in = in;
-               this.out = out;
-               this.executor = executor;
-       }
-
-       @Override
-       public Future<Integer> read(ByteBuffer dst) {
-               return executor.submit(() -> in.read(dst));
-       }
-
-       @Override
-       public <A> void read(ByteBuffer dst, A attachment, CompletionHandler<Integer, ? super A> handler) {
-               try {
-                       Future<Integer> res = read(dst);
-                       handler.completed(res.get(), attachment);
-               } catch (Exception e) {
-                       handler.failed(e, attachment);
-               }
-       }
-
-       @Override
-       public Future<Integer> write(ByteBuffer src) {
-               return executor.submit(() -> out.write(src));
-       }
-
-       @Override
-       public <A> void write(ByteBuffer src, A attachment, CompletionHandler<Integer, ? super A> handler) {
-               try {
-                       Future<Integer> res = write(src);
-                       handler.completed(res.get(), attachment);
-               } catch (Exception e) {
-                       handler.failed(e, attachment);
-               }
-       }
-
-       @Override
-       public synchronized void close() throws IOException {
-               try {
-                       in.close();
-               } catch (Exception e) {
-                       e.printStackTrace();
-               }
-               try {
-                       out.close();
-               } catch (Exception e) {
-                       e.printStackTrace();
-               }
-               open = false;
-               notifyAll();
-       }
-
-       @Override
-       public synchronized boolean isOpen() {
-               return open;
-       }
-
-}
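
A minimal sketch of the ServiceChannel above (assuming the class is on the classpath): standard input and output wrapped as an asynchronous channel whose reads and writes are executed by a single-threaded executor. Note that close() also closes the wrapped channels.

import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.argeo.util.ServiceChannel;

public class ServiceChannelExample {
	public static void main(String[] args) throws Exception {
		ExecutorService executor = Executors.newSingleThreadExecutor();
		try (ServiceChannel channel = new ServiceChannel(Channels.newChannel(System.in),
				Channels.newChannel(System.out), executor)) {
			ByteBuffer buf = ByteBuffer.wrap("hello\n".getBytes(StandardCharsets.UTF_8));
			channel.write(buf).get(); // blocks until the executor thread has written
		} finally {
			executor.shutdown();
		}
	}
}
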
diff --git a/org.argeo.util/src/org/argeo/util/StreamUtils.java b/org.argeo.util/src/org/argeo/util/StreamUtils.java
deleted file mode 100644 (file)
index 6d7d940..0000000
+++ /dev/null
@@ -1,81 +0,0 @@
-package org.argeo.util;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Reader;
-import java.io.Writer;
-
-/** Utilities to be used when Apache Commons IO is not available. */
-class StreamUtils {
-       private static final int DEFAULT_BUFFER_SIZE = 1024 * 4;
-
-       /*
-        * APACHE COMMONS IO (inspired)
-        */
-
-       /** @return the number of bytes */
-       public static Long copy(InputStream in, OutputStream out)
-                       throws IOException {
-               Long count = 0l;
-               byte[] buf = new byte[DEFAULT_BUFFER_SIZE];
-               while (true) {
-                       int length = in.read(buf);
-                       if (length < 0)
-                               break;
-                       out.write(buf, 0, length);
-                       count = count + length;
-               }
-               return count;
-       }
-
-       /** @return the number of chars */
-       public static Long copy(Reader in, Writer out) throws IOException {
-               Long count = 0l;
-               char[] buf = new char[DEFAULT_BUFFER_SIZE];
-               while (true) {
-                       int length = in.read(buf);
-                       if (length < 0)
-                               break;
-                       out.write(buf, 0, length);
-                       count = count + length;
-               }
-               return count;
-       }
-
-       public static void closeQuietly(InputStream in) {
-               if (in != null)
-                       try {
-                               in.close();
-                       } catch (Exception e) {
-                               //
-                       }
-       }
-
-       public static void closeQuietly(OutputStream out) {
-               if (out != null)
-                       try {
-                               out.close();
-                       } catch (Exception e) {
-                               //
-                       }
-       }
-
-       public static void closeQuietly(Reader in) {
-               if (in != null)
-                       try {
-                               in.close();
-                       } catch (Exception e) {
-                               //
-                       }
-       }
-
-       public static void closeQuietly(Writer out) {
-               if (out != null)
-                       try {
-                               out.close();
-                       } catch (Exception e) {
-                               //
-                       }
-       }
-}
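Note: StreamUtils is package-private, so the helpers above are only reachable from code in org.argeo.util. A minimal usage sketch (file paths are illustrative; java.io and java.nio.file imports assumed):

	InputStream in = null;
	OutputStream out = null;
	try {
		in = Files.newInputStream(Paths.get("/tmp/source.bin"));
		out = Files.newOutputStream(Paths.get("/tmp/target.bin"));
		// copy returns the number of bytes transferred
		Long count = StreamUtils.copy(in, out);
		System.out.println(count + " bytes copied");
	} catch (IOException e) {
		e.printStackTrace();
	} finally {
		StreamUtils.closeQuietly(in);
		StreamUtils.closeQuietly(out);
	}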
diff --git a/org.argeo.util/src/org/argeo/util/Tester.java b/org.argeo.util/src/org/argeo/util/Tester.java
deleted file mode 100644 (file)
index 31a2be4..0000000
+++ /dev/null
@@ -1,126 +0,0 @@
-package org.argeo.util;
-
-import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-/** A generic tester based on Java assertions and functional programming. */
-public class Tester {
-       private Map<String, TesterStatus> results = Collections.synchronizedSortedMap(new TreeMap<>());
-
-       private ClassLoader classLoader;
-
-       /** Uses the current thread's {@link Thread#getContextClassLoader()} by default. */
-       public Tester() {
-               this(Thread.currentThread().getContextClassLoader());
-       }
-
-       public Tester(ClassLoader classLoader) {
-               this.classLoader = classLoader;
-       }
-
-       public void execute(String className) {
-               Class<?> clss;
-               try {
-                       clss = classLoader.loadClass(className);
-                       boolean assertionsEnabled = clss.desiredAssertionStatus();
-                       if (!assertionsEnabled)
-                               throw new IllegalStateException("Test runner " + getClass().getName()
-                                               + " requires Java assertions to be enabled. Call the JVM with the -ea argument.");
-               } catch (Exception e1) {
-                       throw new IllegalArgumentException("Cannot initialise test for " + className, e1);
-
-               }
-               List<Method> methods = findMethods(clss);
-               if (methods.size() == 0)
-                       throw new IllegalArgumentException("No test method found in " + clss);
-               // TODO make order more predictable?
-               for (Method method : methods) {
-                       String uid = method.getDeclaringClass().getName() + "#" + method.getName();
-                       TesterStatus testStatus = new TesterStatus(uid);
-                       Object obj = null;
-                       try {
-                               beforeTest(uid, method);
-                               obj = clss.getDeclaredConstructor().newInstance();
-                               method.invoke(obj);
-                               testStatus.setPassed();
-                               afterTestPassed(uid, method, obj);
-                       } catch (Exception e) {
-                               testStatus.setFailed(e);
-                               afterTestFailed(uid, method, obj, e);
-                       } finally {
-                               results.put(uid, testStatus);
-                       }
-               }
-       }
-
-       protected void beforeTest(String uid, Method method) {
-               // System.out.println(uid + ": STARTING");
-       }
-
-       protected void afterTestPassed(String uid, Method method, Object obj) {
-               System.out.println(uid + ": PASSED");
-       }
-
-       protected void afterTestFailed(String uid, Method method, Object obj, Throwable e) {
-               System.out.println(uid + ": FAILED");
-               e.printStackTrace();
-       }
-
-       protected List<Method> findMethods(Class<?> clss) {
-               List<Method> methods = new ArrayList<Method>();
-//             Method call = getMethod(clss, "call");
-//             if (call != null)
-//                     methods.add(call);
-//
-               for (Method method : clss.getMethods()) {
-                       if (method.getName().startsWith("test")) {
-                               methods.add(method);
-                       }
-               }
-               return methods;
-       }
-
-       protected Method getMethod(Class<?> clss, String name, Class<?>... parameterTypes) {
-               try {
-                       return clss.getMethod(name, parameterTypes);
-               } catch (NoSuchMethodException e) {
-                       return null;
-               } catch (SecurityException e) {
-                       throw new IllegalStateException(e);
-               }
-       }
-
-       public static void main(String[] args) {
-               // deal with arguments
-               String className;
-               if (args.length < 1) {
-                       System.err.println(usage());
-                       System.exit(1);
-                       throw new IllegalArgumentException();
-               } else {
-                       className = args[0];
-               }
-
-               Tester test = new Tester();
-               try {
-                       test.execute(className);
-               } catch (Throwable e) {
-                       e.printStackTrace();
-               }
-
-               Map<String, TesterStatus> r = test.results;
-               for (String uid : r.keySet()) {
-                       TesterStatus testStatus = r.get(uid);
-                       System.out.println(testStatus);
-               }
-       }
-
-       public static String usage() {
-               return "java -ea " + Tester.class.getName() + " <test class name>";
-
-       }
-}
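Note: Tester discovers public methods whose names start with "test" and instantiates the class via its public no-arg constructor, so a test class is plain Java built on assert statements. A hypothetical example, run with: java -ea org.argeo.util.Tester com.example.BasicTest

	package com.example; // hypothetical test class

	public class BasicTest {
		public void testTrim() {
			assert "abc".equals("  abc  ".trim());
		}

		public void testSplit() {
			assert "a,b,c".split(",").length == 3;
		}
	}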
diff --git a/org.argeo.util/src/org/argeo/util/TesterStatus.java b/org.argeo.util/src/org/argeo/util/TesterStatus.java
deleted file mode 100644 (file)
index d1d14ed..0000000
+++ /dev/null
@@ -1,98 +0,0 @@
-package org.argeo.util;
-
-import java.io.Serializable;
-
-/** The status of a test. */
-public class TesterStatus implements Serializable {
-       private static final long serialVersionUID = 6272975746885487000L;
-
-       private Boolean passed = null;
-       private final String uid;
-       private Throwable throwable = null;
-
-       public TesterStatus(String uid) {
-               this.uid = uid;
-       }
-
-       /** For cloning. */
-       public TesterStatus(String uid, Boolean passed, Throwable throwable) {
-               this(uid);
-               this.passed = passed;
-               this.throwable = throwable;
-       }
-
-       public synchronized Boolean isRunning() {
-               return passed == null;
-       }
-
-       public synchronized Boolean isPassed() {
-               assert passed != null;
-               return passed;
-       }
-
-       public synchronized Boolean isFailed() {
-               assert passed != null;
-               return !passed;
-       }
-
-       public synchronized void setPassed() {
-               setStatus(true);
-       }
-
-       public synchronized void setFailed() {
-               setStatus(false);
-       }
-
-       public synchronized void setFailed(Throwable throwable) {
-               setStatus(false);
-               setThrowable(throwable);
-       }
-
-       protected void setStatus(Boolean passed) {
-               if (this.passed != null)
-                       throw new IllegalStateException("Passed status of test " + uid + " is already set (to " + this.passed + ")");
-               this.passed = passed;
-       }
-
-       protected void setThrowable(Throwable throwable) {
-               if (this.throwable != null)
-                       throw new IllegalStateException("Throwable of test " + uid + " is already set (to " + this.throwable + ")");
-               this.throwable = throwable;
-       }
-
-       public String getUid() {
-               return uid;
-       }
-
-       public Throwable getThrowable() {
-               return throwable;
-       }
-
-       @Override
-       protected Object clone() throws CloneNotSupportedException {
-               // rely on the dedicated copy constructor rather than the default field copy
-               return new TesterStatus(uid, passed, throwable);
-       }
-
-       @Override
-       public boolean equals(Object o) {
-               if (o instanceof TesterStatus) {
-                       TesterStatus other = (TesterStatus) o;
-                       // we don't check consistency for performance purposes
-                       // this equals() is supposed to be used in collections or for transfer
-                       return other.uid.equals(uid);
-               }
-               return false;
-       }
-
-       @Override
-       public int hashCode() {
-               return uid.hashCode();
-       }
-
-       @Override
-       public String toString() {
-               return uid + "\t" + (passed == null ? "running" : (passed ? "passed" : "failed"));
-       }
-
-}
diff --git a/org.argeo.util/src/org/argeo/util/Throughput.java b/org.argeo.util/src/org/argeo/util/Throughput.java
deleted file mode 100644 (file)
index 266ddbc..0000000
+++ /dev/null
@@ -1,82 +0,0 @@
-package org.argeo.util;
-
-import java.text.NumberFormat;
-import java.text.ParseException;
-import java.util.Locale;
-
-/** A throughput, that is, a value per unit of time. */
-public class Throughput {
-       private final static NumberFormat usNumberFormat = NumberFormat.getInstance(Locale.US);
-
-       public enum Unit {
-               s, m, h, d
-       }
-
-       private final Double value;
-       private final Unit unit;
-
-       public Throughput(Double value, Unit unit) {
-               this.value = value;
-               this.unit = unit;
-       }
-
-       public Throughput(Long periodMs, Long count, Unit unit) {
-               if (unit.equals(Unit.s))
-                       value = ((double) count * 1000d) / periodMs;
-               else if (unit.equals(Unit.m))
-                       value = ((double) count * 60d * 1000d) / periodMs;
-               else if (unit.equals(Unit.h))
-                       value = ((double) count * 60d * 60d * 1000d) / periodMs;
-               else if (unit.equals(Unit.d))
-                       value = ((double) count * 24d * 60d * 60d * 1000d) / periodMs;
-               else
-                       throw new IllegalArgumentException("Unsupported unit " + unit);
-               this.unit = unit;
-       }
-
-       public Throughput(Double value, String unitStr) {
-               this(value, Unit.valueOf(unitStr));
-       }
-
-       public Throughput(String def) {
-               int index = def.indexOf('/');
-               if (def.length() < 3 || index <= 0 || index != def.length() - 2)
-                       throw new IllegalArgumentException(
-                                       def + " is not a proper throughput definition" + " (should be <value>/<unit>, e.g. 3.54/s or 1500/h)");
-               String valueStr = def.substring(0, index);
-               String unitStr = def.substring(index + 1);
-               try {
-                       this.value = usNumberFormat.parse(valueStr).doubleValue();
-               } catch (ParseException e) {
-                       throw new IllegalArgumentException("Cannot parse " + valueStr + " as a number.", e);
-               }
-               this.unit = Unit.valueOf(unitStr);
-       }
-
-       public Long asMsPeriod() {
-               if (unit.equals(Unit.s))
-                       return Math.round(1000d / value);
-               else if (unit.equals(Unit.m))
-                       return Math.round((60d * 1000d) / value);
-               else if (unit.equals(Unit.h))
-                       return Math.round((60d * 60d * 1000d) / value);
-               else if (unit.equals(Unit.d))
-                       return Math.round((24d * 60d * 60d * 1000d) / value);
-               else
-                       throw new IllegalArgumentException("Unsupported unit " + unit);
-       }
-
-       @Override
-       public String toString() {
-               return usNumberFormat.format(value) + '/' + unit;
-       }
-
-       public Double getValue() {
-               return value;
-       }
-
-       public Unit getUnit() {
-               return unit;
-       }
-
-}
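Note: a minimal usage sketch; the expected outputs in the comments follow from the conversion arithmetic above:

	Throughput throughput = new Throughput("1500/h");
	System.out.println(throughput.getValue()); // 1500.0
	System.out.println(throughput.getUnit()); // h
	System.out.println(throughput.asMsPeriod()); // 2400 ms between events, i.e. one every 2.4 s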
diff --git a/org.argeo.util/src/org/argeo/util/UuidUtils.java b/org.argeo.util/src/org/argeo/util/UuidUtils.java
deleted file mode 100644 (file)
index ebe0978..0000000
+++ /dev/null
@@ -1,374 +0,0 @@
-package org.argeo.util;
-
-import java.net.InetAddress;
-import java.net.NetworkInterface;
-import java.net.SocketException;
-import java.net.UnknownHostException;
-import java.security.SecureRandom;
-import java.time.Duration;
-import java.time.LocalDateTime;
-import java.time.ZoneOffset;
-import java.util.BitSet;
-import java.util.Random;
-import java.util.UUID;
-import java.util.concurrent.atomic.AtomicInteger;
-
-/**
- * Utilities to simplify and extend the usage of {@link UUID}. Only the RFC 4122
- * variant (also known as Leach–Salz variant) is supported.
- */
-public class UuidUtils {
-       /** Nil UUID (00000000-0000-0000-0000-000000000000). */
-       public final static UUID NIL_UUID = UUID.fromString("00000000-0000-0000-0000-000000000000");
-       public final static LocalDateTime GREGORIAN_START = LocalDateTime.of(1582, 10, 15, 0, 0, 0);
-
-       private final static long MOST_SIG_VERSION1 = (1l << 12);
-       private final static long LEAST_SIG_RFC4122_VARIANT = (1l << 63);
-
-       private final static SecureRandom RANDOM;
-       private final static AtomicInteger CLOCK_SEQUENCE;
-       private final static byte[] HARDWARE_ADDRESS;
-       /** A start timestamp to which {@link System#nanoTime()}/100 can be added. */
-       private final static long START_TIMESTAMP;
-       static {
-               RANDOM = new SecureRandom();
-               CLOCK_SEQUENCE = new AtomicInteger(RANDOM.nextInt(16384));
-               HARDWARE_ADDRESS = getHardwareAddress();
-
-               long nowVm = System.nanoTime() / 100;
-               Duration duration = Duration.between(GREGORIAN_START, LocalDateTime.now(ZoneOffset.UTC));
-               START_TIMESTAMP = (duration.getSeconds() * 10000000 + duration.getNano() / 100) - nowVm;
-       }
-
-       private static byte[] getHardwareAddress() {
-               InetAddress localHost;
-               try {
-                       localHost = InetAddress.getLocalHost();
-                       try {
-                               NetworkInterface nic = NetworkInterface.getByInetAddress(localHost);
-                               return nic.getHardwareAddress();
-                       } catch (SocketException e) {
-                               return null;
-                       }
-               } catch (UnknownHostException e) {
-                       return null;
-               }
-
-       }
-
-       public static UUID timeUUIDwithRandomNode() {
-               long timestamp = START_TIMESTAMP + System.nanoTime() / 100;
-               return timeUUID(timestamp, RANDOM);
-       }
-
-       public static UUID timeUUID(long timestamp, Random random) {
-               byte[] node = new byte[6];
-               random.nextBytes(node);
-               node[0] = (byte) (node[0] | 1);
-               long clockSequence = CLOCK_SEQUENCE.incrementAndGet();
-               return timeUUID(timestamp, clockSequence, node);
-       }
-
-       public static UUID timeUUID() {
-               long timestamp = START_TIMESTAMP + System.nanoTime() / 100;
-               return timeUUID(timestamp);
-       }
-
-       public static UUID timeUUID(long timestamp) {
-               if (HARDWARE_ADDRESS == null)
-                       return timeUUID(timestamp, RANDOM);
-               long clockSequence = CLOCK_SEQUENCE.incrementAndGet();
-               return timeUUID(timestamp, clockSequence, HARDWARE_ADDRESS);
-       }
-
-       public static UUID timeUUID(long timestamp, NetworkInterface nic) {
-               byte[] node;
-               try {
-                       node = nic.getHardwareAddress();
-               } catch (SocketException e) {
-                       throw new IllegalStateException("Cannot get hardware address", e);
-               }
-               long clockSequence = CLOCK_SEQUENCE.incrementAndGet();
-               return timeUUID(timestamp, clockSequence, node);
-       }
-
-       public static UUID timeUUID(LocalDateTime time, long clockSequence, byte[] node) {
-               Duration duration = Duration.between(GREGORIAN_START, time);
-               // Number of 100 ns intervals in one second: 1000000000 / 100 = 10000000
-               long timestamp = duration.getSeconds() * 10000000 + duration.getNano() / 100;
-               return timeUUID(timestamp, clockSequence, node);
-       }
-
-       public static UUID timeUUID(long timestamp, long clockSequence, byte[] node) {
-               assert node.length >= 6;
-
-               long mostSig = MOST_SIG_VERSION1 // base for version 1 UUID
-                               | ((timestamp & 0xFFFFFFFFL) << 32) // time_low
-                               | (((timestamp >> 32) & 0xFFFFL) << 16) // time_mid
-                               | ((timestamp >> 48) & 0x0FFFL);// time_hi_and_version
-
-               long leastSig = LEAST_SIG_RFC4122_VARIANT // base for Leach–Salz UUID
-                               | (((clockSequence & 0x3F00) >> 8) << 56) // clk_seq_hi_res
-                               | ((clockSequence & 0xFF) << 48) // clk_seq_low
-                               | (node[0] & 0xFFL) //
-                               | ((node[1] & 0xFFL) << 8) //
-                               | ((node[2] & 0xFFL) << 16) //
-                               | ((node[3] & 0xFFL) << 24) //
-                               | ((node[4] & 0xFFL) << 32) //
-                               | ((node[5] & 0xFFL) << 40); //
-//             for (int i = 0; i < 6; i++) {
-//                     leastSig = leastSig | ((node[i] & 0xFFL) << (8 * i));
-//             }
-               UUID uuid = new UUID(mostSig, leastSig);
-
-               // tests
-               assert uuid.node() == BitSet.valueOf(node).toLongArray()[0];
-               assert uuid.timestamp() == timestamp;
-               assert uuid.clockSequence() == clockSequence;
-               assert uuid.version() == 1;
-               assert uuid.variant() == 2;
-               return uuid;
-       }
-
-       @Deprecated
-       public static UUID timeBasedUUID() {
-               return timeBasedUUID(LocalDateTime.now(ZoneOffset.UTC));
-       }
-
-       @Deprecated
-       public static UUID timeBasedRandomUUID() {
-               return timeBasedRandomUUID(LocalDateTime.now(ZoneOffset.UTC), RANDOM);
-       }
-
-       @Deprecated
-       public static UUID timeBasedUUID(LocalDateTime time) {
-               if (HARDWARE_ADDRESS == null)
-                       return timeBasedRandomUUID(time, RANDOM);
-               return timeBasedUUID(time, BitSet.valueOf(HARDWARE_ADDRESS));
-       }
-
-       @Deprecated
-       public static UUID timeBasedAddressUUID(LocalDateTime time, NetworkInterface nic) throws SocketException {
-               byte[] nodeBytes = nic.getHardwareAddress();
-               BitSet node = BitSet.valueOf(nodeBytes);
-               return timeBasedUUID(time, node);
-       }
-
-       @Deprecated
-       public static UUID timeBasedRandomUUID(LocalDateTime time, Random random) {
-               byte[] nodeBytes = new byte[6];
-               random.nextBytes(nodeBytes);
-               BitSet node = BitSet.valueOf(nodeBytes);
-               // set random marker
-               node.set(0, true);
-               return timeBasedUUID(time, node);
-       }
-
-       @Deprecated
-       public static UUID timeBasedUUID(LocalDateTime time, BitSet node) {
-               // most significant
-               Duration duration = Duration.between(GREGORIAN_START, time);
-
-               // Number of 100 ns intervals in one second: 1000000000 / 100 = 10000000
-               long timeNanos = duration.getSeconds() * 10000000 + duration.getNano() / 100;
-               BitSet timeBits = BitSet.valueOf(new long[] { timeNanos });
-               assert timeBits.length() <= 60;
-
-               int clockSequence;
-               synchronized (CLOCK_SEQUENCE) {
-                       clockSequence = CLOCK_SEQUENCE.incrementAndGet();
-                       if (clockSequence > 16384)
-                               CLOCK_SEQUENCE.set(0);
-               }
-               BitSet clockSequenceBits = BitSet.valueOf(new long[] { clockSequence });
-
-               // Build the UUID, bit by bit
-               // see https://tools.ietf.org/html/rfc4122#section-4.2.2
-               // time
-               BitSet time_low = new BitSet(32);
-               BitSet time_mid = new BitSet(16);
-               BitSet time_hi_and_version = new BitSet(16);
-
-               for (int i = 0; i < 60; i++) {
-                       if (i < 32)
-                               time_low.set(i, timeBits.get(i));
-                       else if (i < 48)
-                               time_mid.set(i - 32, timeBits.get(i));
-                       else
-                               time_hi_and_version.set(i - 48, timeBits.get(i));
-               }
-               // version
-               time_hi_and_version.set(12, true);
-               time_hi_and_version.set(13, false);
-               time_hi_and_version.set(14, false);
-               time_hi_and_version.set(15, false);
-
-               // clock sequence
-               BitSet clk_seq_hi_res = new BitSet(8);
-               BitSet clk_seq_low = new BitSet(8);
-               for (int i = 0; i < 8; i++) {
-                       clk_seq_low.set(i, clockSequenceBits.get(i));
-               }
-               for (int i = 8; i < 14; i++) {
-                       clk_seq_hi_res.set(i - 8, clockSequenceBits.get(i));
-               }
-               // variant
-               clk_seq_hi_res.set(6, false);
-               clk_seq_hi_res.set(7, true);
-
-//             String str = toHexString(time_low.toLongArray()[0]) + "-" + toHexString(time_mid.toLongArray()[0]) + "-"
-//                             + toHexString(time_hi_and_version.toLongArray()[0]) + "-"
-//                             + toHexString(clock_seq_hi_and_reserved.toLongArray()[0]) + toHexString(clock_seq_low.toLongArray()[0])
-//                             + "-" + toHexString(node.toLongArray()[0]);
-//             UUID uuid = UUID.fromString(str);
-
-               BitSet uuidBits = new BitSet(128);
-               for (int i = 0; i < 128; i++) {
-                       if (i < 48)
-                               uuidBits.set(i, node.get(i));
-                       else if (i < 56)
-                               uuidBits.set(i, clk_seq_low.get(i - 48));
-                       else if (i < 64)
-                               uuidBits.set(i, clk_seq_hi_res.get(i - 56));
-                       else if (i < 80)
-                               uuidBits.set(i, time_hi_and_version.get(i - 64));
-                       else if (i < 96)
-                               uuidBits.set(i, time_mid.get(i - 80));
-                       else
-                               uuidBits.set(i, time_low.get(i - 96));
-               }
-
-               long[] uuidLongs = uuidBits.toLongArray();
-               assert uuidLongs.length == 2;
-               UUID uuid = new UUID(uuidLongs[1], uuidLongs[0]);
-
-               // tests
-               assert uuid.node() == node.toLongArray()[0];
-               assert uuid.timestamp() == timeNanos;
-               assert uuid.clockSequence() == clockSequence;
-               assert uuid.version() == 1;
-               assert uuid.variant() == 2;
-               return uuid;
-       }
-
-       public static String toBinaryString(UUID uuid, int charsPerSegment, char separator) {
-               String binaryString = toBinaryString(uuid);
-               StringBuilder sb = new StringBuilder(128 + (128 / charsPerSegment));
-               for (int i = 0; i < binaryString.length(); i++) {
-                       if (i != 0 && i % charsPerSegment == 0)
-                               sb.append(separator);
-                       sb.append(binaryString.charAt(i));
-               }
-               return sb.toString();
-       }
-
-       public static String toBinaryString(UUID uuid) {
-               String most = zeroTo64Chars(Long.toBinaryString(uuid.getMostSignificantBits()));
-               String least = zeroTo64Chars(Long.toBinaryString(uuid.getLeastSignificantBits()));
-               String binaryString = most + least;
-               assert binaryString.length() == 128;
-               return binaryString;
-       }
-
-       private static String zeroTo64Chars(String str) {
-               assert str.length() <= 64;
-               if (str.length() < 64) {
-                       StringBuilder sb = new StringBuilder(64);
-                       for (int i = 0; i < 64 - str.length(); i++)
-                               sb.append('0');
-                       sb.append(str);
-                       return sb.toString();
-               } else
-                       return str;
-       }
-
-       public static String compactToStd(String compact) {
-               if (compact.length() != 32)
-                       throw new IllegalArgumentException(
-                                       "Compact UUID '" + compact + "' has length " + compact.length() + " and not 32.");
-               StringBuilder sb = new StringBuilder(36);
-               for (int i = 0; i < 32; i++) {
-                       if (i == 8 || i == 12 || i == 16 || i == 20)
-                               sb.append('-');
-                       sb.append(compact.charAt(i));
-               }
-               String std = sb.toString();
-               assert std.length() == 36;
-               assert UUID.fromString(std).toString().equals(std);
-               return std;
-       }
-
-       public static UUID compactToUuid(String compact) {
-               return UUID.fromString(compactToStd(compact));
-       }
-
-       public static boolean isRandom(UUID uuid) {
-               return uuid.version() == 4;
-       }
-
-       public static boolean isTimeBased(UUID uuid) {
-               return uuid.version() == 1;
-       }
-
-       public static boolean isTimeBasedRandom(UUID uuid) {
-               if (uuid.version() == 1) {
-                       BitSet node = BitSet.valueOf(new long[] { uuid.node() });
-                       return node.get(0);
-               } else
-                       return false;
-       }
-
-       public static boolean isNameBased(UUID uuid) {
-               return uuid.version() == 3 || uuid.version() == 5;
-       }
-
-       /** Singleton. */
-       private UuidUtils() {
-       }
-
-       public final static void main(String[] args) throws Exception {
-               UUID uuid;
-
-//             uuid = compactToUuid("996b1f5122de4b2f94e49168d32f22d1");
-//             System.out.println(uuid.toString() + ", isRandom=" + isRandom(uuid));
-
-               // warm up before measuring perf
-               for (int i = 0; i < 10; i++) {
-                       UUID.randomUUID();
-                       timeUUID();
-                       timeUUIDwithRandomNode();
-                       timeBasedRandomUUID();
-                       timeBasedUUID();
-               }
-
-               long begin;
-               long duration;
-
-               begin = System.nanoTime();
-               uuid = UUID.randomUUID();
-               duration = System.nanoTime() - begin;
-               System.out.println(uuid.toString() + " in " + duration + " ns, isRandom=" + isRandom(uuid));
-
-               begin = System.nanoTime();
-               uuid = timeUUID();
-               duration = System.nanoTime() - begin;
-               System.out.println(uuid.toString() + " in " + duration + " ns, isTimeBasedRandom=" + isTimeBasedRandom(uuid));
-
-               begin = System.nanoTime();
-               uuid = timeUUIDwithRandomNode();
-               duration = System.nanoTime() - begin;
-               System.out.println(uuid.toString() + " in " + duration + " ns, isTimeBasedRandom=" + isTimeBasedRandom(uuid));
-
-               begin = System.nanoTime();
-               uuid = timeBasedUUID();
-               duration = System.nanoTime() - begin;
-               System.out.println(uuid.toString() + " in " + duration + " ns, isTimeBasedRandom=" + isTimeBasedRandom(uuid));
-
-               begin = System.nanoTime();
-               uuid = timeBasedRandomUUID();
-               duration = System.nanoTime() - begin;
-               System.out.println(uuid.toString() + " in " + duration + " ns, isTimeBasedRandom=" + isTimeBasedRandom(uuid));
-//             System.out.println(toBinaryString(uuid, 8, ' '));
-//             System.out.println(toBinaryString(uuid, 16, '\n'));
-       }
-}
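Note: a short sketch of the non-deprecated entry points above (the compact UUID string is the one from the commented-out example in main(); java.util.UUID import assumed):

	UUID uuid = UuidUtils.timeUUID(); // uses the hardware address as node id when available
	assert uuid.version() == 1 && uuid.variant() == 2;
	UUID randomNode = UuidUtils.timeUUIDwithRandomNode();
	System.out.println(UuidUtils.isTimeBasedRandom(randomNode)); // true
	UUID std = UuidUtils.compactToUuid("996b1f5122de4b2f94e49168d32f22d1");
	System.out.println(std); // 996b1f51-22de-4b2f-94e4-9168d32f22d1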
diff --git a/org.argeo.util/src/org/argeo/util/package-info.java b/org.argeo.util/src/org/argeo/util/package-info.java
deleted file mode 100644 (file)
index 4354b0a..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-/** Generic Java utilities. */
-package org.argeo.util;
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index e143ba9e950bb92825daf789c5ded58eb22251fd..fbc2d287fe77e81f427b5ca009c0208641b2d725 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -23,7 +23,6 @@
        </properties>
        <modules>
                <!-- Base -->
-               <module>org.argeo.util</module>
                <module>org.argeo.enterprise</module>
                <module>org.argeo.jcr</module>
                <module>org.argeo.osgi.boot</module>