org.argeo.slc.api \
org.argeo.slc.factory \
org.argeo.slc.runtime \
-cms/org.argeo.slc.cms \
+org.argeo.slc.cms \
swt/rap/org.argeo.tool.server \
-VPATH = .:cms:swt/rap
-
clean:
rm -rf $(BUILD_BASE)
-A2_OUTPUT = $(SDK_BUILD_BASE)/a2
-A2_BASE = $(A2_OUTPUT)
-
DEP_CATEGORIES = \
org.argeo.tp \
org.argeo.tp.sdk \
org.argeo.tp.formats \
org.argeo.tp.gis \
org.argeo.cms \
+org.argeo.cms.jcr \
swt/rap/org.argeo.cms \
GRAALVM_HOME = /opt/graalvm-ce
BUNDLES = \
swt/rcp/org.argeo.tool.desktop \
-VPATH = .:cms:swt/rcp
-
clean:
rm -rf $(BUILD_BASE)
-include $(SDK_SRC_BASE)/cnf/unstable.bnd
+BRANCH=unstable
\ No newline at end of file
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-17"/>
- <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
- <classpathentry kind="src" path="src"/>
- <classpathentry kind="output" path="bin"/>
-</classpath>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>org.argeo.slc.cms</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.jdt.core.javabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.ManifestBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.SchemaBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.eclipse.pde.PluginNature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- </natures>
-</projectDescription>
+++ /dev/null
-Import-Package: \
-org.apache.commons.logging,\
-org.osgi.*;version="0.0.0",\
-*
\ No newline at end of file
+++ /dev/null
-source.. = src/
-output.. = bin/
-bin.includes = META-INF/,\
- .
-additional.bundles = org.argeo.init
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import org.apache.commons.vfs2.FileObject;
-import org.apache.commons.vfs2.FileSystemManager;
-import org.apache.commons.vfs2.FileSystemOptions;
-import org.apache.commons.vfs2.provider.sftp.SftpFileSystemConfigBuilder;
-
-/**
- * Simplify atomic backups implementation, especially by managing VFS.
- */
-public abstract class AbstractAtomicBackup implements AtomicBackup {
- private String name;
- private String compression = "bz2";
-
- protected abstract void writeBackup(FileObject targetFo);
-
- public AbstractAtomicBackup() {
- }
-
- public AbstractAtomicBackup(String name) {
- this.name = name;
- }
-
- public void init() {
- if (name == null)
- throw new MaintenanceException("Atomic backup name must be set");
- }
-
- public void destroy() {
-
- }
-
- @Override
- public String backup(FileSystemManager fileSystemManager,
- String backupsBase, BackupContext backupContext,
- FileSystemOptions opts) {
- if (name == null)
- throw new MaintenanceException("Atomic backup name must be set");
-
- FileObject targetFo = null;
- try {
- if (backupsBase.startsWith("sftp:"))
- SftpFileSystemConfigBuilder.getInstance()
- .setStrictHostKeyChecking(opts, "no");
- if (compression == null || compression.equals("none"))
- targetFo = fileSystemManager.resolveFile(backupsBase + '/'
- + backupContext.getRelativeFolder() + '/' + name, opts);
- else if (compression.equals("bz2"))
- targetFo = fileSystemManager.resolveFile("bz2:" + backupsBase
- + '/' + backupContext.getRelativeFolder() + '/' + name
- + ".bz2" + "!" + name, opts);
- else if (compression.equals("gz"))
- targetFo = fileSystemManager.resolveFile("gz:" + backupsBase
- + '/' + backupContext.getRelativeFolder() + '/' + name
- + ".gz" + "!" + name, opts);
- else
- throw new MaintenanceException("Unsupported compression "
- + compression);
-
- writeBackup(targetFo);
-
- return targetFo.toString();
- } catch (Exception e) {
- throw new MaintenanceException("Cannot backup " + name + " to "
- + targetFo, e);
- } finally {
- BackupUtils.closeFOQuietly(targetFo);
- }
- }
-
- public void setName(String name) {
- this.name = name;
- }
-
- public String getName() {
- return name;
- }
-
- public void setCompression(String compression) {
- this.compression = compression;
- }
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import org.apache.commons.vfs2.FileSystemManager;
-import org.apache.commons.vfs2.FileSystemOptions;
-
-/** Performs the backup of a single component, typically a database dump */
-public interface AtomicBackup {
- /** Name identifiying this backup */
- public String getName();
-
- /**
- * Retrieves the data of the component in a format that allows to restore
- * the component
- *
- * @param backupContext
- * the context of this backup
- * @return the VFS URI of the generated file or directory
- */
- public String backup(FileSystemManager fileSystemManager,
- String backupsBase, BackupContext backupContext,
- FileSystemOptions opts);
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import java.text.DateFormat;
-import java.util.Date;
-
-/**
- * Transient information of a given backup, centralizing common information such
- * as timestamp and location.
- */
-public interface BackupContext {
- /** Backup date */
- public Date getTimestamp();
-
- /** Formatted backup date */
- public String getTimestampAsString();
-
- /** System name */
- public String getSystemName();
-
- /** Local base */
- public String getRelativeFolder();
-
- /** Date format */
- public DateFormat getDateFormat();
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import org.apache.commons.vfs2.FileSystemException;
-import org.apache.commons.vfs2.impl.DefaultFileSystemManager;
-import org.apache.commons.vfs2.provider.bzip2.Bzip2FileProvider;
-import org.apache.commons.vfs2.provider.ftp.FtpFileProvider;
-import org.apache.commons.vfs2.provider.gzip.GzipFileProvider;
-import org.apache.commons.vfs2.provider.local.DefaultLocalFileProvider;
-import org.apache.commons.vfs2.provider.ram.RamFileProvider;
-import org.apache.commons.vfs2.provider.sftp.SftpFileProvider;
-import org.apache.commons.vfs2.provider.url.UrlFileProvider;
-
-/**
- * Programatically configured VFS file system manager which can be declared as a
- * bean and associated with a life cycle (methods
- * {@link DefaultFileSystemManager#init()} and
- * {@link DefaultFileSystemManager#close()}). Supports bz2, file, ram, gzip,
- * ftp, sftp
- */
-public class BackupFileSystemManager extends DefaultFileSystemManager {
-
- public BackupFileSystemManager() {
- super();
- try {
- addProvider("file", new DefaultLocalFileProvider());
- addProvider("bz2", new Bzip2FileProvider());
- addProvider("ftp", new FtpFileProvider());
- addProvider("sftp", new SftpFileProvider());
- addProvider("gzip", new GzipFileProvider());
- addProvider("ram", new RamFileProvider());
- setDefaultProvider(new UrlFileProvider());
- } catch (FileSystemException e) {
- throw new MaintenanceException("Cannot configure backup file provider", e);
- }
- }
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import java.text.DateFormat;
-
-import org.apache.commons.vfs2.FileSystemManager;
-import org.apache.commons.vfs2.FileSystemOptions;
-
-/** Purges previous backups */
-public interface BackupPurge {
- /**
- * Purge the backups identified by these arguments. Although these are the
- * same fields as a {@link BackupContext} we don't pass it as argument since
- * we want to use this interface to purge remote backups as well (that is,
- * with a different base), or outside the scope of a running backup.
- */
- public void purge(FileSystemManager fileSystemManager, String base,
- String name, DateFormat dateFormat, FileSystemOptions opts);
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import org.apache.commons.vfs2.FileObject;
-
-/** Backup utilities */
-public class BackupUtils {
- /** Close a file object quietly even if it is null or throws an exception. */
- public static void closeFOQuietly(FileObject fo) {
- if (fo != null) {
- try {
- fo.close();
- } catch (Exception e) {
- // silent
- }
- }
- }
-
- /** Prevents instantiation */
- private BackupUtils() {
- }
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-@Deprecated
-class MaintenanceException extends RuntimeException {
- private static final long serialVersionUID = -5770049663929537270L;
-
- public MaintenanceException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public MaintenanceException(String message) {
- super(message);
- }
-
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import org.apache.commons.vfs2.FileObject;
-
-/** Backups a MySQL database using mysqldump. */
-public class MySqlBackup extends OsCallBackup {
- private String mysqldumpLocation = "/usr/bin/mysqldump";
-
- private String dbUser;
- private String dbPassword;
- private String dbName;
-
- public MySqlBackup() {
- }
-
- public MySqlBackup(String dbUser, String dbPassword, String dbName) {
- this.dbUser = dbUser;
- this.dbPassword = dbPassword;
- this.dbName = dbName;
- init();
- }
-
- @Override
- public void init() {
- if (getName() == null)
- setName(dbName + ".mysql");
- super.init();
- }
-
- @Override
- public void writeBackup(FileObject targetFo) {
- if (getCommand() == null)
- setCommand(mysqldumpLocation
- + " --lock-tables --add-locks --add-drop-table"
- + " -u ${dbUser} --password=${dbPassword} --databases ${dbName}");
- getVariables().put("dbUser", dbUser);
- getVariables().put("dbPassword", dbPassword);
- getVariables().put("dbName", dbName);
-
- super.writeBackup(targetFo);
- }
-
- public void setDbUser(String dbUser) {
- this.dbUser = dbUser;
- }
-
- public void setDbPassword(String dbPassword) {
- this.dbPassword = dbPassword;
- }
-
- public void setDbName(String dbName) {
- this.dbName = dbName;
- }
-
- public void setMysqldumpLocation(String mysqldumpLocation) {
- this.mysqldumpLocation = mysqldumpLocation;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import org.apache.commons.vfs2.FileObject;
-
-/** Backups an OpenLDAP server using slapcat */
-public class OpenLdapBackup extends OsCallBackup {
- private String slapcatLocation = "/usr/sbin/slapcat";
- private String slapdConfLocation = "/etc/openldap/slapd.conf";
- private String baseDn;
-
- public OpenLdapBackup() {
- super();
- }
-
- public OpenLdapBackup(String baseDn) {
- super();
- this.baseDn = baseDn;
- }
-
- @Override
- public void writeBackup(FileObject targetFo) {
- if (baseDn == null)
- throw new MaintenanceException("Base DN must be set");
-
- if (getCommand() == null)
- setCommand(slapcatLocation
- + " -f ${slapdConfLocation} -b '${baseDn}'");
- getVariables().put("slapdConfLocation", slapdConfLocation);
- getVariables().put("baseDn", baseDn);
-
- super.writeBackup(targetFo);
- }
-
- public void setSlapcatLocation(String slapcatLocation) {
- this.slapcatLocation = slapcatLocation;
- }
-
- public void setSlapdConfLocation(String slapdConfLocation) {
- this.slapdConfLocation = slapdConfLocation;
- }
-
- public void setBaseDn(String baseDn) {
- this.baseDn = baseDn;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import java.io.ByteArrayOutputStream;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.exec.DefaultExecutor;
-import org.apache.commons.exec.ExecuteException;
-import org.apache.commons.exec.ExecuteStreamHandler;
-import org.apache.commons.exec.Executor;
-import org.apache.commons.exec.PumpStreamHandler;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.vfs2.FileContent;
-import org.apache.commons.vfs2.FileObject;
-import org.argeo.api.cms.CmsLog;
-
-/**
- * Runs an OS command and save its standard output as a file. Typically used for
- * MySQL or OpenLDAP dumps.
- */
-public class OsCallBackup extends AbstractAtomicBackup {
- private final static CmsLog log = CmsLog.getLog(OsCallBackup.class);
-
- private String command;
- private Map<String, String> variables = new HashMap<String, String>();
- private Executor executor = new DefaultExecutor();
-
- private Map<String, String> environment = new HashMap<String, String>();
-
- /** Name of the sudo user, root if "", not sudo if null */
- private String sudo = null;
-
- public OsCallBackup() {
- }
-
- public OsCallBackup(String name) {
- super(name);
- }
-
- public OsCallBackup(String name, String command) {
- super(name);
- this.command = command;
- }
-
- @Override
- public void writeBackup(FileObject targetFo) {
- String commandToUse = command;
-
- // sudo
- if (sudo != null) {
- if (sudo.equals(""))
- commandToUse = "sudo " + commandToUse;
- else
- commandToUse = "sudo -u " + sudo + " " + commandToUse;
- }
-
- CommandLine commandLine = CommandLine.parse(commandToUse, variables);
- ByteArrayOutputStream errBos = new ByteArrayOutputStream();
- if (log.isTraceEnabled())
- log.trace(commandLine.toString());
-
- try {
- // stdout
- FileContent targetContent = targetFo.getContent();
- // stderr
- ExecuteStreamHandler streamHandler = new PumpStreamHandler(targetContent.getOutputStream(), errBos);
- executor.setStreamHandler(streamHandler);
- executor.execute(commandLine, environment);
- } catch (ExecuteException e) {
- byte[] err = errBos.toByteArray();
- String errStr = new String(err);
- throw new MaintenanceException("Process " + commandLine + " failed (" + e.getExitValue() + "): " + errStr, e);
- } catch (Exception e) {
- byte[] err = errBos.toByteArray();
- String errStr = new String(err);
- throw new MaintenanceException("Process " + commandLine + " failed: " + errStr, e);
- } finally {
- IOUtils.closeQuietly(errBos);
- }
- }
-
- public void setCommand(String command) {
- this.command = command;
- }
-
- protected String getCommand() {
- return command;
- }
-
- /**
- * A reference to the environment variables that will be passed to the
- * process. Empty by default.
- */
- protected Map<String, String> getEnvironment() {
- return environment;
- }
-
- protected Map<String, String> getVariables() {
- return variables;
- }
-
- public void setVariables(Map<String, String> variables) {
- this.variables = variables;
- }
-
- public void setExecutor(Executor executor) {
- this.executor = executor;
- }
-
- public void setSudo(String sudo) {
- this.sudo = sudo;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import org.apache.commons.vfs2.FileObject;
-
-/** Backups a PostgreSQL database using pg_dump. */
-public class PostgreSqlBackup extends OsCallBackup {
- /**
- * PostgreSQL password environment variable (see
- * http://stackoverflow.com/questions
- * /2893954/how-to-pass-in-password-to-pg-dump)
- */
- protected final static String PGPASSWORD = "PGPASSWORD";
-
- private String pgDumpLocation = "/usr/bin/pg_dump";
-
- private String dbUser;
- private String dbPassword;
- private String dbName;
-
- public PostgreSqlBackup() {
- super();
- }
-
- public PostgreSqlBackup(String dbUser, String dbPassword, String dbName) {
- this.dbUser = dbUser;
- this.dbPassword = dbPassword;
- this.dbName = dbName;
- init();
- }
-
- @Override
- public void init() {
- // disable compression since pg_dump is used with -Fc option
- setCompression(null);
-
- if (getName() == null)
- setName(dbName + ".pgdump");
- super.init();
- }
-
- @Override
- public void writeBackup(FileObject targetFo) {
- if (getCommand() == null) {
- getEnvironment().put(PGPASSWORD, dbPassword);
- setCommand(pgDumpLocation + " -Fc" + " -U ${dbUser} ${dbName}");
- }
- getVariables().put("dbUser", dbUser);
- getVariables().put("dbPassword", dbPassword);
- getVariables().put("dbName", dbName);
-
- super.writeBackup(targetFo);
- }
-
- public void setDbUser(String dbUser) {
- this.dbUser = dbUser;
- }
-
- public void setDbPassword(String dbPassword) {
- this.dbPassword = dbPassword;
- }
-
- public void setDbName(String dbName) {
- this.dbName = dbName;
- }
-
- public void setPgDumpLocation(String mysqldumpLocation) {
- this.pgDumpLocation = mysqldumpLocation;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.apache.commons.vfs2.FileSystemManager;
-
-/** Simple implementation of a backup context */
-public class SimpleBackupContext implements BackupContext {
- private DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_HHmm");
- private final Date timestamp;
- private final String name;
-
- private final FileSystemManager fileSystemManager;
-
- public SimpleBackupContext(FileSystemManager fileSystemManager,
- String backupsBase, String name) {
- this.name = name;
- this.timestamp = new Date();
- this.fileSystemManager = fileSystemManager;
- }
-
- public Date getTimestamp() {
- return timestamp;
- }
-
- public String getTimestampAsString() {
- return dateFormat.format(timestamp);
- }
-
- public String getSystemName() {
- return name;
- }
-
- public String getRelativeFolder() {
- return name + '/' + getTimestampAsString();
- }
-
- public DateFormat getDateFormat() {
- return dateFormat;
- }
-
- public FileSystemManager getFileSystemManager() {
- return fileSystemManager;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import java.text.DateFormat;
-import java.time.Period;
-import java.time.ZoneId;
-import java.time.ZonedDateTime;
-import java.util.Date;
-import java.util.SortedMap;
-import java.util.TreeMap;
-
-import org.apache.commons.vfs2.FileObject;
-import org.apache.commons.vfs2.FileSystemManager;
-import org.apache.commons.vfs2.FileSystemOptions;
-import org.apache.commons.vfs2.Selectors;
-import org.argeo.api.cms.CmsLog;
-
-/** Simple backup purge which keeps backups only for a given number of days */
-public class SimpleBackupPurge implements BackupPurge {
- private final static CmsLog log = CmsLog.getLog(SimpleBackupPurge.class);
-
- private Integer daysKept = 30;
-
- @Override
- public void purge(FileSystemManager fileSystemManager, String base, String name, DateFormat dateFormat,
- FileSystemOptions opts) {
- try {
- ZonedDateTime nowDt = ZonedDateTime.now();
- FileObject baseFo = fileSystemManager.resolveFile(base + '/' + name, opts);
-
- SortedMap<ZonedDateTime, FileObject> toDelete = new TreeMap<ZonedDateTime, FileObject>();
- int backupCount = 0;
-
- // make sure base dir exists
- baseFo.createFolder();
-
- // scan backups and list those which should be deleted
- for (FileObject backupFo : baseFo.getChildren()) {
- String backupName = backupFo.getName().getBaseName();
- Date backupDate = dateFormat.parse(backupName);
- backupCount++;
- ZonedDateTime backupDt = ZonedDateTime.ofInstant(backupDate.toInstant(), ZoneId.systemDefault());
- Period sinceThen = Period.between(backupDt.toLocalDate(), nowDt.toLocalDate());
- // new Period(backupDt, nowDt);
- int days = sinceThen.getDays();
- // int days = sinceThen.getMinutes();
- if (days > daysKept) {
- toDelete.put(backupDt, backupFo);
- }
- }
-
- if (toDelete.size() != 0 && toDelete.size() == backupCount) {
- // all backups would be deleted
- // but we want to keep at least one
- ZonedDateTime lastBackupDt = toDelete.firstKey();
- FileObject keptFo = toDelete.remove(lastBackupDt);
- log.warn("Backup " + keptFo + " kept although it is older than " + daysKept + " days.");
- }
-
- // delete old backups
- for (FileObject backupFo : toDelete.values()) {
- backupFo.delete(Selectors.SELECT_ALL);
- if (log.isDebugEnabled())
- log.debug("Deleted backup " + backupFo);
- }
- } catch (Exception e) {
- throw new MaintenanceException("Could not purge previous backups", e);
- }
-
- }
-
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import java.io.File;
-
-import org.apache.commons.vfs2.FileObject;
-
-/** Backups a Subversion repository using svnadmin. */
-public class SvnBackup extends OsCallBackup {
- private String svnadminLocation = "/usr/bin/svnadmin";
-
- private String repoLocation;
- private String repoName;
-
- public SvnBackup() {
- }
-
- public SvnBackup(String repoLocation) {
- this.repoLocation = repoLocation;
- init();
- }
-
- @Override
- public void init() {
- // use directory as repo name
- if (repoName == null)
- repoName = new File(repoLocation).getName();
-
- if (getName() == null)
- setName(repoName + ".svndump");
- super.init();
- }
-
- @Override
- public void writeBackup(FileObject targetFo) {
- if (getCommand() == null) {
- setCommand(svnadminLocation + " dump " + " ${repoLocation}");
- }
- getVariables().put("repoLocation", repoLocation);
-
- super.writeBackup(targetFo);
- }
-
- public void setRepoLocation(String repoLocation) {
- this.repoLocation = repoLocation;
- }
-
- public void setRepoName(String repoName) {
- this.repoName = repoName;
- }
-
- public void setSvnadminLocation(String mysqldumpLocation) {
- this.svnadminLocation = mysqldumpLocation;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.backup.vfs;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.vfs2.FileObject;
-import org.apache.commons.vfs2.FileSystemException;
-import org.apache.commons.vfs2.FileSystemManager;
-import org.apache.commons.vfs2.FileSystemOptions;
-import org.apache.commons.vfs2.Selectors;
-import org.apache.commons.vfs2.UserAuthenticator;
-import org.apache.commons.vfs2.impl.DefaultFileSystemConfigBuilder;
-import org.argeo.api.cms.CmsLog;
-import org.argeo.util.LangUtils;
-
-/**
- * Combines multiple backups and transfer them to a remote location. Purges
- * remote and local data based on certain criteria.
- */
-public class SystemBackup implements Runnable {
- private final static CmsLog log = CmsLog.getLog(SystemBackup.class);
-
- private FileSystemManager fileSystemManager;
- private UserAuthenticator userAuthenticator = null;
-
- private String backupsBase;
- private String systemName;
-
- private List<AtomicBackup> atomicBackups = new ArrayList<AtomicBackup>();
- private BackupPurge backupPurge = new SimpleBackupPurge();
-
- private Map<String, UserAuthenticator> remoteBases = new HashMap<String, UserAuthenticator>();
-
- @Override
- public void run() {
- if (atomicBackups.size() == 0)
- throw new MaintenanceException("No atomic backup listed");
- List<String> failures = new ArrayList<String>();
-
- SimpleBackupContext backupContext = new SimpleBackupContext(fileSystemManager, backupsBase, systemName);
-
- // purge older backups
- FileSystemOptions opts = new FileSystemOptions();
- try {
- DefaultFileSystemConfigBuilder.getInstance().setUserAuthenticator(opts, userAuthenticator);
- } catch (Exception e) {
- throw new MaintenanceException("Cannot create authentication", e);
- }
-
- try {
-
- backupPurge.purge(fileSystemManager, backupsBase, systemName, backupContext.getDateFormat(), opts);
- } catch (Exception e) {
- failures.add("Purge " + backupsBase + " failed: " + e.getMessage());
- log.error("Purge of " + backupsBase + " failed", e);
- }
-
- // perform backup
- for (AtomicBackup atomickBackup : atomicBackups) {
- try {
- String target = atomickBackup.backup(fileSystemManager, backupsBase, backupContext, opts);
- if (log.isDebugEnabled())
- log.debug("Performed backup " + target);
- } catch (Exception e) {
- String msg = "Atomic backup " + atomickBackup.getName() + " failed: "
- + LangUtils.chainCausesMessages(e);
- failures.add(msg);
- log.error(msg);
- if (log.isTraceEnabled())
- log.trace("Stacktrace of atomic backup " + atomickBackup.getName() + " failure.", e);
- }
- }
-
- // dispatch to remote
- for (String remoteBase : remoteBases.keySet()) {
- FileObject localBaseFo = null;
- FileObject remoteBaseFo = null;
- UserAuthenticator auth = remoteBases.get(remoteBase);
-
- // authentication
- FileSystemOptions remoteOpts = new FileSystemOptions();
- try {
- DefaultFileSystemConfigBuilder.getInstance().setUserAuthenticator(remoteOpts, auth);
- backupPurge.purge(fileSystemManager, remoteBase, systemName, backupContext.getDateFormat(), remoteOpts);
- } catch (Exception e) {
- failures.add("Purge " + remoteBase + " failed: " + e.getMessage());
- log.error("Cannot purge " + remoteBase, e);
- }
-
- try {
- localBaseFo = fileSystemManager.resolveFile(backupsBase + '/' + backupContext.getRelativeFolder(),
- opts);
- remoteBaseFo = fileSystemManager.resolveFile(remoteBase + '/' + backupContext.getRelativeFolder(),
- remoteOpts);
- remoteBaseFo.copyFrom(localBaseFo, Selectors.SELECT_ALL);
- if (log.isDebugEnabled())
- log.debug("Copied backup to " + remoteBaseFo + " from " + localBaseFo);
- // }
- } catch (Exception e) {
- failures.add("Dispatch to " + remoteBase + " failed: " + e.getMessage());
- log.error("Cannot dispatch backups from " + backupContext.getRelativeFolder() + " to " + remoteBase, e);
- }
- BackupUtils.closeFOQuietly(localBaseFo);
- BackupUtils.closeFOQuietly(remoteBaseFo);
- }
-
- int failureCount = 0;
- if (failures.size() > 0) {
- StringBuffer buf = new StringBuffer();
- for (String failure : failures) {
- buf.append('\n').append(failureCount).append(" - ").append(failure);
- failureCount++;
- }
- throw new MaintenanceException(failureCount + " error(s) when running the backup,"
- + " check the logs and the backups as soon as possible." + buf);
- }
- }
-
- public void setFileSystemManager(FileSystemManager fileSystemManager) {
- this.fileSystemManager = fileSystemManager;
- }
-
- public void setBackupsBase(String backupsBase) {
- this.backupsBase = backupsBase;
- }
-
- public void setSystemName(String name) {
- this.systemName = name;
- }
-
- public void setAtomicBackups(List<AtomicBackup> atomicBackups) {
- this.atomicBackups = atomicBackups;
- }
-
- public void setBackupPurge(BackupPurge backupPurge) {
- this.backupPurge = backupPurge;
- }
-
- public void setUserAuthenticator(UserAuthenticator userAuthenticator) {
- this.userAuthenticator = userAuthenticator;
- }
-
- public void setRemoteBases(Map<String, UserAuthenticator> remoteBases) {
- this.remoteBases = remoteBases;
- }
-
- // public static void main(String args[]) {
- // while (true) {
- // try {
- // StandardFileSystemManager fsm = new StandardFileSystemManager();
- // fsm.init();
- //
- // SystemBackup systemBackup = new SystemBackup();
- // systemBackup.setSystemName("mySystem");
- // systemBackup
- // .setBackupsBase("/home/mbaudier/dev/src/commons/server/runtime/org.argeo.server.core/target");
- // systemBackup.setFileSystemManager(fsm);
- //
- // List<AtomicBackup> atomicBackups = new ArrayList<AtomicBackup>();
- //
- // MySqlBackup mySqlBackup = new MySqlBackup("root", "", "test");
- // atomicBackups.add(mySqlBackup);
- // PostgreSqlBackup postgreSqlBackup = new PostgreSqlBackup(
- // "argeo", "argeo", "gis_template");
- // atomicBackups.add(postgreSqlBackup);
- // SvnBackup svnBackup = new SvnBackup(
- // "/home/mbaudier/tmp/testsvnrepo");
- // atomicBackups.add(svnBackup);
- //
- // systemBackup.setAtomicBackups(atomicBackups);
- //
- // Map<String, UserAuthenticator> remoteBases = new HashMap<String,
- // UserAuthenticator>();
- // StaticUserAuthenticator userAuthenticator = new StaticUserAuthenticator(
- // null, "demo", "demo");
- // remoteBases.put("sftp://localhost/home/mbaudier/test",
- // userAuthenticator);
- // systemBackup.setRemoteBases(remoteBases);
- //
- // systemBackup.run();
- //
- // fsm.close();
- // } catch (FileSystemException e) {
- // // TODO Auto-generated catch block
- // e.printStackTrace();
- // System.exit(1);
- // }
- //
- // // wait
- // try {
- // Thread.sleep(120 * 1000);
- // } catch (InterruptedException e) {
- // e.printStackTrace();
- // }
- // }
- // }
-}
+++ /dev/null
-/** Argeo Node backup utilities based on Apache Commons VFS. */
-package org.argeo.slc.backup.vfs;
\ No newline at end of file
+++ /dev/null
-package org.argeo.slc.cms.deploy;
-
-import org.argeo.slc.deploy.DeployedSystem;
-
-public interface CmsDeployedSystem extends DeployedSystem {
-
-}
+++ /dev/null
-package org.argeo.slc.cms.deploy;
-
-import java.util.List;
-
-import org.argeo.slc.deploy.DeploymentData;
-
-public interface CmsDeploymentData extends DeploymentData {
- List<String> getModulesToActivate(int startLevel);
-}
+++ /dev/null
-package org.argeo.slc.cms.deploy;
-
-import java.nio.file.Path;
-
-import org.argeo.slc.deploy.TargetData;
-
-public interface CmsTargetData extends TargetData {
- Path getInstanceData();
-
- Integer getHttpPort();
-
-}
+++ /dev/null
-package org.argeo.slc.cms.deploy;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-public class SimpleCmsDeploymentData implements CmsDeploymentData {
- private Map<Integer, List<String>> startLevels = new TreeMap<>();
-
- @Override
- public List<String> getModulesToActivate(int startLevel) {
- startLevels.putIfAbsent(startLevel, new ArrayList<>());
- return startLevels.get(startLevel);
- }
-
-}
+++ /dev/null
-package org.argeo.slc.cms.deploy;
-
-import java.nio.file.Path;
-
-public class SimpleCmsTargetData implements CmsTargetData {
- private Path instanceData;
- private Integer httpPort;
-
- public SimpleCmsTargetData(Path instanceData, Integer httpPort) {
- this.instanceData = instanceData;
- this.httpPort = httpPort;
- }
-
- public Integer getHttpPort() {
- return httpPort;
- }
-
- public void setHttpPort(Integer httpPort) {
- this.httpPort = httpPort;
- }
-
- public Path getInstanceData() {
- return instanceData;
- }
-
- public void setInstanceData(Path instanceData) {
- this.instanceData = instanceData;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.cms.deploy.osgi;
-
-import org.argeo.slc.build.Distribution;
-import org.argeo.slc.build.ModularDistribution;
-import org.argeo.slc.cms.deploy.CmsDeployedSystem;
-import org.argeo.slc.cms.deploy.CmsDeploymentData;
-import org.argeo.slc.cms.deploy.CmsTargetData;
-import org.argeo.slc.deploy.DeploymentData;
-import org.argeo.slc.deploy.TargetData;
-import org.osgi.framework.BundleContext;
-
-public class CmsOsgiDeployedSystem implements CmsDeployedSystem {
- private ModularDistribution distribution;
- private CmsTargetData targetData;
- private CmsDeploymentData deploymentData;
-
- private BundleContext systemBundleContext;
-
- public CmsOsgiDeployedSystem(BundleContext systemBundleContext, ModularDistribution distribution,
- CmsTargetData targetData, CmsDeploymentData deploymentData) {
- this.systemBundleContext = systemBundleContext;
-
- this.distribution = distribution;
- this.targetData = targetData;
- this.deploymentData = deploymentData;
- }
-
- @Override
- public String getDeployedSystemId() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Distribution getDistribution() {
- return distribution;
- }
-
- @Override
- public DeploymentData getDeploymentData() {
- return deploymentData;
- }
-
- @Override
- public TargetData getTargetData() {
- return targetData;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.cms.deploy.osgi;
-
-import java.io.IOException;
-import java.lang.System.Logger;
-import java.lang.System.Logger.Level;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.List;
-import java.util.Map;
-import java.util.StringJoiner;
-import java.util.TreeMap;
-
-import org.argeo.init.a2.A2Source;
-import org.argeo.init.a2.FsA2Source;
-import org.argeo.init.osgi.OsgiBoot;
-import org.argeo.init.osgi.OsgiRuntimeContext;
-import org.argeo.slc.WellKnownConstants;
-import org.argeo.slc.build.Distribution;
-import org.argeo.slc.cms.deploy.CmsDeployedSystem;
-import org.argeo.slc.cms.deploy.CmsDeploymentData;
-import org.argeo.slc.cms.deploy.CmsTargetData;
-import org.argeo.slc.cms.deploy.SimpleCmsDeploymentData;
-import org.argeo.slc.cms.deploy.SimpleCmsTargetData;
-import org.argeo.slc.cms.distribution.A2Distribution;
-import org.argeo.slc.deploy.DeployedSystem;
-import org.argeo.slc.deploy.Deployment;
-import org.argeo.slc.deploy.DeploymentData;
-import org.argeo.slc.deploy.TargetData;
-
-public class CmsOsgiDeployment implements Deployment {
- private final static Logger logger = System.getLogger(CmsOsgiDeployment.class.getName());
-
- private A2Distribution distribution;
- private CmsTargetData targetData;
- private CmsDeploymentData deploymentData;
-
- private CmsDeployedSystem deployedSystem;
-
- private OsgiRuntimeContext runtimeContext;
-
- @Override
- public void run() {
- try {
- Map<String, String> config = new TreeMap<>();
-
- // sources
- StringJoiner sourcesProperty = new StringJoiner(",");
- for (A2Source a2Source : distribution.getA2Sources()) {
- sourcesProperty.add(a2Source.getUri().toString());
- }
- config.put(OsgiBoot.PROP_ARGEO_OSGI_SOURCES, sourcesProperty.toString());
-
- // target
- config.put(WellKnownConstants.OSGI_INSTANCE_AREA,
- targetData.getInstanceData().toRealPath().toUri().toString());
- if (targetData.getHttpPort() != null) {
- config.put(WellKnownConstants.OSGI_HTTP_PORT, targetData.getHttpPort().toString());
- }
-
- Path configurationArea = Files.createTempDirectory("slc-cms-test");
- config.put(WellKnownConstants.OSGI_CONFIGURATION_AREA, configurationArea.toUri().toString());
-
- // modules activation
- for (int startLevel = 0; startLevel <= 6; startLevel++) {
- List<String> modules = deploymentData.getModulesToActivate(startLevel);
- if (modules.size() != 0) {
- String startProperty = String.join(",", modules);
- config.put(OsgiBoot.PROP_ARGEO_OSGI_START + "." + startLevel + ".node", startProperty);
- }
- }
-
- config.put("org.eclipse.equinox.http.jetty.autostart", "false");
- config.put("org.osgi.framework.bootdelegation",
- "com.sun.jndi.ldap,com.sun.jndi.ldap.sasl,com.sun.security.jgss,com.sun.jndi.dns,com.sun.nio.file,com.sun.nio.sctp");
- config.put("eclipse.ignoreApp", "true");
- config.put("osgi.noShutdown", "true");
-
-// config.put("osgi.console", "true");
-
- // initialise
- for (String key : config.keySet()) {
-// System.out.println(key + "=" + config.get(key));
- logger.log(Level.INFO, () -> key + "=" + config.get(key));
- }
-
- runtimeContext = new OsgiRuntimeContext(config);
- runtimeContext.run();
-
- deployedSystem = new CmsOsgiDeployedSystem(runtimeContext.getFramework().getBundleContext(), distribution,
- targetData, deploymentData);
-
- } catch (Exception e) {
- throw new IllegalStateException("Cannot run OSGi deployment", e);
- }
-
- }
-
- @Override
- public DeployedSystem getDeployedSystem() {
- return deployedSystem;
- }
-
- @Override
- public void setTargetData(TargetData targetData) {
- this.targetData = (CmsTargetData) targetData;
- }
-
- @Override
- public void setDeploymentData(DeploymentData deploymentData) {
- this.deploymentData = (CmsDeploymentData) deploymentData;
- }
-
- @Override
- public void setDistribution(Distribution distribution) {
- this.distribution = (A2Distribution) distribution;
- }
-
- public OsgiRuntimeContext getRuntimeContext() {
- return runtimeContext;
- }
-
- public static void main(String[] args) {
- try {
- Path userHome = Paths.get(System.getProperty("user.home"));
-
- // distribution
- Path a2Base = userHome.resolve("dev/git/unstable/output/a2");
- A2Distribution distribution = new A2Distribution();
- distribution.getA2Sources().add(new FsA2Source(a2Base));
-
- // target data
- Path instanceData = userHome.resolve("dev/git/unstable/argeo-slc/sdk/exec/cms-deployment/data");
- Files.createDirectories(instanceData);
- Integer httpPort = 7070;
- SimpleCmsTargetData targetData = new SimpleCmsTargetData(instanceData, httpPort);
-
- // deployment data
- SimpleCmsDeploymentData deploymentData = new SimpleCmsDeploymentData();
- deploymentData.getModulesToActivate(2).add("org.eclipse.equinox.http.servlet");
- deploymentData.getModulesToActivate(2).add("org.eclipse.equinox.cm");
- deploymentData.getModulesToActivate(2).add("org.apache.felix.scr");
- deploymentData.getModulesToActivate(2).add("org.eclipse.rap.rwt.osgi");
-
- deploymentData.getModulesToActivate(3).add("org.argeo.cms");
-
- deploymentData.getModulesToActivate(4).add("org.argeo.cms.servlet");
- deploymentData.getModulesToActivate(4).add("org.argeo.cms.ui.rap");
- deploymentData.getModulesToActivate(4).add("org.argeo.cms.jcr");
-
- deploymentData.getModulesToActivate(5).add("org.argeo.cms.e4.rap");
-
- CmsOsgiDeployment deployment = new CmsOsgiDeployment();
- deployment.setDistribution(distribution);
- deployment.setTargetData(targetData);
- deployment.setDeploymentData(deploymentData);
- deployment.run();
-
- boolean multiple = false;
- if (multiple) {
-
- Path instanceData2 = userHome.resolve("dev/git/unstable/argeo-slc/sdk/exec/cms-deployment2/data");
- Files.createDirectories(instanceData2);
- Integer httpPort2 = 7071;
- SimpleCmsTargetData targetData2 = new SimpleCmsTargetData(instanceData2, httpPort2);
-
- CmsOsgiDeployment deployment2 = new CmsOsgiDeployment();
- deployment2.setDistribution(distribution);
- deployment2.setTargetData(targetData2);
- deployment2.setDeploymentData(deploymentData);
- deployment2.run();
- }
-
- deployment.getRuntimeContext().waitForStop(0);
-
- } catch (IOException | InterruptedException e) {
- e.printStackTrace();
- System.exit(1);
- }
- }
-
-}
+++ /dev/null
-package org.argeo.slc.cms.distribution;
-
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-import org.argeo.init.a2.A2Branch;
-import org.argeo.init.a2.A2Component;
-import org.argeo.init.a2.A2Contribution;
-import org.argeo.init.a2.A2Module;
-import org.argeo.init.a2.A2Source;
-import org.argeo.slc.CategoryNameVersion;
-import org.argeo.slc.DefaultCategoryNameVersion;
-import org.argeo.slc.NameVersion;
-import org.argeo.slc.build.Distribution;
-import org.argeo.slc.build.ModularDistribution;
-
-public class A2Distribution implements ModularDistribution {
- private List<A2Source> a2Sources = new ArrayList<>();
-
- @Override
- public String getDistributionId() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public String getName() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public String getVersion() {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Iterator<? extends NameVersion> nameVersions() {
- List<CategoryNameVersion> nameVersions = new ArrayList<>();
- for (A2Source a2Source : a2Sources) {
- for (A2Contribution a2Contribution : a2Source.listContributions(null)) {
- for (A2Component a2Component : a2Contribution.listComponents(null)) {
- for (A2Branch a2Branch : a2Component.listBranches(null)) {
- for (A2Module a2Module : a2Branch.listModules(null)) {
- CategoryNameVersion nameVersion = new DefaultCategoryNameVersion(a2Contribution.getId(),
- a2Component.getId(), a2Module.getVersion().toString());
- nameVersions.add(nameVersion);
- }
- }
- }
- }
- }
- return nameVersions.iterator();
- }
-
- @Override
- public Distribution getModuleDistribution(String moduleName, String moduleVersion) {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
- public Object getModulesDescriptor(String descriptorType) {
- // TODO Auto-generated method stub
- return null;
- }
-
- public List<A2Source> getA2Sources() {
- return a2Sources;
- }
-
-
-}
+++ /dev/null
-package org.argeo.slc.cms.distribution;
-
-import org.argeo.init.a2.A2Module;
-import org.argeo.slc.build.Distribution;
-
-public class A2ModuleDistribution implements Distribution {
- private A2Module a2Module;
-
- @Override
- public String getDistributionId() {
- return a2Module.getCoordinates();
- }
-
-}
+++ /dev/null
-package org.argeo.slc.cms.httpclient3;
-
-import org.apache.commons.httpclient.Credentials;
-import org.apache.commons.httpclient.auth.AuthScheme;
-import org.apache.commons.httpclient.auth.CredentialsNotAvailableException;
-import org.apache.commons.httpclient.auth.CredentialsProvider;
-
-/** SPNEGO credential provider */
-public class HttpCredentialProvider implements CredentialsProvider {
-
- @Override
- public Credentials getCredentials(AuthScheme scheme, String host, int port, boolean proxy)
- throws CredentialsNotAvailableException {
- if (scheme instanceof SpnegoAuthScheme)
- return new SpnegoCredentials();
- else
- throw new UnsupportedOperationException("Auth scheme " + scheme.getSchemeName() + " not supported");
- }
-
-}
+++ /dev/null
-package org.argeo.slc.cms.httpclient3;
-
-import java.net.URL;
-import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-
-import javax.security.auth.Subject;
-import javax.security.auth.login.LoginContext;
-
-import org.apache.commons.httpclient.Credentials;
-import org.apache.commons.httpclient.HttpClient;
-import org.apache.commons.httpclient.HttpMethod;
-import org.apache.commons.httpclient.auth.AuthPolicy;
-import org.apache.commons.httpclient.auth.AuthScheme;
-import org.apache.commons.httpclient.auth.AuthenticationException;
-import org.apache.commons.httpclient.auth.CredentialsProvider;
-import org.apache.commons.httpclient.auth.MalformedChallengeException;
-import org.apache.commons.httpclient.methods.GetMethod;
-import org.apache.commons.httpclient.params.DefaultHttpParams;
-import org.apache.commons.httpclient.params.HttpMethodParams;
-import org.apache.commons.httpclient.params.HttpParams;
-import org.argeo.cms.auth.RemoteAuthUtils;
-
-//// Register client-side SPNEGO auth scheme
-//AuthPolicy.registerAuthScheme(SpnegoAuthScheme.NAME, SpnegoAuthScheme.class);
-//HttpParams params = DefaultHttpParams.getDefaultParams();
-//ArrayList<String> schemes = new ArrayList<>();
-//schemes.add(SpnegoAuthScheme.NAME);// SPNEGO preferred
-//// schemes.add(AuthPolicy.BASIC);// incompatible with Basic
-//params.setParameter(AuthPolicy.AUTH_SCHEME_PRIORITY, schemes);
-//params.setParameter(CredentialsProvider.PROVIDER, new HttpCredentialProvider());
-//params.setParameter(HttpMethodParams.COOKIE_POLICY, KernelConstants.COOKIE_POLICY_BROWSER_COMPATIBILITY);
-//// params.setCookiePolicy(CookiePolicy.BROWSER_COMPATIBILITY);
-
-
-
-/** Implementation of the SPNEGO auth scheme. */
-public class SpnegoAuthScheme implements AuthScheme {
-// private final static Log log = LogFactory.getLog(SpnegoAuthScheme.class);
-
- public static final String NAME = "Negotiate";
-// private final static Oid KERBEROS_OID;
-// static {
-// try {
-// KERBEROS_OID = new Oid("1.3.6.1.5.5.2");
-// } catch (GSSException e) {
-// throw new IllegalStateException("Cannot create Kerberos OID", e);
-// }
-// }
-
- private final static String DEFAULT_KERBEROS_SERVICE = "HTTP";
-
- private boolean complete = false;
- private String realm;
-
- @Override
- public void processChallenge(String challenge) throws MalformedChallengeException {
- // if(tokenStr!=null){
- // log.error("Received challenge while there is a token. Failing.");
- // complete = false;
- // }
-
- }
-
- @Override
- public String getSchemeName() {
- return NAME;
- }
-
- @Override
- public String getParameter(String name) {
- return null;
- }
-
- @Override
- public String getRealm() {
- return realm;
- }
-
- @Override
- public String getID() {
- return NAME;
- }
-
- @Override
- public boolean isConnectionBased() {
- return true;
- }
-
- @Override
- public boolean isComplete() {
- return complete;
- }
-
- @Override
- public String authenticate(Credentials credentials, String method, String uri) throws AuthenticationException {
- // log.debug("authenticate " + method + " " + uri);
- // return null;
- throw new UnsupportedOperationException();
- }
-
- @Override
- public String authenticate(Credentials credentials, HttpMethod method) throws AuthenticationException {
-// GSSContext context = null;
- String hostname;
- try {
- hostname = method.getURI().getHost();
- String tokenStr = RemoteAuthUtils.createGssToken(null, DEFAULT_KERBEROS_SERVICE, hostname);
- return "Negotiate " + tokenStr;
- } catch (Exception e1) {
- complete = true;
- throw new AuthenticationException("Cannot authenticate " + method, e1);
- }
-// String serverPrinc = DEFAULT_KERBEROS_SERVICE + "@" + hostname;
-//
-// try {
-// // Get service's principal name
-// GSSManager manager = GSSManager.getInstance();
-// GSSName serverName = manager.createName(serverPrinc, GSSName.NT_HOSTBASED_SERVICE, KERBEROS_OID);
-//
-// // Get the context for authentication
-// context = manager.createContext(serverName, KERBEROS_OID, null, GSSContext.DEFAULT_LIFETIME);
-// // context.requestMutualAuth(true); // Request mutual authentication
-// // context.requestConf(true); // Request confidentiality
-// context.requestCredDeleg(true);
-//
-// byte[] token = new byte[0];
-//
-// // token is ignored on the first call
-// token = context.initSecContext(token, 0, token.length);
-//
-// // Send a token to the server if one was generated by
-// // initSecContext
-// if (token != null) {
-// tokenStr = Base64.getEncoder().encodeToString(token);
-// // complete=true;
-// }
-// } catch (GSSException e) {
-// complete = true;
-// throw new AuthenticationException("Cannot authenticate to " + serverPrinc, e);
-// }
- }
-
- public static void main(String[] args) {
- String principal = System.getProperty("javax.security.auth.login.name");
- if (args.length == 0 || principal == null) {
- System.err.println("usage: java -Djavax.security.auth.login.name=<principal@REALM> "
- + SpnegoAuthScheme.class.getName() + " <url>");
- System.exit(1);
- return;
- }
- String url = args[0];
-
- URL jaasUrl = SpnegoAuthScheme.class.getResource("jaas.cfg");
- System.setProperty("java.security.auth.login.config", jaasUrl.toExternalForm());
- try {
- LoginContext lc = new LoginContext("SINGLE_USER");
- lc.login();
-
- AuthPolicy.registerAuthScheme(SpnegoAuthScheme.NAME, SpnegoAuthScheme.class);
- HttpParams params = DefaultHttpParams.getDefaultParams();
- ArrayList<String> schemes = new ArrayList<>();
- schemes.add(SpnegoAuthScheme.NAME);
- params.setParameter(AuthPolicy.AUTH_SCHEME_PRIORITY, schemes);
- params.setParameter(CredentialsProvider.PROVIDER, new HttpCredentialProvider());
-
- int responseCode = Subject.doAs(lc.getSubject(), new PrivilegedExceptionAction<Integer>() {
- public Integer run() throws Exception {
- HttpClient httpClient = new HttpClient();
- return httpClient.executeMethod(new GetMethod(url));
- }
- });
- System.out.println("Reponse code: " + responseCode);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
-}
+++ /dev/null
-package org.argeo.slc.cms.httpclient3;
-
-import org.apache.commons.httpclient.Credentials;
-
-public class SpnegoCredentials implements Credentials {
-
-}
+++ /dev/null
-SINGLE_USER {
- com.sun.security.auth.module.Krb5LoginModule required
- useTicketCache=true
- debug=true;
-};
-
-com.sun.security.jgss.krb5.initiate {
- com.sun.security.auth.module.Krb5LoginModule
- required useTicketCache=true;
-};
\ No newline at end of file
+++ /dev/null
-package org.argeo.slc.cms.test;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-
-public class CmsSmokeTest {
-
- public static void main(String[] args) throws IOException {
- Path instanceData;
- if (args.length > 0) {
- instanceData = Paths.get(args[0]);
- } else {
- instanceData = Files.createTempDirectory("cms-test");
- }
-
- }
-
-}
+++ /dev/null
--include: \
-${workspace}/cnf/unstable.bnd, \
-${workspace}/sdk/argeo-build/argeo.bnd, \
+++ /dev/null
-MAJOR=2
-MINOR=3
-MICRO=4
-qualifier=.next
-
-category=org.argeo.slc
-Bundle-RequiredExecutionEnvironment=JavaSE-11
-
-argeo.rpm.stagingRepository=/srv/rpmfactory/unstable/argeo-osgi-2/argeo
-argeo.rpm.suffix=-unstable
#!/bin/sh
-# We build where we are
-SDK_BUILD_BASE=$(pwd -P)/output
-
# Source are located where this script is
SDK_SRC_BASE="$(cd "$(dirname "$0")"; pwd -P)"
-SDK_MK=$SDK_SRC_BASE/sdk.mk
-
-#echo SDK_BUILD_BASE=$SDK_BUILD_BASE
-#echo SDK_SRC_BASE=$SDK_SRC_BASE
-#echo SDK_MK=$SDK_MK
-
-if [ -f "$SDK_MK" ];
-then
-
-echo "File $SDK_MK already exists. Remove it in order to configure a new build location:"
-echo "rm $SDK_MK"
-exit 1
-
-else
-
-if [ -z "$JAVA_HOME" ]
-then
-echo "Environment variable JAVA_HOME must be set"
-exit 1
-fi
-
-# Create build directory, so that it can be used right away
-# and we check whether we have the rights
-mkdir -p $SDK_BUILD_BASE
-if [ -f "$SDK_MK" ];
-then
-echo "Cannot create $SDK_BUILD_BASE, SDK configuration has failed."
-exit 2
-fi
-
-# Generate sdk.mk
-cat > "$SDK_MK" <<EOF
-SDK_SRC_BASE := $SDK_SRC_BASE
-SDK_BUILD_BASE := $SDK_BUILD_BASE
-JAVA_HOME := $JAVA_HOME
-
-include \$(SDK_SRC_BASE)/branch.mk
-EOF
-
-
-echo SDK was configured.
-echo "JAVA_HOME : $JAVA_HOME"
-echo "Base for sources : $SDK_SRC_BASE"
-echo "Base for builds : $SDK_BUILD_BASE"
-exit 0
-fi
-
+# Source the configure script
+. $SDK_SRC_BASE/sdk/argeo-build/configure
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-17"/>
+ <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
+ <classpathentry kind="src" path="src"/>
+ <classpathentry kind="output" path="bin"/>
+</classpath>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>org.argeo.slc.cms</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.ManifestBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.SchemaBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.pde.PluginNature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+</projectDescription>
--- /dev/null
+Import-Package: \
+org.apache.commons.logging,\
+org.osgi.*;version="0.0.0",\
+*
\ No newline at end of file
--- /dev/null
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+ .
+additional.bundles = org.argeo.init
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileObject;
+import org.apache.commons.vfs2.FileSystemManager;
+import org.apache.commons.vfs2.FileSystemOptions;
+import org.apache.commons.vfs2.provider.sftp.SftpFileSystemConfigBuilder;
+
+/**
+ * Simplify atomic backups implementation, especially by managing VFS.
+ */
+public abstract class AbstractAtomicBackup implements AtomicBackup {
+ private String name;
+ private String compression = "bz2";
+
+ protected abstract void writeBackup(FileObject targetFo);
+
+ public AbstractAtomicBackup() {
+ }
+
+ public AbstractAtomicBackup(String name) {
+ this.name = name;
+ }
+
+ public void init() {
+ if (name == null)
+ throw new MaintenanceException("Atomic backup name must be set");
+ }
+
+ public void destroy() {
+
+ }
+
+ @Override
+ public String backup(FileSystemManager fileSystemManager,
+ String backupsBase, BackupContext backupContext,
+ FileSystemOptions opts) {
+ if (name == null)
+ throw new MaintenanceException("Atomic backup name must be set");
+
+ FileObject targetFo = null;
+ try {
+ if (backupsBase.startsWith("sftp:"))
+ SftpFileSystemConfigBuilder.getInstance()
+ .setStrictHostKeyChecking(opts, "no");
+ if (compression == null || compression.equals("none"))
+ targetFo = fileSystemManager.resolveFile(backupsBase + '/'
+ + backupContext.getRelativeFolder() + '/' + name, opts);
+ else if (compression.equals("bz2"))
+ targetFo = fileSystemManager.resolveFile("bz2:" + backupsBase
+ + '/' + backupContext.getRelativeFolder() + '/' + name
+ + ".bz2" + "!" + name, opts);
+ else if (compression.equals("gz"))
+ targetFo = fileSystemManager.resolveFile("gz:" + backupsBase
+ + '/' + backupContext.getRelativeFolder() + '/' + name
+ + ".gz" + "!" + name, opts);
+ else
+ throw new MaintenanceException("Unsupported compression "
+ + compression);
+
+ writeBackup(targetFo);
+
+ return targetFo.toString();
+ } catch (Exception e) {
+ throw new MaintenanceException("Cannot backup " + name + " to "
+ + targetFo, e);
+ } finally {
+ BackupUtils.closeFOQuietly(targetFo);
+ }
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setCompression(String compression) {
+ this.compression = compression;
+ }
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileSystemManager;
+import org.apache.commons.vfs2.FileSystemOptions;
+
+/** Performs the backup of a single component, typically a database dump */
+public interface AtomicBackup {
+ /** Name identifiying this backup */
+ public String getName();
+
+ /**
+ * Retrieves the data of the component in a format that allows to restore
+ * the component
+ *
+ * @param backupContext
+ * the context of this backup
+ * @return the VFS URI of the generated file or directory
+ */
+ public String backup(FileSystemManager fileSystemManager,
+ String backupsBase, BackupContext backupContext,
+ FileSystemOptions opts);
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.text.DateFormat;
+import java.util.Date;
+
+/**
+ * Transient information of a given backup, centralizing common information such
+ * as timestamp and location.
+ */
+public interface BackupContext {
+ /** Backup date */
+ public Date getTimestamp();
+
+ /** Formatted backup date */
+ public String getTimestampAsString();
+
+ /** System name */
+ public String getSystemName();
+
+ /** Local base */
+ public String getRelativeFolder();
+
+ /** Date format */
+ public DateFormat getDateFormat();
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileSystemException;
+import org.apache.commons.vfs2.impl.DefaultFileSystemManager;
+import org.apache.commons.vfs2.provider.bzip2.Bzip2FileProvider;
+import org.apache.commons.vfs2.provider.ftp.FtpFileProvider;
+import org.apache.commons.vfs2.provider.gzip.GzipFileProvider;
+import org.apache.commons.vfs2.provider.local.DefaultLocalFileProvider;
+import org.apache.commons.vfs2.provider.ram.RamFileProvider;
+import org.apache.commons.vfs2.provider.sftp.SftpFileProvider;
+import org.apache.commons.vfs2.provider.url.UrlFileProvider;
+
+/**
+ * Programatically configured VFS file system manager which can be declared as a
+ * bean and associated with a life cycle (methods
+ * {@link DefaultFileSystemManager#init()} and
+ * {@link DefaultFileSystemManager#close()}). Supports bz2, file, ram, gzip,
+ * ftp, sftp
+ */
+public class BackupFileSystemManager extends DefaultFileSystemManager {
+
+ public BackupFileSystemManager() {
+ super();
+ try {
+ addProvider("file", new DefaultLocalFileProvider());
+ addProvider("bz2", new Bzip2FileProvider());
+ addProvider("ftp", new FtpFileProvider());
+ addProvider("sftp", new SftpFileProvider());
+ addProvider("gzip", new GzipFileProvider());
+ addProvider("ram", new RamFileProvider());
+ setDefaultProvider(new UrlFileProvider());
+ } catch (FileSystemException e) {
+ throw new MaintenanceException("Cannot configure backup file provider", e);
+ }
+ }
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.text.DateFormat;
+
+import org.apache.commons.vfs2.FileSystemManager;
+import org.apache.commons.vfs2.FileSystemOptions;
+
+/** Purges previous backups */
+public interface BackupPurge {
+ /**
+ * Purge the backups identified by these arguments. Although these are the
+ * same fields as a {@link BackupContext} we don't pass it as argument since
+ * we want to use this interface to purge remote backups as well (that is,
+ * with a different base), or outside the scope of a running backup.
+ */
+ public void purge(FileSystemManager fileSystemManager, String base,
+ String name, DateFormat dateFormat, FileSystemOptions opts);
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileObject;
+
+/** Backup utilities */
+public class BackupUtils {
+ /** Close a file object quietly even if it is null or throws an exception. */
+ public static void closeFOQuietly(FileObject fo) {
+ if (fo != null) {
+ try {
+ fo.close();
+ } catch (Exception e) {
+ // silent
+ }
+ }
+ }
+
+ /** Prevents instantiation */
+ private BackupUtils() {
+ }
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+@Deprecated
+class MaintenanceException extends RuntimeException {
+ private static final long serialVersionUID = -5770049663929537270L;
+
+ public MaintenanceException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public MaintenanceException(String message) {
+ super(message);
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileObject;
+
+/** Backups a MySQL database using mysqldump. */
+public class MySqlBackup extends OsCallBackup {
+ private String mysqldumpLocation = "/usr/bin/mysqldump";
+
+ private String dbUser;
+ private String dbPassword;
+ private String dbName;
+
+ public MySqlBackup() {
+ }
+
+ public MySqlBackup(String dbUser, String dbPassword, String dbName) {
+ this.dbUser = dbUser;
+ this.dbPassword = dbPassword;
+ this.dbName = dbName;
+ init();
+ }
+
+ @Override
+ public void init() {
+ if (getName() == null)
+ setName(dbName + ".mysql");
+ super.init();
+ }
+
+ @Override
+ public void writeBackup(FileObject targetFo) {
+ if (getCommand() == null)
+ setCommand(mysqldumpLocation
+ + " --lock-tables --add-locks --add-drop-table"
+ + " -u ${dbUser} --password=${dbPassword} --databases ${dbName}");
+ getVariables().put("dbUser", dbUser);
+ getVariables().put("dbPassword", dbPassword);
+ getVariables().put("dbName", dbName);
+
+ super.writeBackup(targetFo);
+ }
+
+ public void setDbUser(String dbUser) {
+ this.dbUser = dbUser;
+ }
+
+ public void setDbPassword(String dbPassword) {
+ this.dbPassword = dbPassword;
+ }
+
+ public void setDbName(String dbName) {
+ this.dbName = dbName;
+ }
+
+ public void setMysqldumpLocation(String mysqldumpLocation) {
+ this.mysqldumpLocation = mysqldumpLocation;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileObject;
+
+/** Backups an OpenLDAP server using slapcat */
+public class OpenLdapBackup extends OsCallBackup {
+ private String slapcatLocation = "/usr/sbin/slapcat";
+ private String slapdConfLocation = "/etc/openldap/slapd.conf";
+ private String baseDn;
+
+ public OpenLdapBackup() {
+ super();
+ }
+
+ public OpenLdapBackup(String baseDn) {
+ super();
+ this.baseDn = baseDn;
+ }
+
+ @Override
+ public void writeBackup(FileObject targetFo) {
+ if (baseDn == null)
+ throw new MaintenanceException("Base DN must be set");
+
+ if (getCommand() == null)
+ setCommand(slapcatLocation
+ + " -f ${slapdConfLocation} -b '${baseDn}'");
+ getVariables().put("slapdConfLocation", slapdConfLocation);
+ getVariables().put("baseDn", baseDn);
+
+ super.writeBackup(targetFo);
+ }
+
+ public void setSlapcatLocation(String slapcatLocation) {
+ this.slapcatLocation = slapcatLocation;
+ }
+
+ public void setSlapdConfLocation(String slapdConfLocation) {
+ this.slapdConfLocation = slapdConfLocation;
+ }
+
+ public void setBaseDn(String baseDn) {
+ this.baseDn = baseDn;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.io.ByteArrayOutputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.exec.DefaultExecutor;
+import org.apache.commons.exec.ExecuteException;
+import org.apache.commons.exec.ExecuteStreamHandler;
+import org.apache.commons.exec.Executor;
+import org.apache.commons.exec.PumpStreamHandler;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.vfs2.FileContent;
+import org.apache.commons.vfs2.FileObject;
+import org.argeo.api.cms.CmsLog;
+
+/**
+ * Runs an OS command and save its standard output as a file. Typically used for
+ * MySQL or OpenLDAP dumps.
+ */
+public class OsCallBackup extends AbstractAtomicBackup {
+ private final static CmsLog log = CmsLog.getLog(OsCallBackup.class);
+
+ private String command;
+ private Map<String, String> variables = new HashMap<String, String>();
+ private Executor executor = new DefaultExecutor();
+
+ private Map<String, String> environment = new HashMap<String, String>();
+
+ /** Name of the sudo user, root if "", not sudo if null */
+ private String sudo = null;
+
+ public OsCallBackup() {
+ }
+
+ public OsCallBackup(String name) {
+ super(name);
+ }
+
+ public OsCallBackup(String name, String command) {
+ super(name);
+ this.command = command;
+ }
+
+ @Override
+ public void writeBackup(FileObject targetFo) {
+ String commandToUse = command;
+
+ // sudo
+ if (sudo != null) {
+ if (sudo.equals(""))
+ commandToUse = "sudo " + commandToUse;
+ else
+ commandToUse = "sudo -u " + sudo + " " + commandToUse;
+ }
+
+ CommandLine commandLine = CommandLine.parse(commandToUse, variables);
+ ByteArrayOutputStream errBos = new ByteArrayOutputStream();
+ if (log.isTraceEnabled())
+ log.trace(commandLine.toString());
+
+ try {
+ // stdout
+ FileContent targetContent = targetFo.getContent();
+ // stderr
+ ExecuteStreamHandler streamHandler = new PumpStreamHandler(targetContent.getOutputStream(), errBos);
+ executor.setStreamHandler(streamHandler);
+ executor.execute(commandLine, environment);
+ } catch (ExecuteException e) {
+ byte[] err = errBos.toByteArray();
+ String errStr = new String(err);
+ throw new MaintenanceException("Process " + commandLine + " failed (" + e.getExitValue() + "): " + errStr, e);
+ } catch (Exception e) {
+ byte[] err = errBos.toByteArray();
+ String errStr = new String(err);
+ throw new MaintenanceException("Process " + commandLine + " failed: " + errStr, e);
+ } finally {
+ IOUtils.closeQuietly(errBos);
+ }
+ }
+
+ public void setCommand(String command) {
+ this.command = command;
+ }
+
+ protected String getCommand() {
+ return command;
+ }
+
+ /**
+ * A reference to the environment variables that will be passed to the
+ * process. Empty by default.
+ */
+ protected Map<String, String> getEnvironment() {
+ return environment;
+ }
+
+ protected Map<String, String> getVariables() {
+ return variables;
+ }
+
+ public void setVariables(Map<String, String> variables) {
+ this.variables = variables;
+ }
+
+ public void setExecutor(Executor executor) {
+ this.executor = executor;
+ }
+
+ public void setSudo(String sudo) {
+ this.sudo = sudo;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileObject;
+
+/** Backups a PostgreSQL database using pg_dump. */
+public class PostgreSqlBackup extends OsCallBackup {
+ /**
+ * PostgreSQL password environment variable (see
+ * http://stackoverflow.com/questions
+ * /2893954/how-to-pass-in-password-to-pg-dump)
+ */
+ protected final static String PGPASSWORD = "PGPASSWORD";
+
+ private String pgDumpLocation = "/usr/bin/pg_dump";
+
+ private String dbUser;
+ private String dbPassword;
+ private String dbName;
+
+ public PostgreSqlBackup() {
+ super();
+ }
+
+ public PostgreSqlBackup(String dbUser, String dbPassword, String dbName) {
+ this.dbUser = dbUser;
+ this.dbPassword = dbPassword;
+ this.dbName = dbName;
+ init();
+ }
+
+ @Override
+ public void init() {
+ // disable compression since pg_dump is used with -Fc option
+ setCompression(null);
+
+ if (getName() == null)
+ setName(dbName + ".pgdump");
+ super.init();
+ }
+
+ @Override
+ public void writeBackup(FileObject targetFo) {
+ if (getCommand() == null) {
+ getEnvironment().put(PGPASSWORD, dbPassword);
+ setCommand(pgDumpLocation + " -Fc" + " -U ${dbUser} ${dbName}");
+ }
+ getVariables().put("dbUser", dbUser);
+ getVariables().put("dbPassword", dbPassword);
+ getVariables().put("dbName", dbName);
+
+ super.writeBackup(targetFo);
+ }
+
+ public void setDbUser(String dbUser) {
+ this.dbUser = dbUser;
+ }
+
+ public void setDbPassword(String dbPassword) {
+ this.dbPassword = dbPassword;
+ }
+
+ public void setDbName(String dbName) {
+ this.dbName = dbName;
+ }
+
+ public void setPgDumpLocation(String mysqldumpLocation) {
+ this.pgDumpLocation = mysqldumpLocation;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.apache.commons.vfs2.FileSystemManager;
+
+/** Simple implementation of a backup context */
+public class SimpleBackupContext implements BackupContext {
+	// format used both to name and later to parse backup folders
+	private DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_HHmm");
+	private final Date timestamp;
+	private final String name;
+
+	private final FileSystemManager fileSystemManager;
+
+	// NOTE(review): the backupsBase argument is currently ignored — confirm
+	// whether this context is expected to remember it.
+	public SimpleBackupContext(FileSystemManager fileSystemManager,
+			String backupsBase, String name) {
+		this.name = name;
+		// fixed at creation time so that all atomic backups of one run share
+		// the same timestamped folder
+		this.timestamp = new Date();
+		this.fileSystemManager = fileSystemManager;
+	}
+
+	public Date getTimestamp() {
+		return timestamp;
+	}
+
+	public String getTimestampAsString() {
+		return dateFormat.format(timestamp);
+	}
+
+	public String getSystemName() {
+		return name;
+	}
+
+	/** Relative path of this run's backup folder: {@code <name>/<timestamp>}. */
+	public String getRelativeFolder() {
+		return name + '/' + getTimestampAsString();
+	}
+
+	public DateFormat getDateFormat() {
+		return dateFormat;
+	}
+
+	public FileSystemManager getFileSystemManager() {
+		return fileSystemManager;
+	}
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.text.DateFormat;
+import java.time.Period;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.temporal.ChronoUnit;
+import java.util.Date;
+import java.util.SortedMap;
+import java.util.TreeMap;
+
+import org.apache.commons.vfs2.FileObject;
+import org.apache.commons.vfs2.FileSystemManager;
+import org.apache.commons.vfs2.FileSystemOptions;
+import org.apache.commons.vfs2.Selectors;
+import org.argeo.api.cms.CmsLog;
+
+/** Simple backup purge which keeps backups only for a given number of days */
+public class SimpleBackupPurge implements BackupPurge {
+	private final static CmsLog log = CmsLog.getLog(SimpleBackupPurge.class);
+
+	/** Number of days during which backups are kept. */
+	private Integer daysKept = 30;
+
+	@Override
+	public void purge(FileSystemManager fileSystemManager, String base, String name, DateFormat dateFormat,
+			FileSystemOptions opts) {
+		try {
+			ZonedDateTime nowDt = ZonedDateTime.now();
+			FileObject baseFo = fileSystemManager.resolveFile(base + '/' + name, opts);
+
+			// sorted chronologically by backup date
+			SortedMap<ZonedDateTime, FileObject> toDelete = new TreeMap<ZonedDateTime, FileObject>();
+			int backupCount = 0;
+
+			// make sure base dir exists
+			baseFo.createFolder();
+
+			// scan backups and list those which should be deleted
+			for (FileObject backupFo : baseFo.getChildren()) {
+				String backupName = backupFo.getName().getBaseName();
+				Date backupDate = dateFormat.parse(backupName);
+				backupCount++;
+				ZonedDateTime backupDt = ZonedDateTime.ofInstant(backupDate.toInstant(), ZoneId.systemDefault());
+				// use the TOTAL number of days between the two dates.
+				// Period#getDays() only returns the days component of the
+				// period (e.g. 2 for "1 month and 2 days"), which would
+				// prevent backups older than a month from ever being purged.
+				long days = ChronoUnit.DAYS.between(backupDt.toLocalDate(), nowDt.toLocalDate());
+				if (days > daysKept) {
+					toDelete.put(backupDt, backupFo);
+				}
+			}
+
+			if (toDelete.size() != 0 && toDelete.size() == backupCount) {
+				// all backups would be deleted
+				// but we want to keep at least one: the most recent one
+				// (lastKey(), since the map is sorted chronologically;
+				// firstKey() would have kept the oldest backup instead)
+				ZonedDateTime lastBackupDt = toDelete.lastKey();
+				FileObject keptFo = toDelete.remove(lastBackupDt);
+				log.warn("Backup " + keptFo + " kept although it is older than " + daysKept + " days.");
+			}
+
+			// delete old backups
+			for (FileObject backupFo : toDelete.values()) {
+				backupFo.delete(Selectors.SELECT_ALL);
+				if (log.isDebugEnabled())
+					log.debug("Deleted backup " + backupFo);
+			}
+		} catch (Exception e) {
+			throw new MaintenanceException("Could not purge previous backups", e);
+		}
+
+	}
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.io.File;
+
+import org.apache.commons.vfs2.FileObject;
+
+/** Backups a Subversion repository using svnadmin. */
+public class SvnBackup extends OsCallBackup {
+	/** Path to the svnadmin executable. */
+	private String svnadminLocation = "/usr/bin/svnadmin";
+
+	private String repoLocation;
+	private String repoName;
+
+	/** Default constructor; the setters and {@link #init()} must be called. */
+	public SvnBackup() {
+	}
+
+	/** Convenience constructor which also calls {@link #init()}. */
+	public SvnBackup(String repoLocation) {
+		this.repoLocation = repoLocation;
+		init();
+	}
+
+	@Override
+	public void init() {
+		// use directory as repo name
+		if (repoName == null)
+			repoName = new File(repoLocation).getName();
+
+		if (getName() == null)
+			setName(repoName + ".svndump");
+		super.init();
+	}
+
+	@Override
+	public void writeBackup(FileObject targetFo) {
+		if (getCommand() == null) {
+			setCommand(svnadminLocation + " dump " + " ${repoLocation}");
+		}
+		getVariables().put("repoLocation", repoLocation);
+
+		super.writeBackup(targetFo);
+	}
+
+	public void setRepoLocation(String repoLocation) {
+		this.repoLocation = repoLocation;
+	}
+
+	public void setRepoName(String repoName) {
+		this.repoName = repoName;
+	}
+
+	public void setSvnadminLocation(String svnadminLocation) {
+		this.svnadminLocation = svnadminLocation;
+	}
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.vfs2.FileObject;
+import org.apache.commons.vfs2.FileSystemException;
+import org.apache.commons.vfs2.FileSystemManager;
+import org.apache.commons.vfs2.FileSystemOptions;
+import org.apache.commons.vfs2.Selectors;
+import org.apache.commons.vfs2.UserAuthenticator;
+import org.apache.commons.vfs2.impl.DefaultFileSystemConfigBuilder;
+import org.argeo.api.cms.CmsLog;
+import org.argeo.util.LangUtils;
+
+/**
+ * Combines multiple backups and transfer them to a remote location. Purges
+ * remote and local data based on certain criteria.
+ */
+public class SystemBackup implements Runnable {
+	private final static CmsLog log = CmsLog.getLog(SystemBackup.class);
+
+	private FileSystemManager fileSystemManager;
+	private UserAuthenticator userAuthenticator = null;
+
+	// base directory under which all backups of this system are stored
+	private String backupsBase;
+	private String systemName;
+
+	// individual backups performed on each run
+	private List<AtomicBackup> atomicBackups = new ArrayList<AtomicBackup>();
+	private BackupPurge backupPurge = new SimpleBackupPurge();
+
+	// remote base URI -> authenticator, for off-site copies of the backups
+	private Map<String, UserAuthenticator> remoteBases = new HashMap<String, UserAuthenticator>();
+
+	@Override
+	public void run() {
+		if (atomicBackups.size() == 0)
+			throw new MaintenanceException("No atomic backup listed");
+		// failures are accumulated so that one failing step does not prevent
+		// the others from running; they are reported in one exception at the end
+		List<String> failures = new ArrayList<String>();
+
+		SimpleBackupContext backupContext = new SimpleBackupContext(fileSystemManager, backupsBase, systemName);
+
+		// purge older backups
+		FileSystemOptions opts = new FileSystemOptions();
+		try {
+			DefaultFileSystemConfigBuilder.getInstance().setUserAuthenticator(opts, userAuthenticator);
+		} catch (Exception e) {
+			throw new MaintenanceException("Cannot create authentication", e);
+		}
+
+		try {
+
+			backupPurge.purge(fileSystemManager, backupsBase, systemName, backupContext.getDateFormat(), opts);
+		} catch (Exception e) {
+			failures.add("Purge " + backupsBase + " failed: " + e.getMessage());
+			log.error("Purge of " + backupsBase + " failed", e);
+		}
+
+		// perform backup
+		for (AtomicBackup atomickBackup : atomicBackups) {
+			try {
+				String target = atomickBackup.backup(fileSystemManager, backupsBase, backupContext, opts);
+				if (log.isDebugEnabled())
+					log.debug("Performed backup " + target);
+			} catch (Exception e) {
+				String msg = "Atomic backup " + atomickBackup.getName() + " failed: "
+						+ LangUtils.chainCausesMessages(e);
+				failures.add(msg);
+				log.error(msg);
+				if (log.isTraceEnabled())
+					log.trace("Stacktrace of atomic backup " + atomickBackup.getName() + " failure.", e);
+			}
+		}
+
+		// dispatch to remote
+		for (String remoteBase : remoteBases.keySet()) {
+			FileObject localBaseFo = null;
+			FileObject remoteBaseFo = null;
+			UserAuthenticator auth = remoteBases.get(remoteBase);
+
+			// authentication
+			FileSystemOptions remoteOpts = new FileSystemOptions();
+			try {
+				DefaultFileSystemConfigBuilder.getInstance().setUserAuthenticator(remoteOpts, auth);
+				// the remote copies are purged with the same policy as local ones
+				backupPurge.purge(fileSystemManager, remoteBase, systemName, backupContext.getDateFormat(), remoteOpts);
+			} catch (Exception e) {
+				failures.add("Purge " + remoteBase + " failed: " + e.getMessage());
+				log.error("Cannot purge " + remoteBase, e);
+			}
+
+			try {
+				localBaseFo = fileSystemManager.resolveFile(backupsBase + '/' + backupContext.getRelativeFolder(),
+						opts);
+				remoteBaseFo = fileSystemManager.resolveFile(remoteBase + '/' + backupContext.getRelativeFolder(),
+						remoteOpts);
+				remoteBaseFo.copyFrom(localBaseFo, Selectors.SELECT_ALL);
+				if (log.isDebugEnabled())
+					log.debug("Copied backup to " + remoteBaseFo + " from " + localBaseFo);
+				// }
+			} catch (Exception e) {
+				failures.add("Dispatch to " + remoteBase + " failed: " + e.getMessage());
+				log.error("Cannot dispatch backups from " + backupContext.getRelativeFolder() + " to " + remoteBase, e);
+			}
+			BackupUtils.closeFOQuietly(localBaseFo);
+			BackupUtils.closeFOQuietly(remoteBaseFo);
+		}
+
+		int failureCount = 0;
+		if (failures.size() > 0) {
+			StringBuffer buf = new StringBuffer();
+			for (String failure : failures) {
+				buf.append('\n').append(failureCount).append(" - ").append(failure);
+				failureCount++;
+			}
+			throw new MaintenanceException(failureCount + " error(s) when running the backup,"
+					+ " check the logs and the backups as soon as possible." + buf);
+		}
+	}
+
+	public void setFileSystemManager(FileSystemManager fileSystemManager) {
+		this.fileSystemManager = fileSystemManager;
+	}
+
+	public void setBackupsBase(String backupsBase) {
+		this.backupsBase = backupsBase;
+	}
+
+	public void setSystemName(String name) {
+		this.systemName = name;
+	}
+
+	public void setAtomicBackups(List<AtomicBackup> atomicBackups) {
+		this.atomicBackups = atomicBackups;
+	}
+
+	public void setBackupPurge(BackupPurge backupPurge) {
+		this.backupPurge = backupPurge;
+	}
+
+	public void setUserAuthenticator(UserAuthenticator userAuthenticator) {
+		this.userAuthenticator = userAuthenticator;
+	}
+
+	public void setRemoteBases(Map<String, UserAuthenticator> remoteBases) {
+		this.remoteBases = remoteBases;
+	}
+
+	// commented-out usage example kept from the original for reference
+	// public static void main(String args[]) {
+	// while (true) {
+	// try {
+	// StandardFileSystemManager fsm = new StandardFileSystemManager();
+	// fsm.init();
+	//
+	// SystemBackup systemBackup = new SystemBackup();
+	// systemBackup.setSystemName("mySystem");
+	// systemBackup
+	// .setBackupsBase("/home/mbaudier/dev/src/commons/server/runtime/org.argeo.server.core/target");
+	// systemBackup.setFileSystemManager(fsm);
+	//
+	// List<AtomicBackup> atomicBackups = new ArrayList<AtomicBackup>();
+	//
+	// MySqlBackup mySqlBackup = new MySqlBackup("root", "", "test");
+	// atomicBackups.add(mySqlBackup);
+	// PostgreSqlBackup postgreSqlBackup = new PostgreSqlBackup(
+	// "argeo", "argeo", "gis_template");
+	// atomicBackups.add(postgreSqlBackup);
+	// SvnBackup svnBackup = new SvnBackup(
+	// "/home/mbaudier/tmp/testsvnrepo");
+	// atomicBackups.add(svnBackup);
+	//
+	// systemBackup.setAtomicBackups(atomicBackups);
+	//
+	// Map<String, UserAuthenticator> remoteBases = new HashMap<String,
+	// UserAuthenticator>();
+	// StaticUserAuthenticator userAuthenticator = new StaticUserAuthenticator(
+	// null, "demo", "demo");
+	// remoteBases.put("sftp://localhost/home/mbaudier/test",
+	// userAuthenticator);
+	// systemBackup.setRemoteBases(remoteBases);
+	//
+	// systemBackup.run();
+	//
+	// fsm.close();
+	// } catch (FileSystemException e) {
+	// // TODO Auto-generated catch block
+	// e.printStackTrace();
+	// System.exit(1);
+	// }
+	//
+	// // wait
+	// try {
+	// Thread.sleep(120 * 1000);
+	// } catch (InterruptedException e) {
+	// e.printStackTrace();
+	// }
+	// }
+	// }
+}
--- /dev/null
+/** Argeo Node backup utilities based on Apache Commons VFS. */
+package org.argeo.slc.backup.vfs;
\ No newline at end of file
--- /dev/null
+package org.argeo.slc.cms.deploy;
+
+import org.argeo.slc.deploy.DeployedSystem;
+
+/** A deployed Argeo CMS system. */
+public interface CmsDeployedSystem extends DeployedSystem {
+
+}
--- /dev/null
+package org.argeo.slc.cms.deploy;
+
+import java.util.List;
+
+import org.argeo.slc.deploy.DeploymentData;
+
+/** Deployment data of an Argeo CMS system. */
+public interface CmsDeploymentData extends DeploymentData {
+	/** The (mutable) list of modules to activate at this OSGi start level. */
+	List<String> getModulesToActivate(int startLevel);
+}
--- /dev/null
+package org.argeo.slc.cms.deploy;
+
+import java.nio.file.Path;
+
+import org.argeo.slc.deploy.TargetData;
+
+/** Target data of an Argeo CMS deployment. */
+public interface CmsTargetData extends TargetData {
+	/** The directory where instance data is stored. */
+	Path getInstanceData();
+
+	/** The HTTP port to listen to, or null if no HTTP port was configured. */
+	Integer getHttpPort();
+
+}
--- /dev/null
+package org.argeo.slc.cms.deploy;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/** In-memory implementation of {@link CmsDeploymentData}. */
+public class SimpleCmsDeploymentData implements CmsDeploymentData {
+	/** Modules to activate, keyed and sorted by OSGi start level. */
+	private Map<Integer, List<String>> startLevels = new TreeMap<>();
+
+	@Override
+	public List<String> getModulesToActivate(int startLevel) {
+		// single lookup, and the empty list is only allocated when missing
+		// (the previous putIfAbsent/get pair looked the key up twice and
+		// allocated an ArrayList even when the entry already existed)
+		return startLevels.computeIfAbsent(startLevel, level -> new ArrayList<>());
+	}
+
+}
--- /dev/null
+package org.argeo.slc.cms.deploy;
+
+import java.nio.file.Path;
+
+/** Straightforward bean implementation of {@link CmsTargetData}. */
+public class SimpleCmsTargetData implements CmsTargetData {
+	private Path instanceData;
+	private Integer httpPort;
+
+	public SimpleCmsTargetData(Path instanceData, Integer httpPort) {
+		this.instanceData = instanceData;
+		this.httpPort = httpPort;
+	}
+
+	public Integer getHttpPort() {
+		return httpPort;
+	}
+
+	public void setHttpPort(Integer httpPort) {
+		this.httpPort = httpPort;
+	}
+
+	public Path getInstanceData() {
+		return instanceData;
+	}
+
+	public void setInstanceData(Path instanceData) {
+		this.instanceData = instanceData;
+	}
+
+}
--- /dev/null
+package org.argeo.slc.cms.deploy.osgi;
+
+import org.argeo.slc.build.Distribution;
+import org.argeo.slc.build.ModularDistribution;
+import org.argeo.slc.cms.deploy.CmsDeployedSystem;
+import org.argeo.slc.cms.deploy.CmsDeploymentData;
+import org.argeo.slc.cms.deploy.CmsTargetData;
+import org.argeo.slc.deploy.DeploymentData;
+import org.argeo.slc.deploy.TargetData;
+import org.osgi.framework.BundleContext;
+
+/** A CMS system deployed in a running OSGi framework. */
+public class CmsOsgiDeployedSystem implements CmsDeployedSystem {
+	private ModularDistribution distribution;
+	private CmsTargetData targetData;
+	private CmsDeploymentData deploymentData;
+
+	// bundle context of the running framework's system bundle
+	private BundleContext systemBundleContext;
+
+	public CmsOsgiDeployedSystem(BundleContext systemBundleContext, ModularDistribution distribution,
+			CmsTargetData targetData, CmsDeploymentData deploymentData) {
+		this.systemBundleContext = systemBundleContext;
+
+		this.distribution = distribution;
+		this.targetData = targetData;
+		this.deploymentData = deploymentData;
+	}
+
+	@Override
+	public String getDeployedSystemId() {
+		// TODO Auto-generated method stub
+		// NOTE(review): not implemented yet — callers currently receive null
+		return null;
+	}
+
+	@Override
+	public Distribution getDistribution() {
+		return distribution;
+	}
+
+	@Override
+	public DeploymentData getDeploymentData() {
+		return deploymentData;
+	}
+
+	@Override
+	public TargetData getTargetData() {
+		return targetData;
+	}
+
+}
--- /dev/null
+package org.argeo.slc.cms.deploy.osgi;
+
+import java.io.IOException;
+import java.lang.System.Logger;
+import java.lang.System.Logger.Level;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.List;
+import java.util.Map;
+import java.util.StringJoiner;
+import java.util.TreeMap;
+
+import org.argeo.init.a2.A2Source;
+import org.argeo.init.a2.FsA2Source;
+import org.argeo.init.osgi.OsgiBoot;
+import org.argeo.init.osgi.OsgiRuntimeContext;
+import org.argeo.slc.WellKnownConstants;
+import org.argeo.slc.build.Distribution;
+import org.argeo.slc.cms.deploy.CmsDeployedSystem;
+import org.argeo.slc.cms.deploy.CmsDeploymentData;
+import org.argeo.slc.cms.deploy.CmsTargetData;
+import org.argeo.slc.cms.deploy.SimpleCmsDeploymentData;
+import org.argeo.slc.cms.deploy.SimpleCmsTargetData;
+import org.argeo.slc.cms.distribution.A2Distribution;
+import org.argeo.slc.deploy.DeployedSystem;
+import org.argeo.slc.deploy.Deployment;
+import org.argeo.slc.deploy.DeploymentData;
+import org.argeo.slc.deploy.TargetData;
+
+/**
+ * Deploys an Argeo CMS as an embedded OSGi framework, configured from an
+ * {@link A2Distribution}, a {@link CmsTargetData} and a
+ * {@link CmsDeploymentData}.
+ */
+public class CmsOsgiDeployment implements Deployment {
+	private final static Logger logger = System.getLogger(CmsOsgiDeployment.class.getName());
+
+	private A2Distribution distribution;
+	private CmsTargetData targetData;
+	private CmsDeploymentData deploymentData;
+
+	// set once run() has successfully started the framework
+	private CmsDeployedSystem deployedSystem;
+
+	private OsgiRuntimeContext runtimeContext;
+
+	@Override
+	public void run() {
+		try {
+			Map<String, String> config = new TreeMap<>();
+
+			// sources: comma-separated list of A2 source URIs
+			StringJoiner sourcesProperty = new StringJoiner(",");
+			for (A2Source a2Source : distribution.getA2Sources()) {
+				sourcesProperty.add(a2Source.getUri().toString());
+			}
+			config.put(OsgiBoot.PROP_ARGEO_OSGI_SOURCES, sourcesProperty.toString());
+
+			// target
+			config.put(WellKnownConstants.OSGI_INSTANCE_AREA,
+					targetData.getInstanceData().toRealPath().toUri().toString());
+			if (targetData.getHttpPort() != null) {
+				config.put(WellKnownConstants.OSGI_HTTP_PORT, targetData.getHttpPort().toString());
+			}
+
+			// the configuration area is throw-away, recreated on each run
+			Path configurationArea = Files.createTempDirectory("slc-cms-test");
+			config.put(WellKnownConstants.OSGI_CONFIGURATION_AREA, configurationArea.toUri().toString());
+
+			// modules activation, one start property per non-empty start level
+			// NOTE(review): start levels above 6 are silently ignored — confirm
+			// 6 is the intended maximum
+			for (int startLevel = 0; startLevel <= 6; startLevel++) {
+				List<String> modules = deploymentData.getModulesToActivate(startLevel);
+				if (modules.size() != 0) {
+					String startProperty = String.join(",", modules);
+					config.put(OsgiBoot.PROP_ARGEO_OSGI_START + "." + startLevel + ".node", startProperty);
+				}
+			}
+
+			config.put("org.eclipse.equinox.http.jetty.autostart", "false");
+			config.put("org.osgi.framework.bootdelegation",
+					"com.sun.jndi.ldap,com.sun.jndi.ldap.sasl,com.sun.security.jgss,com.sun.jndi.dns,com.sun.nio.file,com.sun.nio.sctp");
+			config.put("eclipse.ignoreApp", "true");
+			config.put("osgi.noShutdown", "true");
+
+//			config.put("osgi.console", "true");
+
+			// initialise
+			for (String key : config.keySet()) {
+//				System.out.println(key + "=" + config.get(key));
+				logger.log(Level.INFO, () -> key + "=" + config.get(key));
+			}
+
+			runtimeContext = new OsgiRuntimeContext(config);
+			runtimeContext.run();
+
+			deployedSystem = new CmsOsgiDeployedSystem(runtimeContext.getFramework().getBundleContext(), distribution,
+					targetData, deploymentData);
+
+		} catch (Exception e) {
+			throw new IllegalStateException("Cannot run OSGi deployment", e);
+		}
+
+	}
+
+	@Override
+	public DeployedSystem getDeployedSystem() {
+		return deployedSystem;
+	}
+
+	@Override
+	public void setTargetData(TargetData targetData) {
+		this.targetData = (CmsTargetData) targetData;
+	}
+
+	@Override
+	public void setDeploymentData(DeploymentData deploymentData) {
+		this.deploymentData = (CmsDeploymentData) deploymentData;
+	}
+
+	@Override
+	public void setDistribution(Distribution distribution) {
+		this.distribution = (A2Distribution) distribution;
+	}
+
+	public OsgiRuntimeContext getRuntimeContext() {
+		return runtimeContext;
+	}
+
+	/** Manual test entry point, using developer-local hard-coded paths. */
+	public static void main(String[] args) {
+		try {
+			Path userHome = Paths.get(System.getProperty("user.home"));
+
+			// distribution
+			Path a2Base = userHome.resolve("dev/git/unstable/output/a2");
+			A2Distribution distribution = new A2Distribution();
+			distribution.getA2Sources().add(new FsA2Source(a2Base));
+
+			// target data
+			Path instanceData = userHome.resolve("dev/git/unstable/argeo-slc/sdk/exec/cms-deployment/data");
+			Files.createDirectories(instanceData);
+			Integer httpPort = 7070;
+			SimpleCmsTargetData targetData = new SimpleCmsTargetData(instanceData, httpPort);
+
+			// deployment data
+			SimpleCmsDeploymentData deploymentData = new SimpleCmsDeploymentData();
+			deploymentData.getModulesToActivate(2).add("org.eclipse.equinox.http.servlet");
+			deploymentData.getModulesToActivate(2).add("org.eclipse.equinox.cm");
+			deploymentData.getModulesToActivate(2).add("org.apache.felix.scr");
+			deploymentData.getModulesToActivate(2).add("org.eclipse.rap.rwt.osgi");
+
+			deploymentData.getModulesToActivate(3).add("org.argeo.cms");
+
+			deploymentData.getModulesToActivate(4).add("org.argeo.cms.servlet");
+			deploymentData.getModulesToActivate(4).add("org.argeo.cms.ui.rap");
+			deploymentData.getModulesToActivate(4).add("org.argeo.cms.jcr");
+
+			deploymentData.getModulesToActivate(5).add("org.argeo.cms.e4.rap");
+
+			CmsOsgiDeployment deployment = new CmsOsgiDeployment();
+			deployment.setDistribution(distribution);
+			deployment.setTargetData(targetData);
+			deployment.setDeploymentData(deploymentData);
+			deployment.run();
+
+			// set to true to start a second framework on another port
+			boolean multiple = false;
+			if (multiple) {
+
+				Path instanceData2 = userHome.resolve("dev/git/unstable/argeo-slc/sdk/exec/cms-deployment2/data");
+				Files.createDirectories(instanceData2);
+				Integer httpPort2 = 7071;
+				SimpleCmsTargetData targetData2 = new SimpleCmsTargetData(instanceData2, httpPort2);
+
+				CmsOsgiDeployment deployment2 = new CmsOsgiDeployment();
+				deployment2.setDistribution(distribution);
+				deployment2.setTargetData(targetData2);
+				deployment2.setDeploymentData(deploymentData);
+				deployment2.run();
+			}
+
+			// block until the framework stops
+			deployment.getRuntimeContext().waitForStop(0);
+
+		} catch (IOException | InterruptedException e) {
+			e.printStackTrace();
+			System.exit(1);
+		}
+	}
+
+}
--- /dev/null
+package org.argeo.slc.cms.distribution;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.argeo.init.a2.A2Branch;
+import org.argeo.init.a2.A2Component;
+import org.argeo.init.a2.A2Contribution;
+import org.argeo.init.a2.A2Module;
+import org.argeo.init.a2.A2Source;
+import org.argeo.slc.CategoryNameVersion;
+import org.argeo.slc.DefaultCategoryNameVersion;
+import org.argeo.slc.NameVersion;
+import org.argeo.slc.build.Distribution;
+import org.argeo.slc.build.ModularDistribution;
+
+/** A modular distribution based on a list of A2 provisioning sources. */
+public class A2Distribution implements ModularDistribution {
+	private List<A2Source> a2Sources = new ArrayList<>();
+
+	@Override
+	public String getDistributionId() {
+		// TODO Auto-generated method stub
+		// NOTE(review): not implemented yet — returns null
+		return null;
+	}
+
+	@Override
+	public String getName() {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	@Override
+	public String getVersion() {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	/**
+	 * Lists all modules of all sources, walking the A2 hierarchy
+	 * (source > contribution > component > branch > module).
+	 */
+	@Override
+	public Iterator<? extends NameVersion> nameVersions() {
+		List<CategoryNameVersion> nameVersions = new ArrayList<>();
+		for (A2Source a2Source : a2Sources) {
+			for (A2Contribution a2Contribution : a2Source.listContributions(null)) {
+				for (A2Component a2Component : a2Contribution.listComponents(null)) {
+					for (A2Branch a2Branch : a2Component.listBranches(null)) {
+						for (A2Module a2Module : a2Branch.listModules(null)) {
+							CategoryNameVersion nameVersion = new DefaultCategoryNameVersion(a2Contribution.getId(),
+									a2Component.getId(), a2Module.getVersion().toString());
+							nameVersions.add(nameVersion);
+						}
+					}
+				}
+			}
+		}
+		return nameVersions.iterator();
+	}
+
+	@Override
+	public Distribution getModuleDistribution(String moduleName, String moduleVersion) {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	@Override
+	public Object getModulesDescriptor(String descriptorType) {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	/** The underlying A2 sources (mutable, callers add sources directly). */
+	public List<A2Source> getA2Sources() {
+		return a2Sources;
+	}
+
+
+}
--- /dev/null
+package org.argeo.slc.cms.distribution;
+
+import org.argeo.init.a2.A2Module;
+import org.argeo.slc.build.Distribution;
+
+/** A distribution identified by a single A2 module's coordinates. */
+public class A2ModuleDistribution implements Distribution {
+	private A2Module a2Module;
+
+	/** Default constructor; {@link #setA2Module(A2Module)} must be called. */
+	public A2ModuleDistribution() {
+	}
+
+	public A2ModuleDistribution(A2Module a2Module) {
+		this.a2Module = a2Module;
+	}
+
+	@Override
+	public String getDistributionId() {
+		return a2Module.getCoordinates();
+	}
+
+	public A2Module getA2Module() {
+		return a2Module;
+	}
+
+	// previously the a2Module field could never be set (no constructor or
+	// setter), so getDistributionId() always failed with NullPointerException
+	public void setA2Module(A2Module a2Module) {
+		this.a2Module = a2Module;
+	}
+
+}
--- /dev/null
+package org.argeo.slc.cms.httpclient3;
+
+import org.apache.commons.httpclient.Credentials;
+import org.apache.commons.httpclient.auth.AuthScheme;
+import org.apache.commons.httpclient.auth.CredentialsNotAvailableException;
+import org.apache.commons.httpclient.auth.CredentialsProvider;
+
+/** SPNEGO credential provider */
+public class HttpCredentialProvider implements CredentialsProvider {
+
+	@Override
+	public Credentials getCredentials(AuthScheme scheme, String host, int port, boolean proxy)
+			throws CredentialsNotAvailableException {
+		// only the SPNEGO scheme is supported; the returned credentials carry
+		// no data of their own (see SpnegoCredentials)
+		if (scheme instanceof SpnegoAuthScheme)
+			return new SpnegoCredentials();
+		else
+			throw new UnsupportedOperationException("Auth scheme " + scheme.getSchemeName() + " not supported");
+	}
+
+}
--- /dev/null
+package org.argeo.slc.cms.httpclient3;
+
+import java.net.URL;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+
+import javax.security.auth.Subject;
+import javax.security.auth.login.LoginContext;
+
+import org.apache.commons.httpclient.Credentials;
+import org.apache.commons.httpclient.HttpClient;
+import org.apache.commons.httpclient.HttpMethod;
+import org.apache.commons.httpclient.auth.AuthPolicy;
+import org.apache.commons.httpclient.auth.AuthScheme;
+import org.apache.commons.httpclient.auth.AuthenticationException;
+import org.apache.commons.httpclient.auth.CredentialsProvider;
+import org.apache.commons.httpclient.auth.MalformedChallengeException;
+import org.apache.commons.httpclient.methods.GetMethod;
+import org.apache.commons.httpclient.params.DefaultHttpParams;
+import org.apache.commons.httpclient.params.HttpMethodParams;
+import org.apache.commons.httpclient.params.HttpParams;
+import org.argeo.cms.auth.RemoteAuthUtils;
+
+//// Register client-side SPNEGO auth scheme
+//AuthPolicy.registerAuthScheme(SpnegoAuthScheme.NAME, SpnegoAuthScheme.class);
+//HttpParams params = DefaultHttpParams.getDefaultParams();
+//ArrayList<String> schemes = new ArrayList<>();
+//schemes.add(SpnegoAuthScheme.NAME);// SPNEGO preferred
+//// schemes.add(AuthPolicy.BASIC);// incompatible with Basic
+//params.setParameter(AuthPolicy.AUTH_SCHEME_PRIORITY, schemes);
+//params.setParameter(CredentialsProvider.PROVIDER, new HttpCredentialProvider());
+//params.setParameter(HttpMethodParams.COOKIE_POLICY, KernelConstants.COOKIE_POLICY_BROWSER_COMPATIBILITY);
+//// params.setCookiePolicy(CookiePolicy.BROWSER_COMPATIBILITY);
+
+
+
+/** Implementation of the SPNEGO auth scheme. */
+public class SpnegoAuthScheme implements AuthScheme {
+//	private final static Log log = LogFactory.getLog(SpnegoAuthScheme.class);
+
+	// scheme name as it appears in WWW-Authenticate/Authorization headers
+	public static final String NAME = "Negotiate";
+//	private final static Oid KERBEROS_OID;
+//	static {
+//		try {
+//			KERBEROS_OID = new Oid("1.3.6.1.5.5.2");
+//		} catch (GSSException e) {
+//			throw new IllegalStateException("Cannot create Kerberos OID", e);
+//		}
+//	}
+
+	private final static String DEFAULT_KERBEROS_SERVICE = "HTTP";
+
+	// only set to true on failure, so that the client stops retrying
+	private boolean complete = false;
+	// NOTE(review): realm is never assigned — getRealm() always returns null
+	private String realm;
+
+	@Override
+	public void processChallenge(String challenge) throws MalformedChallengeException {
+		// the server challenge is deliberately ignored, see commented-out code
+		// if(tokenStr!=null){
+		// log.error("Received challenge while there is a token. Failing.");
+		// complete = false;
+		// }
+
+	}
+
+	@Override
+	public String getSchemeName() {
+		return NAME;
+	}
+
+	@Override
+	public String getParameter(String name) {
+		return null;
+	}
+
+	@Override
+	public String getRealm() {
+		return realm;
+	}
+
+	@Override
+	public String getID() {
+		return NAME;
+	}
+
+	@Override
+	public boolean isConnectionBased() {
+		return true;
+	}
+
+	@Override
+	public boolean isComplete() {
+		return complete;
+	}
+
+	/** Not supported: SPNEGO needs the {@link HttpMethod}, not just the URI. */
+	@Override
+	public String authenticate(Credentials credentials, String method, String uri) throws AuthenticationException {
+		// log.debug("authenticate " + method + " " + uri);
+		// return null;
+		throw new UnsupportedOperationException();
+	}
+
+	/** Builds the Negotiate header value from a GSS token for the target host. */
+	@Override
+	public String authenticate(Credentials credentials, HttpMethod method) throws AuthenticationException {
+//		GSSContext context = null;
+		String hostname;
+		try {
+			hostname = method.getURI().getHost();
+			String tokenStr = RemoteAuthUtils.createGssToken(null, DEFAULT_KERBEROS_SERVICE, hostname);
+			return "Negotiate " + tokenStr;
+		} catch (Exception e1) {
+			// mark the scheme as complete so that authentication is not retried
+			complete = true;
+			throw new AuthenticationException("Cannot authenticate " + method, e1);
+		}
+//		String serverPrinc = DEFAULT_KERBEROS_SERVICE + "@" + hostname;
+//
+//		try {
+//			// Get service's principal name
+//			GSSManager manager = GSSManager.getInstance();
+//			GSSName serverName = manager.createName(serverPrinc, GSSName.NT_HOSTBASED_SERVICE, KERBEROS_OID);
+//
+//			// Get the context for authentication
+//			context = manager.createContext(serverName, KERBEROS_OID, null, GSSContext.DEFAULT_LIFETIME);
+//			// context.requestMutualAuth(true); // Request mutual authentication
+//			// context.requestConf(true); // Request confidentiality
+//			context.requestCredDeleg(true);
+//
+//			byte[] token = new byte[0];
+//
+//			// token is ignored on the first call
+//			token = context.initSecContext(token, 0, token.length);
+//
+//			// Send a token to the server if one was generated by
+//			// initSecContext
+//			if (token != null) {
+//				tokenStr = Base64.getEncoder().encodeToString(token);
+//				// complete=true;
+//			}
+//		} catch (GSSException e) {
+//			complete = true;
+//			throw new AuthenticationException("Cannot authenticate to " + serverPrinc, e);
+//		}
+	}
+
+	/** Manual test: performs a Kerberos login then GETs the provided URL. */
+	public static void main(String[] args) {
+		String principal = System.getProperty("javax.security.auth.login.name");
+		if (args.length == 0 || principal == null) {
+			System.err.println("usage: java -Djavax.security.auth.login.name=<principal@REALM> "
+					+ SpnegoAuthScheme.class.getName() + " <url>");
+			System.exit(1);
+			return;
+		}
+		String url = args[0];
+
+		URL jaasUrl = SpnegoAuthScheme.class.getResource("jaas.cfg");
+		System.setProperty("java.security.auth.login.config", jaasUrl.toExternalForm());
+		try {
+			LoginContext lc = new LoginContext("SINGLE_USER");
+			lc.login();
+
+			// register this scheme as the preferred one for the default params
+			AuthPolicy.registerAuthScheme(SpnegoAuthScheme.NAME, SpnegoAuthScheme.class);
+			HttpParams params = DefaultHttpParams.getDefaultParams();
+			ArrayList<String> schemes = new ArrayList<>();
+			schemes.add(SpnegoAuthScheme.NAME);
+			params.setParameter(AuthPolicy.AUTH_SCHEME_PRIORITY, schemes);
+			params.setParameter(CredentialsProvider.PROVIDER, new HttpCredentialProvider());
+
+			// the HTTP call runs as the logged-in Kerberos subject
+			int responseCode = Subject.doAs(lc.getSubject(), new PrivilegedExceptionAction<Integer>() {
+				public Integer run() throws Exception {
+					HttpClient httpClient = new HttpClient();
+					return httpClient.executeMethod(new GetMethod(url));
+				}
+			});
+			System.out.println("Reponse code: " + responseCode);
+		} catch (Exception e) {
+			e.printStackTrace();
+		}
+	}
+
+}
--- /dev/null
+package org.argeo.slc.cms.httpclient3;
+
+import org.apache.commons.httpclient.Credentials;
+
+/**
+ * Empty marker credentials returned by {@link HttpCredentialProvider} for the
+ * SPNEGO scheme; {@link SpnegoAuthScheme} derives the actual token itself.
+ */
+public class SpnegoCredentials implements Credentials {
+
+}
--- /dev/null
+SINGLE_USER {
+ com.sun.security.auth.module.Krb5LoginModule required
+ useTicketCache=true
+ debug=true;
+};
+
+com.sun.security.jgss.krb5.initiate {
+ com.sun.security.auth.module.Krb5LoginModule
+ required useTicketCache=true;
+};
\ No newline at end of file
--- /dev/null
+package org.argeo.slc.cms.test;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+/** Smoke test of a CMS deployment. */
+public class CmsSmokeTest {
+
+	public static void main(String[] args) throws IOException {
+		// instance data directory: first argument, or a fresh temp directory
+		Path instanceData;
+		if (args.length > 0) {
+			instanceData = Paths.get(args[0]);
+		} else {
+			instanceData = Files.createTempDirectory("cms-test");
+		}
+		// NOTE(review): instanceData is computed but never used — this looks
+		// like a work-in-progress stub.
+
+	}
+
+}
-Subproject commit 3dc9a223f1177b4eb34ccac1e96a354d4dd8ee25
+Subproject commit a261043de5f9d90373bb1e7f395e3371ba9e67ef
--- /dev/null
+major=2
+minor=3
+micro=4
+qualifier=.next
\ No newline at end of file