+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11"/>
- <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
- <classpathentry kind="src" path="src"/>
- <classpathentry kind="output" path="bin"/>
-</classpath>
+++ /dev/null
-/bin/
-/target/
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>org.argeo.cms.integration</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.jdt.core.javabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.ManifestBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.SchemaBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.eclipse.pde.PluginNature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- </natures>
-</projectDescription>
+++ /dev/null
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.annotation.inheritNullAnnotations=disabled
-org.eclipse.jdt.core.compiler.annotation.missingNonNullByDefaultAnnotation=ignore
-org.eclipse.jdt.core.compiler.annotation.nonnull=org.eclipse.jdt.annotation.NonNull
-org.eclipse.jdt.core.compiler.annotation.nonnull.secondary=
-org.eclipse.jdt.core.compiler.annotation.nonnullbydefault=org.eclipse.jdt.annotation.NonNullByDefault
-org.eclipse.jdt.core.compiler.annotation.nonnullbydefault.secondary=
-org.eclipse.jdt.core.compiler.annotation.nullable=org.eclipse.jdt.annotation.Nullable
-org.eclipse.jdt.core.compiler.annotation.nullable.secondary=
-org.eclipse.jdt.core.compiler.annotation.nullanalysis=disabled
-org.eclipse.jdt.core.compiler.problem.APILeak=warning
-org.eclipse.jdt.core.compiler.problem.annotatedTypeArgumentToUnannotated=info
-org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=warning
-org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
-org.eclipse.jdt.core.compiler.problem.comparingIdentical=warning
-org.eclipse.jdt.core.compiler.problem.deadCode=warning
-org.eclipse.jdt.core.compiler.problem.deprecation=warning
-org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
-org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
-org.eclipse.jdt.core.compiler.problem.discouragedReference=warning
-org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore
-org.eclipse.jdt.core.compiler.problem.explicitlyClosedAutoCloseable=ignore
-org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore
-org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled
-org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore
-org.eclipse.jdt.core.compiler.problem.finalParameterBound=warning
-org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=warning
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=warning
-org.eclipse.jdt.core.compiler.problem.includeNullInfoFromAsserts=disabled
-org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=warning
-org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=warning
-org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore
-org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore
-org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=warning
-org.eclipse.jdt.core.compiler.problem.missingDefaultCase=ignore
-org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingEnumCaseDespiteDefault=disabled
-org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=ignore
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore
-org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotationForInterfaceMethodImplementation=enabled
-org.eclipse.jdt.core.compiler.problem.missingSerialVersion=warning
-org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignore
-org.eclipse.jdt.core.compiler.problem.noEffectAssignment=warning
-org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=warning
-org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
-org.eclipse.jdt.core.compiler.problem.nonnullParameterAnnotationDropped=warning
-org.eclipse.jdt.core.compiler.problem.nonnullTypeVariableFromLegacyInvocation=warning
-org.eclipse.jdt.core.compiler.problem.nullAnnotationInferenceConflict=error
-org.eclipse.jdt.core.compiler.problem.nullReference=warning
-org.eclipse.jdt.core.compiler.problem.nullSpecViolation=error
-org.eclipse.jdt.core.compiler.problem.nullUncheckedConversion=warning
-org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=warning
-org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.pessimisticNullAnalysisForFreeTypeVariables=warning
-org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore
-org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore
-org.eclipse.jdt.core.compiler.problem.potentiallyUnclosedCloseable=ignore
-org.eclipse.jdt.core.compiler.problem.rawTypeReference=warning
-org.eclipse.jdt.core.compiler.problem.redundantNullAnnotation=warning
-org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=ignore
-org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore
-org.eclipse.jdt.core.compiler.problem.reportMethodCanBePotentiallyStatic=ignore
-org.eclipse.jdt.core.compiler.problem.reportMethodCanBeStatic=ignore
-org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled
-org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=warning
-org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=disabled
-org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
-org.eclipse.jdt.core.compiler.problem.suppressWarningsNotFullyAnalysed=info
-org.eclipse.jdt.core.compiler.problem.syntacticNullAnalysisForFields=disabled
-org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore
-org.eclipse.jdt.core.compiler.problem.terminalDeprecation=warning
-org.eclipse.jdt.core.compiler.problem.typeParameterHiding=warning
-org.eclipse.jdt.core.compiler.problem.unavoidableGenericTypeProblems=enabled
-org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=warning
-org.eclipse.jdt.core.compiler.problem.unclosedCloseable=warning
-org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=ignore
-org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.unlikelyCollectionMethodArgumentType=warning
-org.eclipse.jdt.core.compiler.problem.unlikelyCollectionMethodArgumentTypeStrict=disabled
-org.eclipse.jdt.core.compiler.problem.unlikelyEqualsArgumentType=info
-org.eclipse.jdt.core.compiler.problem.unnecessaryElse=ignore
-org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=ignore
-org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore
-org.eclipse.jdt.core.compiler.problem.unstableAutoModuleName=warning
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=ignore
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionExemptExceptionAndThrowable=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
-org.eclipse.jdt.core.compiler.problem.unusedExceptionParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedImport=warning
-org.eclipse.jdt.core.compiler.problem.unusedLabel=warning
-org.eclipse.jdt.core.compiler.problem.unusedLocal=warning
-org.eclipse.jdt.core.compiler.problem.unusedObjectAllocation=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
-org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled
-org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=warning
-org.eclipse.jdt.core.compiler.problem.unusedTypeParameter=ignore
-org.eclipse.jdt.core.compiler.problem.unusedWarningToken=warning
-org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=warning
+++ /dev/null
-/MANIFEST.MF
+++ /dev/null
-Import-Package:\
-javax.jcr.nodetype,\
-org.apache.commons.logging,\
-org.apache.jackrabbit.*;version="[1,4)",\
-javax.servlet.*;version="[3,5)",\
-*
\ No newline at end of file
+++ /dev/null
-source.. = src/
-output.. = bin/
-bin.includes = META-INF/,\
- .
-additional.bundles = org.apache.sshd.common,\
- org.apache.sshd.core,\
- org.slf4j.api,\
- org.argeo.ext.slf4j
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import org.apache.commons.vfs2.FileObject;
-import org.apache.commons.vfs2.FileSystemManager;
-import org.apache.commons.vfs2.FileSystemOptions;
-import org.apache.commons.vfs2.provider.sftp.SftpFileSystemConfigBuilder;
-
-/**
- * Simplify atomic backups implementation, especially by managing VFS.
- */
-public abstract class AbstractAtomicBackup implements AtomicBackup {
- private String name;
- private String compression = "bz2";
-
- protected abstract void writeBackup(FileObject targetFo);
-
- public AbstractAtomicBackup() {
- }
-
- public AbstractAtomicBackup(String name) {
- this.name = name;
- }
-
- public void init() {
- if (name == null)
- throw new MaintenanceException("Atomic backup name must be set");
- }
-
- public void destroy() {
-
- }
-
- @Override
- public String backup(FileSystemManager fileSystemManager,
- String backupsBase, BackupContext backupContext,
- FileSystemOptions opts) {
- if (name == null)
- throw new MaintenanceException("Atomic backup name must be set");
-
- FileObject targetFo = null;
- try {
- if (backupsBase.startsWith("sftp:"))
- SftpFileSystemConfigBuilder.getInstance()
- .setStrictHostKeyChecking(opts, "no");
- if (compression == null || compression.equals("none"))
- targetFo = fileSystemManager.resolveFile(backupsBase + '/'
- + backupContext.getRelativeFolder() + '/' + name, opts);
- else if (compression.equals("bz2"))
- targetFo = fileSystemManager.resolveFile("bz2:" + backupsBase
- + '/' + backupContext.getRelativeFolder() + '/' + name
- + ".bz2" + "!" + name, opts);
- else if (compression.equals("gz"))
- targetFo = fileSystemManager.resolveFile("gz:" + backupsBase
- + '/' + backupContext.getRelativeFolder() + '/' + name
- + ".gz" + "!" + name, opts);
- else
- throw new MaintenanceException("Unsupported compression "
- + compression);
-
- writeBackup(targetFo);
-
- return targetFo.toString();
- } catch (Exception e) {
- throw new MaintenanceException("Cannot backup " + name + " to "
- + targetFo, e);
- } finally {
- BackupUtils.closeFOQuietly(targetFo);
- }
- }
-
- public void setName(String name) {
- this.name = name;
- }
-
- public String getName() {
- return name;
- }
-
- public void setCompression(String compression) {
- this.compression = compression;
- }
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import org.apache.commons.vfs2.FileSystemManager;
-import org.apache.commons.vfs2.FileSystemOptions;
-
-/** Performs the backup of a single component, typically a database dump */
-public interface AtomicBackup {
- /** Name identifiying this backup */
- public String getName();
-
- /**
- * Retrieves the data of the component in a format that allows to restore
- * the component
- *
- * @param backupContext
- * the context of this backup
- * @return the VFS URI of the generated file or directory
- */
- public String backup(FileSystemManager fileSystemManager,
- String backupsBase, BackupContext backupContext,
- FileSystemOptions opts);
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import java.text.DateFormat;
-import java.util.Date;
-
-/**
- * Transient information of a given backup, centralizing common information such
- * as timestamp and location.
- */
-public interface BackupContext {
- /** Backup date */
- public Date getTimestamp();
-
- /** Formatted backup date */
- public String getTimestampAsString();
-
- /** System name */
- public String getSystemName();
-
- /** Local base */
- public String getRelativeFolder();
-
- /** Date format */
- public DateFormat getDateFormat();
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import org.apache.commons.vfs2.FileSystemException;
-import org.apache.commons.vfs2.impl.DefaultFileSystemManager;
-import org.apache.commons.vfs2.provider.bzip2.Bzip2FileProvider;
-import org.apache.commons.vfs2.provider.ftp.FtpFileProvider;
-import org.apache.commons.vfs2.provider.gzip.GzipFileProvider;
-import org.apache.commons.vfs2.provider.local.DefaultLocalFileProvider;
-import org.apache.commons.vfs2.provider.ram.RamFileProvider;
-import org.apache.commons.vfs2.provider.sftp.SftpFileProvider;
-import org.apache.commons.vfs2.provider.url.UrlFileProvider;
-
-/**
- * Programatically configured VFS file system manager which can be declared as a
- * bean and associated with a life cycle (methods
- * {@link DefaultFileSystemManager#init()} and
- * {@link DefaultFileSystemManager#close()}). Supports bz2, file, ram, gzip,
- * ftp, sftp
- */
-public class BackupFileSystemManager extends DefaultFileSystemManager {
-
- public BackupFileSystemManager() {
- super();
- try {
- addProvider("file", new DefaultLocalFileProvider());
- addProvider("bz2", new Bzip2FileProvider());
- addProvider("ftp", new FtpFileProvider());
- addProvider("sftp", new SftpFileProvider());
- addProvider("gzip", new GzipFileProvider());
- addProvider("ram", new RamFileProvider());
- setDefaultProvider(new UrlFileProvider());
- } catch (FileSystemException e) {
- throw new MaintenanceException("Cannot configure backup file provider", e);
- }
- }
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import java.text.DateFormat;
-
-import org.apache.commons.vfs2.FileSystemManager;
-import org.apache.commons.vfs2.FileSystemOptions;
-
-/** Purges previous backups */
-public interface BackupPurge {
- /**
- * Purge the backups identified by these arguments. Although these are the
- * same fields as a {@link BackupContext} we don't pass it as argument since
- * we want to use this interface to purge remote backups as well (that is,
- * with a different base), or outside the scope of a running backup.
- */
- public void purge(FileSystemManager fileSystemManager, String base,
- String name, DateFormat dateFormat, FileSystemOptions opts);
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import org.apache.commons.vfs2.FileObject;
-
-/** Backup utilities */
-public class BackupUtils {
- /** Close a file object quietly even if it is null or throws an exception. */
- public static void closeFOQuietly(FileObject fo) {
- if (fo != null) {
- try {
- fo.close();
- } catch (Exception e) {
- // silent
- }
- }
- }
-
- /** Prevents instantiation */
- private BackupUtils() {
- }
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-@Deprecated
-class MaintenanceException extends RuntimeException {
- private static final long serialVersionUID = -5770049663929537270L;
-
- public MaintenanceException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public MaintenanceException(String message) {
- super(message);
- }
-
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import org.apache.commons.vfs2.FileObject;
-
-/** Backups a MySQL database using mysqldump. */
-public class MySqlBackup extends OsCallBackup {
- private String mysqldumpLocation = "/usr/bin/mysqldump";
-
- private String dbUser;
- private String dbPassword;
- private String dbName;
-
- public MySqlBackup() {
- }
-
- public MySqlBackup(String dbUser, String dbPassword, String dbName) {
- this.dbUser = dbUser;
- this.dbPassword = dbPassword;
- this.dbName = dbName;
- init();
- }
-
- @Override
- public void init() {
- if (getName() == null)
- setName(dbName + ".mysql");
- super.init();
- }
-
- @Override
- public void writeBackup(FileObject targetFo) {
- if (getCommand() == null)
- setCommand(mysqldumpLocation
- + " --lock-tables --add-locks --add-drop-table"
- + " -u ${dbUser} --password=${dbPassword} --databases ${dbName}");
- getVariables().put("dbUser", dbUser);
- getVariables().put("dbPassword", dbPassword);
- getVariables().put("dbName", dbName);
-
- super.writeBackup(targetFo);
- }
-
- public void setDbUser(String dbUser) {
- this.dbUser = dbUser;
- }
-
- public void setDbPassword(String dbPassword) {
- this.dbPassword = dbPassword;
- }
-
- public void setDbName(String dbName) {
- this.dbName = dbName;
- }
-
- public void setMysqldumpLocation(String mysqldumpLocation) {
- this.mysqldumpLocation = mysqldumpLocation;
- }
-
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import org.apache.commons.vfs2.FileObject;
-
-/** Backups an OpenLDAP server using slapcat */
-public class OpenLdapBackup extends OsCallBackup {
- private String slapcatLocation = "/usr/sbin/slapcat";
- private String slapdConfLocation = "/etc/openldap/slapd.conf";
- private String baseDn;
-
- public OpenLdapBackup() {
- super();
- }
-
- public OpenLdapBackup(String baseDn) {
- super();
- this.baseDn = baseDn;
- }
-
- @Override
- public void writeBackup(FileObject targetFo) {
- if (baseDn == null)
- throw new MaintenanceException("Base DN must be set");
-
- if (getCommand() == null)
- setCommand(slapcatLocation
- + " -f ${slapdConfLocation} -b '${baseDn}'");
- getVariables().put("slapdConfLocation", slapdConfLocation);
- getVariables().put("baseDn", baseDn);
-
- super.writeBackup(targetFo);
- }
-
- public void setSlapcatLocation(String slapcatLocation) {
- this.slapcatLocation = slapcatLocation;
- }
-
- public void setSlapdConfLocation(String slapdConfLocation) {
- this.slapdConfLocation = slapdConfLocation;
- }
-
- public void setBaseDn(String baseDn) {
- this.baseDn = baseDn;
- }
-
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import java.io.ByteArrayOutputStream;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.exec.DefaultExecutor;
-import org.apache.commons.exec.ExecuteException;
-import org.apache.commons.exec.ExecuteStreamHandler;
-import org.apache.commons.exec.Executor;
-import org.apache.commons.exec.PumpStreamHandler;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.vfs2.FileContent;
-import org.apache.commons.vfs2.FileObject;
-import org.argeo.api.cms.CmsLog;
-
-/**
- * Runs an OS command and save its standard output as a file. Typically used for
- * MySQL or OpenLDAP dumps.
- */
-public class OsCallBackup extends AbstractAtomicBackup {
- private final static CmsLog log = CmsLog.getLog(OsCallBackup.class);
-
- private String command;
- private Map<String, String> variables = new HashMap<String, String>();
- private Executor executor = new DefaultExecutor();
-
- private Map<String, String> environment = new HashMap<String, String>();
-
- /** Name of the sudo user, root if "", not sudo if null */
- private String sudo = null;
-
- public OsCallBackup() {
- }
-
- public OsCallBackup(String name) {
- super(name);
- }
-
- public OsCallBackup(String name, String command) {
- super(name);
- this.command = command;
- }
-
- @Override
- public void writeBackup(FileObject targetFo) {
- String commandToUse = command;
-
- // sudo
- if (sudo != null) {
- if (sudo.equals(""))
- commandToUse = "sudo " + commandToUse;
- else
- commandToUse = "sudo -u " + sudo + " " + commandToUse;
- }
-
- CommandLine commandLine = CommandLine.parse(commandToUse, variables);
- ByteArrayOutputStream errBos = new ByteArrayOutputStream();
- if (log.isTraceEnabled())
- log.trace(commandLine.toString());
-
- try {
- // stdout
- FileContent targetContent = targetFo.getContent();
- // stderr
- ExecuteStreamHandler streamHandler = new PumpStreamHandler(targetContent.getOutputStream(), errBos);
- executor.setStreamHandler(streamHandler);
- executor.execute(commandLine, environment);
- } catch (ExecuteException e) {
- byte[] err = errBos.toByteArray();
- String errStr = new String(err);
- throw new MaintenanceException("Process " + commandLine + " failed (" + e.getExitValue() + "): " + errStr, e);
- } catch (Exception e) {
- byte[] err = errBos.toByteArray();
- String errStr = new String(err);
- throw new MaintenanceException("Process " + commandLine + " failed: " + errStr, e);
- } finally {
- IOUtils.closeQuietly(errBos);
- }
- }
-
- public void setCommand(String command) {
- this.command = command;
- }
-
- protected String getCommand() {
- return command;
- }
-
- /**
- * A reference to the environment variables that will be passed to the
- * process. Empty by default.
- */
- protected Map<String, String> getEnvironment() {
- return environment;
- }
-
- protected Map<String, String> getVariables() {
- return variables;
- }
-
- public void setVariables(Map<String, String> variables) {
- this.variables = variables;
- }
-
- public void setExecutor(Executor executor) {
- this.executor = executor;
- }
-
- public void setSudo(String sudo) {
- this.sudo = sudo;
- }
-
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import org.apache.commons.vfs2.FileObject;
-
-/** Backups a PostgreSQL database using pg_dump. */
-public class PostgreSqlBackup extends OsCallBackup {
- /**
- * PostgreSQL password environment variable (see
- * http://stackoverflow.com/questions
- * /2893954/how-to-pass-in-password-to-pg-dump)
- */
- protected final static String PGPASSWORD = "PGPASSWORD";
-
- private String pgDumpLocation = "/usr/bin/pg_dump";
-
- private String dbUser;
- private String dbPassword;
- private String dbName;
-
- public PostgreSqlBackup() {
- super();
- }
-
- public PostgreSqlBackup(String dbUser, String dbPassword, String dbName) {
- this.dbUser = dbUser;
- this.dbPassword = dbPassword;
- this.dbName = dbName;
- init();
- }
-
- @Override
- public void init() {
- // disable compression since pg_dump is used with -Fc option
- setCompression(null);
-
- if (getName() == null)
- setName(dbName + ".pgdump");
- super.init();
- }
-
- @Override
- public void writeBackup(FileObject targetFo) {
- if (getCommand() == null) {
- getEnvironment().put(PGPASSWORD, dbPassword);
- setCommand(pgDumpLocation + " -Fc" + " -U ${dbUser} ${dbName}");
- }
- getVariables().put("dbUser", dbUser);
- getVariables().put("dbPassword", dbPassword);
- getVariables().put("dbName", dbName);
-
- super.writeBackup(targetFo);
- }
-
- public void setDbUser(String dbUser) {
- this.dbUser = dbUser;
- }
-
- public void setDbPassword(String dbPassword) {
- this.dbPassword = dbPassword;
- }
-
- public void setDbName(String dbName) {
- this.dbName = dbName;
- }
-
- public void setPgDumpLocation(String mysqldumpLocation) {
- this.pgDumpLocation = mysqldumpLocation;
- }
-
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.apache.commons.vfs2.FileSystemManager;
-
-/** Simple implementation of a backup context */
-public class SimpleBackupContext implements BackupContext {
- private DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_HHmm");
- private final Date timestamp;
- private final String name;
-
- private final FileSystemManager fileSystemManager;
-
- public SimpleBackupContext(FileSystemManager fileSystemManager,
- String backupsBase, String name) {
- this.name = name;
- this.timestamp = new Date();
- this.fileSystemManager = fileSystemManager;
- }
-
- public Date getTimestamp() {
- return timestamp;
- }
-
- public String getTimestampAsString() {
- return dateFormat.format(timestamp);
- }
-
- public String getSystemName() {
- return name;
- }
-
- public String getRelativeFolder() {
- return name + '/' + getTimestampAsString();
- }
-
- public DateFormat getDateFormat() {
- return dateFormat;
- }
-
- public FileSystemManager getFileSystemManager() {
- return fileSystemManager;
- }
-
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import java.text.DateFormat;
-import java.time.Period;
-import java.time.ZoneId;
-import java.time.ZonedDateTime;
-import java.util.Date;
-import java.util.SortedMap;
-import java.util.TreeMap;
-
-import org.apache.commons.vfs2.FileObject;
-import org.apache.commons.vfs2.FileSystemManager;
-import org.apache.commons.vfs2.FileSystemOptions;
-import org.apache.commons.vfs2.Selectors;
-import org.argeo.api.cms.CmsLog;
-
-/** Simple backup purge which keeps backups only for a given number of days */
-public class SimpleBackupPurge implements BackupPurge {
- private final static CmsLog log = CmsLog.getLog(SimpleBackupPurge.class);
-
- private Integer daysKept = 30;
-
- @Override
- public void purge(FileSystemManager fileSystemManager, String base, String name, DateFormat dateFormat,
- FileSystemOptions opts) {
- try {
- ZonedDateTime nowDt = ZonedDateTime.now();
- FileObject baseFo = fileSystemManager.resolveFile(base + '/' + name, opts);
-
- SortedMap<ZonedDateTime, FileObject> toDelete = new TreeMap<ZonedDateTime, FileObject>();
- int backupCount = 0;
-
- // make sure base dir exists
- baseFo.createFolder();
-
- // scan backups and list those which should be deleted
- for (FileObject backupFo : baseFo.getChildren()) {
- String backupName = backupFo.getName().getBaseName();
- Date backupDate = dateFormat.parse(backupName);
- backupCount++;
- ZonedDateTime backupDt = ZonedDateTime.ofInstant(backupDate.toInstant(), ZoneId.systemDefault());
- Period sinceThen = Period.between(backupDt.toLocalDate(), nowDt.toLocalDate());
- // new Period(backupDt, nowDt);
- int days = sinceThen.getDays();
- // int days = sinceThen.getMinutes();
- if (days > daysKept) {
- toDelete.put(backupDt, backupFo);
- }
- }
-
- if (toDelete.size() != 0 && toDelete.size() == backupCount) {
- // all backups would be deleted
- // but we want to keep at least one
- ZonedDateTime lastBackupDt = toDelete.firstKey();
- FileObject keptFo = toDelete.remove(lastBackupDt);
- log.warn("Backup " + keptFo + " kept although it is older than " + daysKept + " days.");
- }
-
- // delete old backups
- for (FileObject backupFo : toDelete.values()) {
- backupFo.delete(Selectors.SELECT_ALL);
- if (log.isDebugEnabled())
- log.debug("Deleted backup " + backupFo);
- }
- } catch (Exception e) {
- throw new MaintenanceException("Could not purge previous backups", e);
- }
-
- }
-
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import java.io.File;
-
-import org.apache.commons.vfs2.FileObject;
-
-/** Backups a Subversion repository using svnadmin. */
-public class SvnBackup extends OsCallBackup {
- private String svnadminLocation = "/usr/bin/svnadmin";
-
- private String repoLocation;
- private String repoName;
-
- public SvnBackup() {
- }
-
- public SvnBackup(String repoLocation) {
- this.repoLocation = repoLocation;
- init();
- }
-
- @Override
- public void init() {
- // use directory as repo name
- if (repoName == null)
- repoName = new File(repoLocation).getName();
-
- if (getName() == null)
- setName(repoName + ".svndump");
- super.init();
- }
-
- @Override
- public void writeBackup(FileObject targetFo) {
- if (getCommand() == null) {
- setCommand(svnadminLocation + " dump " + " ${repoLocation}");
- }
- getVariables().put("repoLocation", repoLocation);
-
- super.writeBackup(targetFo);
- }
-
- public void setRepoLocation(String repoLocation) {
- this.repoLocation = repoLocation;
- }
-
- public void setRepoName(String repoName) {
- this.repoName = repoName;
- }
-
- public void setSvnadminLocation(String mysqldumpLocation) {
- this.svnadminLocation = mysqldumpLocation;
- }
-
-}
+++ /dev/null
-package org.argeo.maintenance.backup.vfs;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.vfs2.FileObject;
-import org.apache.commons.vfs2.FileSystemException;
-import org.apache.commons.vfs2.FileSystemManager;
-import org.apache.commons.vfs2.FileSystemOptions;
-import org.apache.commons.vfs2.Selectors;
-import org.apache.commons.vfs2.UserAuthenticator;
-import org.apache.commons.vfs2.impl.DefaultFileSystemConfigBuilder;
-import org.argeo.api.cms.CmsLog;
-import org.argeo.util.LangUtils;
-
-/**
- * Combines multiple backups and transfer them to a remote location. Purges
- * remote and local data based on certain criteria.
- */
-public class SystemBackup implements Runnable {
- private final static CmsLog log = CmsLog.getLog(SystemBackup.class);
-
- private FileSystemManager fileSystemManager;
- private UserAuthenticator userAuthenticator = null;
-
- private String backupsBase;
- private String systemName;
-
- private List<AtomicBackup> atomicBackups = new ArrayList<AtomicBackup>();
- private BackupPurge backupPurge = new SimpleBackupPurge();
-
- private Map<String, UserAuthenticator> remoteBases = new HashMap<String, UserAuthenticator>();
-
- @Override
- public void run() {
- if (atomicBackups.size() == 0)
- throw new MaintenanceException("No atomic backup listed");
- List<String> failures = new ArrayList<String>();
-
- SimpleBackupContext backupContext = new SimpleBackupContext(fileSystemManager, backupsBase, systemName);
-
- // purge older backups
- FileSystemOptions opts = new FileSystemOptions();
- try {
- DefaultFileSystemConfigBuilder.getInstance().setUserAuthenticator(opts, userAuthenticator);
- } catch (Exception e) {
- throw new MaintenanceException("Cannot create authentication", e);
- }
-
- try {
-
- backupPurge.purge(fileSystemManager, backupsBase, systemName, backupContext.getDateFormat(), opts);
- } catch (Exception e) {
- failures.add("Purge " + backupsBase + " failed: " + e.getMessage());
- log.error("Purge of " + backupsBase + " failed", e);
- }
-
- // perform backup
- for (AtomicBackup atomickBackup : atomicBackups) {
- try {
- String target = atomickBackup.backup(fileSystemManager, backupsBase, backupContext, opts);
- if (log.isDebugEnabled())
- log.debug("Performed backup " + target);
- } catch (Exception e) {
- String msg = "Atomic backup " + atomickBackup.getName() + " failed: "
- + LangUtils.chainCausesMessages(e);
- failures.add(msg);
- log.error(msg);
- if (log.isTraceEnabled())
- log.trace("Stacktrace of atomic backup " + atomickBackup.getName() + " failure.", e);
- }
- }
-
- // dispatch to remote
- for (String remoteBase : remoteBases.keySet()) {
- FileObject localBaseFo = null;
- FileObject remoteBaseFo = null;
- UserAuthenticator auth = remoteBases.get(remoteBase);
-
- // authentication
- FileSystemOptions remoteOpts = new FileSystemOptions();
- try {
- DefaultFileSystemConfigBuilder.getInstance().setUserAuthenticator(remoteOpts, auth);
- backupPurge.purge(fileSystemManager, remoteBase, systemName, backupContext.getDateFormat(), remoteOpts);
- } catch (Exception e) {
- failures.add("Purge " + remoteBase + " failed: " + e.getMessage());
- log.error("Cannot purge " + remoteBase, e);
- }
-
- try {
- localBaseFo = fileSystemManager.resolveFile(backupsBase + '/' + backupContext.getRelativeFolder(),
- opts);
- remoteBaseFo = fileSystemManager.resolveFile(remoteBase + '/' + backupContext.getRelativeFolder(),
- remoteOpts);
- remoteBaseFo.copyFrom(localBaseFo, Selectors.SELECT_ALL);
- if (log.isDebugEnabled())
- log.debug("Copied backup to " + remoteBaseFo + " from " + localBaseFo);
- // }
- } catch (Exception e) {
- failures.add("Dispatch to " + remoteBase + " failed: " + e.getMessage());
- log.error("Cannot dispatch backups from " + backupContext.getRelativeFolder() + " to " + remoteBase, e);
- }
- BackupUtils.closeFOQuietly(localBaseFo);
- BackupUtils.closeFOQuietly(remoteBaseFo);
- }
-
- int failureCount = 0;
- if (failures.size() > 0) {
- StringBuffer buf = new StringBuffer();
- for (String failure : failures) {
- buf.append('\n').append(failureCount).append(" - ").append(failure);
- failureCount++;
- }
- throw new MaintenanceException(failureCount + " error(s) when running the backup,"
- + " check the logs and the backups as soon as possible." + buf);
- }
- }
-
- public void setFileSystemManager(FileSystemManager fileSystemManager) {
- this.fileSystemManager = fileSystemManager;
- }
-
- public void setBackupsBase(String backupsBase) {
- this.backupsBase = backupsBase;
- }
-
- public void setSystemName(String name) {
- this.systemName = name;
- }
-
- public void setAtomicBackups(List<AtomicBackup> atomicBackups) {
- this.atomicBackups = atomicBackups;
- }
-
- public void setBackupPurge(BackupPurge backupPurge) {
- this.backupPurge = backupPurge;
- }
-
- public void setUserAuthenticator(UserAuthenticator userAuthenticator) {
- this.userAuthenticator = userAuthenticator;
- }
-
- public void setRemoteBases(Map<String, UserAuthenticator> remoteBases) {
- this.remoteBases = remoteBases;
- }
-
- // public static void main(String args[]) {
- // while (true) {
- // try {
- // StandardFileSystemManager fsm = new StandardFileSystemManager();
- // fsm.init();
- //
- // SystemBackup systemBackup = new SystemBackup();
- // systemBackup.setSystemName("mySystem");
- // systemBackup
- // .setBackupsBase("/home/mbaudier/dev/src/commons/server/runtime/org.argeo.server.core/target");
- // systemBackup.setFileSystemManager(fsm);
- //
- // List<AtomicBackup> atomicBackups = new ArrayList<AtomicBackup>();
- //
- // MySqlBackup mySqlBackup = new MySqlBackup("root", "", "test");
- // atomicBackups.add(mySqlBackup);
- // PostgreSqlBackup postgreSqlBackup = new PostgreSqlBackup(
- // "argeo", "argeo", "gis_template");
- // atomicBackups.add(postgreSqlBackup);
- // SvnBackup svnBackup = new SvnBackup(
- // "/home/mbaudier/tmp/testsvnrepo");
- // atomicBackups.add(svnBackup);
- //
- // systemBackup.setAtomicBackups(atomicBackups);
- //
- // Map<String, UserAuthenticator> remoteBases = new HashMap<String,
- // UserAuthenticator>();
- // StaticUserAuthenticator userAuthenticator = new StaticUserAuthenticator(
- // null, "demo", "demo");
- // remoteBases.put("sftp://localhost/home/mbaudier/test",
- // userAuthenticator);
- // systemBackup.setRemoteBases(remoteBases);
- //
- // systemBackup.run();
- //
- // fsm.close();
- // } catch (FileSystemException e) {
- // // TODO Auto-generated catch block
- // e.printStackTrace();
- // System.exit(1);
- // }
- //
- // // wait
- // try {
- // Thread.sleep(120 * 1000);
- // } catch (InterruptedException e) {
- // e.printStackTrace();
- // }
- // }
- // }
-}
+++ /dev/null
-/** Argeo Node backup utilities based on Apache Commons VFS. */
-package org.argeo.maintenance.backup.vfs;
\ No newline at end of file
Import-Package: org.argeo.slc.deploy,\
+org.apache.commons.logging,\
*
\ No newline at end of file
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileObject;
+import org.apache.commons.vfs2.FileSystemManager;
+import org.apache.commons.vfs2.FileSystemOptions;
+import org.apache.commons.vfs2.provider.sftp.SftpFileSystemConfigBuilder;
+
+/**
+ * Simplify atomic backups implementation, especially by managing VFS.
+ */
+public abstract class AbstractAtomicBackup implements AtomicBackup {
+ private String name;
+ private String compression = "bz2";
+
+ protected abstract void writeBackup(FileObject targetFo);
+
+ public AbstractAtomicBackup() {
+ }
+
+ public AbstractAtomicBackup(String name) {
+ this.name = name;
+ }
+
+ public void init() {
+ if (name == null)
+ throw new MaintenanceException("Atomic backup name must be set");
+ }
+
+ public void destroy() {
+
+ }
+
+ @Override
+ public String backup(FileSystemManager fileSystemManager,
+ String backupsBase, BackupContext backupContext,
+ FileSystemOptions opts) {
+ if (name == null)
+ throw new MaintenanceException("Atomic backup name must be set");
+
+ FileObject targetFo = null;
+ try {
+ if (backupsBase.startsWith("sftp:"))
+ SftpFileSystemConfigBuilder.getInstance()
+ .setStrictHostKeyChecking(opts, "no");
+ if (compression == null || compression.equals("none"))
+ targetFo = fileSystemManager.resolveFile(backupsBase + '/'
+ + backupContext.getRelativeFolder() + '/' + name, opts);
+ else if (compression.equals("bz2"))
+ targetFo = fileSystemManager.resolveFile("bz2:" + backupsBase
+ + '/' + backupContext.getRelativeFolder() + '/' + name
+ + ".bz2" + "!" + name, opts);
+ else if (compression.equals("gz"))
+ targetFo = fileSystemManager.resolveFile("gz:" + backupsBase
+ + '/' + backupContext.getRelativeFolder() + '/' + name
+ + ".gz" + "!" + name, opts);
+ else
+ throw new MaintenanceException("Unsupported compression "
+ + compression);
+
+ writeBackup(targetFo);
+
+ return targetFo.toString();
+ } catch (Exception e) {
+ throw new MaintenanceException("Cannot backup " + name + " to "
+ + targetFo, e);
+ } finally {
+ BackupUtils.closeFOQuietly(targetFo);
+ }
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setCompression(String compression) {
+ this.compression = compression;
+ }
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileSystemManager;
+import org.apache.commons.vfs2.FileSystemOptions;
+
/** Performs the backup of a single component, typically a database dump */
public interface AtomicBackup {
	/** Name identifying this backup */
	public String getName();

	/**
	 * Retrieves the data of the component in a format that allows to restore
	 * the component
	 *
	 * @param fileSystemManager
	 *            the VFS file system manager used to resolve the target
	 * @param backupsBase
	 *            the VFS URI of the base of all backups
	 * @param backupContext
	 *            the context of this backup
	 * @param opts
	 *            the file system options (e.g. authentication) for the target
	 * @return the VFS URI of the generated file or directory
	 */
	public String backup(FileSystemManager fileSystemManager,
			String backupsBase, BackupContext backupContext,
			FileSystemOptions opts);
}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.text.DateFormat;
+import java.util.Date;
+
/**
 * Transient information of a given backup, centralizing common information such
 * as timestamp and location.
 */
public interface BackupContext {
	/** Backup date */
	public Date getTimestamp();

	/** The backup date, formatted with {@link #getDateFormat()} */
	public String getTimestampAsString();

	/** Name of the system being backed up */
	public String getSystemName();

	/** Folder of this backup, relative to the backups base */
	public String getRelativeFolder();

	/** Date format used to name (and later parse) backup folders */
	public DateFormat getDateFormat();
}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileSystemException;
+import org.apache.commons.vfs2.impl.DefaultFileSystemManager;
+import org.apache.commons.vfs2.provider.bzip2.Bzip2FileProvider;
+import org.apache.commons.vfs2.provider.ftp.FtpFileProvider;
+import org.apache.commons.vfs2.provider.gzip.GzipFileProvider;
+import org.apache.commons.vfs2.provider.local.DefaultLocalFileProvider;
+import org.apache.commons.vfs2.provider.ram.RamFileProvider;
+import org.apache.commons.vfs2.provider.sftp.SftpFileProvider;
+import org.apache.commons.vfs2.provider.url.UrlFileProvider;
+
+/**
+ * Programatically configured VFS file system manager which can be declared as a
+ * bean and associated with a life cycle (methods
+ * {@link DefaultFileSystemManager#init()} and
+ * {@link DefaultFileSystemManager#close()}). Supports bz2, file, ram, gzip,
+ * ftp, sftp
+ */
+public class BackupFileSystemManager extends DefaultFileSystemManager {
+
+ public BackupFileSystemManager() {
+ super();
+ try {
+ addProvider("file", new DefaultLocalFileProvider());
+ addProvider("bz2", new Bzip2FileProvider());
+ addProvider("ftp", new FtpFileProvider());
+ addProvider("sftp", new SftpFileProvider());
+ addProvider("gzip", new GzipFileProvider());
+ addProvider("ram", new RamFileProvider());
+ setDefaultProvider(new UrlFileProvider());
+ } catch (FileSystemException e) {
+ throw new MaintenanceException("Cannot configure backup file provider", e);
+ }
+ }
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.text.DateFormat;
+
+import org.apache.commons.vfs2.FileSystemManager;
+import org.apache.commons.vfs2.FileSystemOptions;
+
/** Purges previous backups */
public interface BackupPurge {
	/**
	 * Purge the backups identified by these arguments. Although these are the
	 * same fields as a {@link BackupContext} we don't pass it as argument since
	 * we want to use this interface to purge remote backups as well (that is,
	 * with a different base), or outside the scope of a running backup.
	 *
	 * @param fileSystemManager
	 *            the VFS file system manager
	 * @param base
	 *            the VFS URI of the backups base to purge
	 * @param name
	 *            the system name (sub-folder of the base)
	 * @param dateFormat
	 *            format used to parse backup folder names as dates
	 * @param opts
	 *            file system options (e.g. authentication)
	 */
	public void purge(FileSystemManager fileSystemManager, String base,
			String name, DateFormat dateFormat, FileSystemOptions opts);
}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileObject;
+
+/** Backup utilities */
+public class BackupUtils {
+ /** Close a file object quietly even if it is null or throws an exception. */
+ public static void closeFOQuietly(FileObject fo) {
+ if (fo != null) {
+ try {
+ fo.close();
+ } catch (Exception e) {
+ // silent
+ }
+ }
+ }
+
+ /** Prevents instantiation */
+ private BackupUtils() {
+ }
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
/** Generic maintenance exception. Kept only for backward compatibility. */
@Deprecated
class MaintenanceException extends RuntimeException {
	private static final long serialVersionUID = -5770049663929537270L;

	public MaintenanceException(String message) {
		super(message);
	}

	public MaintenanceException(String message, Throwable cause) {
		super(message, cause);
	}
}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileObject;
+
+/** Backups a MySQL database using mysqldump. */
+public class MySqlBackup extends OsCallBackup {
+ private String mysqldumpLocation = "/usr/bin/mysqldump";
+
+ private String dbUser;
+ private String dbPassword;
+ private String dbName;
+
+ public MySqlBackup() {
+ }
+
+ public MySqlBackup(String dbUser, String dbPassword, String dbName) {
+ this.dbUser = dbUser;
+ this.dbPassword = dbPassword;
+ this.dbName = dbName;
+ init();
+ }
+
+ @Override
+ public void init() {
+ if (getName() == null)
+ setName(dbName + ".mysql");
+ super.init();
+ }
+
+ @Override
+ public void writeBackup(FileObject targetFo) {
+ if (getCommand() == null)
+ setCommand(mysqldumpLocation
+ + " --lock-tables --add-locks --add-drop-table"
+ + " -u ${dbUser} --password=${dbPassword} --databases ${dbName}");
+ getVariables().put("dbUser", dbUser);
+ getVariables().put("dbPassword", dbPassword);
+ getVariables().put("dbName", dbName);
+
+ super.writeBackup(targetFo);
+ }
+
+ public void setDbUser(String dbUser) {
+ this.dbUser = dbUser;
+ }
+
+ public void setDbPassword(String dbPassword) {
+ this.dbPassword = dbPassword;
+ }
+
+ public void setDbName(String dbName) {
+ this.dbName = dbName;
+ }
+
+ public void setMysqldumpLocation(String mysqldumpLocation) {
+ this.mysqldumpLocation = mysqldumpLocation;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileObject;
+
+/** Backups an OpenLDAP server using slapcat */
+public class OpenLdapBackup extends OsCallBackup {
+ private String slapcatLocation = "/usr/sbin/slapcat";
+ private String slapdConfLocation = "/etc/openldap/slapd.conf";
+ private String baseDn;
+
+ public OpenLdapBackup() {
+ super();
+ }
+
+ public OpenLdapBackup(String baseDn) {
+ super();
+ this.baseDn = baseDn;
+ }
+
+ @Override
+ public void writeBackup(FileObject targetFo) {
+ if (baseDn == null)
+ throw new MaintenanceException("Base DN must be set");
+
+ if (getCommand() == null)
+ setCommand(slapcatLocation
+ + " -f ${slapdConfLocation} -b '${baseDn}'");
+ getVariables().put("slapdConfLocation", slapdConfLocation);
+ getVariables().put("baseDn", baseDn);
+
+ super.writeBackup(targetFo);
+ }
+
+ public void setSlapcatLocation(String slapcatLocation) {
+ this.slapcatLocation = slapcatLocation;
+ }
+
+ public void setSlapdConfLocation(String slapdConfLocation) {
+ this.slapdConfLocation = slapdConfLocation;
+ }
+
+ public void setBaseDn(String baseDn) {
+ this.baseDn = baseDn;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.io.ByteArrayOutputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.exec.CommandLine;
+import org.apache.commons.exec.DefaultExecutor;
+import org.apache.commons.exec.ExecuteException;
+import org.apache.commons.exec.ExecuteStreamHandler;
+import org.apache.commons.exec.Executor;
+import org.apache.commons.exec.PumpStreamHandler;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.vfs2.FileContent;
+import org.apache.commons.vfs2.FileObject;
+import org.argeo.api.cms.CmsLog;
+
+/**
+ * Runs an OS command and save its standard output as a file. Typically used for
+ * MySQL or OpenLDAP dumps.
+ */
+public class OsCallBackup extends AbstractAtomicBackup {
+ private final static CmsLog log = CmsLog.getLog(OsCallBackup.class);
+
+ private String command;
+ private Map<String, String> variables = new HashMap<String, String>();
+ private Executor executor = new DefaultExecutor();
+
+ private Map<String, String> environment = new HashMap<String, String>();
+
+ /** Name of the sudo user, root if "", not sudo if null */
+ private String sudo = null;
+
+ public OsCallBackup() {
+ }
+
+ public OsCallBackup(String name) {
+ super(name);
+ }
+
+ public OsCallBackup(String name, String command) {
+ super(name);
+ this.command = command;
+ }
+
+ @Override
+ public void writeBackup(FileObject targetFo) {
+ String commandToUse = command;
+
+ // sudo
+ if (sudo != null) {
+ if (sudo.equals(""))
+ commandToUse = "sudo " + commandToUse;
+ else
+ commandToUse = "sudo -u " + sudo + " " + commandToUse;
+ }
+
+ CommandLine commandLine = CommandLine.parse(commandToUse, variables);
+ ByteArrayOutputStream errBos = new ByteArrayOutputStream();
+ if (log.isTraceEnabled())
+ log.trace(commandLine.toString());
+
+ try {
+ // stdout
+ FileContent targetContent = targetFo.getContent();
+ // stderr
+ ExecuteStreamHandler streamHandler = new PumpStreamHandler(targetContent.getOutputStream(), errBos);
+ executor.setStreamHandler(streamHandler);
+ executor.execute(commandLine, environment);
+ } catch (ExecuteException e) {
+ byte[] err = errBos.toByteArray();
+ String errStr = new String(err);
+ throw new MaintenanceException("Process " + commandLine + " failed (" + e.getExitValue() + "): " + errStr, e);
+ } catch (Exception e) {
+ byte[] err = errBos.toByteArray();
+ String errStr = new String(err);
+ throw new MaintenanceException("Process " + commandLine + " failed: " + errStr, e);
+ } finally {
+ IOUtils.closeQuietly(errBos);
+ }
+ }
+
+ public void setCommand(String command) {
+ this.command = command;
+ }
+
+ protected String getCommand() {
+ return command;
+ }
+
+ /**
+ * A reference to the environment variables that will be passed to the
+ * process. Empty by default.
+ */
+ protected Map<String, String> getEnvironment() {
+ return environment;
+ }
+
+ protected Map<String, String> getVariables() {
+ return variables;
+ }
+
+ public void setVariables(Map<String, String> variables) {
+ this.variables = variables;
+ }
+
+ public void setExecutor(Executor executor) {
+ this.executor = executor;
+ }
+
+ public void setSudo(String sudo) {
+ this.sudo = sudo;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import org.apache.commons.vfs2.FileObject;
+
+/** Backups a PostgreSQL database using pg_dump. */
+public class PostgreSqlBackup extends OsCallBackup {
+ /**
+ * PostgreSQL password environment variable (see
+ * http://stackoverflow.com/questions
+ * /2893954/how-to-pass-in-password-to-pg-dump)
+ */
+ protected final static String PGPASSWORD = "PGPASSWORD";
+
+ private String pgDumpLocation = "/usr/bin/pg_dump";
+
+ private String dbUser;
+ private String dbPassword;
+ private String dbName;
+
+ public PostgreSqlBackup() {
+ super();
+ }
+
+ public PostgreSqlBackup(String dbUser, String dbPassword, String dbName) {
+ this.dbUser = dbUser;
+ this.dbPassword = dbPassword;
+ this.dbName = dbName;
+ init();
+ }
+
+ @Override
+ public void init() {
+ // disable compression since pg_dump is used with -Fc option
+ setCompression(null);
+
+ if (getName() == null)
+ setName(dbName + ".pgdump");
+ super.init();
+ }
+
+ @Override
+ public void writeBackup(FileObject targetFo) {
+ if (getCommand() == null) {
+ getEnvironment().put(PGPASSWORD, dbPassword);
+ setCommand(pgDumpLocation + " -Fc" + " -U ${dbUser} ${dbName}");
+ }
+ getVariables().put("dbUser", dbUser);
+ getVariables().put("dbPassword", dbPassword);
+ getVariables().put("dbName", dbName);
+
+ super.writeBackup(targetFo);
+ }
+
+ public void setDbUser(String dbUser) {
+ this.dbUser = dbUser;
+ }
+
+ public void setDbPassword(String dbPassword) {
+ this.dbPassword = dbPassword;
+ }
+
+ public void setDbName(String dbName) {
+ this.dbName = dbName;
+ }
+
+ public void setPgDumpLocation(String mysqldumpLocation) {
+ this.pgDumpLocation = mysqldumpLocation;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.apache.commons.vfs2.FileSystemManager;
+
+/** Simple implementation of a backup context */
+public class SimpleBackupContext implements BackupContext {
+ private DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_HHmm");
+ private final Date timestamp;
+ private final String name;
+
+ private final FileSystemManager fileSystemManager;
+
+ public SimpleBackupContext(FileSystemManager fileSystemManager,
+ String backupsBase, String name) {
+ this.name = name;
+ this.timestamp = new Date();
+ this.fileSystemManager = fileSystemManager;
+ }
+
+ public Date getTimestamp() {
+ return timestamp;
+ }
+
+ public String getTimestampAsString() {
+ return dateFormat.format(timestamp);
+ }
+
+ public String getSystemName() {
+ return name;
+ }
+
+ public String getRelativeFolder() {
+ return name + '/' + getTimestampAsString();
+ }
+
+ public DateFormat getDateFormat() {
+ return dateFormat;
+ }
+
+ public FileSystemManager getFileSystemManager() {
+ return fileSystemManager;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
import java.text.DateFormat;
import java.time.Period;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Date;
import java.util.SortedMap;
import java.util.TreeMap;

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.FileSystemOptions;
import org.apache.commons.vfs2.Selectors;
import org.argeo.api.cms.CmsLog;
+
+/** Simple backup purge which keeps backups only for a given number of days */
+public class SimpleBackupPurge implements BackupPurge {
+ private final static CmsLog log = CmsLog.getLog(SimpleBackupPurge.class);
+
+ private Integer daysKept = 30;
+
+ @Override
+ public void purge(FileSystemManager fileSystemManager, String base, String name, DateFormat dateFormat,
+ FileSystemOptions opts) {
+ try {
+ ZonedDateTime nowDt = ZonedDateTime.now();
+ FileObject baseFo = fileSystemManager.resolveFile(base + '/' + name, opts);
+
+ SortedMap<ZonedDateTime, FileObject> toDelete = new TreeMap<ZonedDateTime, FileObject>();
+ int backupCount = 0;
+
+ // make sure base dir exists
+ baseFo.createFolder();
+
+ // scan backups and list those which should be deleted
+ for (FileObject backupFo : baseFo.getChildren()) {
+ String backupName = backupFo.getName().getBaseName();
+ Date backupDate = dateFormat.parse(backupName);
+ backupCount++;
+ ZonedDateTime backupDt = ZonedDateTime.ofInstant(backupDate.toInstant(), ZoneId.systemDefault());
+ Period sinceThen = Period.between(backupDt.toLocalDate(), nowDt.toLocalDate());
+ // new Period(backupDt, nowDt);
+ int days = sinceThen.getDays();
+ // int days = sinceThen.getMinutes();
+ if (days > daysKept) {
+ toDelete.put(backupDt, backupFo);
+ }
+ }
+
+ if (toDelete.size() != 0 && toDelete.size() == backupCount) {
+ // all backups would be deleted
+ // but we want to keep at least one
+ ZonedDateTime lastBackupDt = toDelete.firstKey();
+ FileObject keptFo = toDelete.remove(lastBackupDt);
+ log.warn("Backup " + keptFo + " kept although it is older than " + daysKept + " days.");
+ }
+
+ // delete old backups
+ for (FileObject backupFo : toDelete.values()) {
+ backupFo.delete(Selectors.SELECT_ALL);
+ if (log.isDebugEnabled())
+ log.debug("Deleted backup " + backupFo);
+ }
+ } catch (Exception e) {
+ throw new MaintenanceException("Could not purge previous backups", e);
+ }
+
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.io.File;
+
+import org.apache.commons.vfs2.FileObject;
+
+/** Backups a Subversion repository using svnadmin. */
+public class SvnBackup extends OsCallBackup {
+ private String svnadminLocation = "/usr/bin/svnadmin";
+
+ private String repoLocation;
+ private String repoName;
+
+ public SvnBackup() {
+ }
+
+ public SvnBackup(String repoLocation) {
+ this.repoLocation = repoLocation;
+ init();
+ }
+
+ @Override
+ public void init() {
+ // use directory as repo name
+ if (repoName == null)
+ repoName = new File(repoLocation).getName();
+
+ if (getName() == null)
+ setName(repoName + ".svndump");
+ super.init();
+ }
+
+ @Override
+ public void writeBackup(FileObject targetFo) {
+ if (getCommand() == null) {
+ setCommand(svnadminLocation + " dump " + " ${repoLocation}");
+ }
+ getVariables().put("repoLocation", repoLocation);
+
+ super.writeBackup(targetFo);
+ }
+
+ public void setRepoLocation(String repoLocation) {
+ this.repoLocation = repoLocation;
+ }
+
+ public void setRepoName(String repoName) {
+ this.repoName = repoName;
+ }
+
+ public void setSvnadminLocation(String mysqldumpLocation) {
+ this.svnadminLocation = mysqldumpLocation;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.backup.vfs;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.vfs2.FileObject;
+import org.apache.commons.vfs2.FileSystemException;
+import org.apache.commons.vfs2.FileSystemManager;
+import org.apache.commons.vfs2.FileSystemOptions;
+import org.apache.commons.vfs2.Selectors;
+import org.apache.commons.vfs2.UserAuthenticator;
+import org.apache.commons.vfs2.impl.DefaultFileSystemConfigBuilder;
+import org.argeo.api.cms.CmsLog;
+import org.argeo.util.LangUtils;
+
+/**
+ * Combines multiple backups and transfer them to a remote location. Purges
+ * remote and local data based on certain criteria.
+ */
+public class SystemBackup implements Runnable {
+ private final static CmsLog log = CmsLog.getLog(SystemBackup.class);
+
+ private FileSystemManager fileSystemManager;
+ private UserAuthenticator userAuthenticator = null;
+
+ private String backupsBase;
+ private String systemName;
+
+ private List<AtomicBackup> atomicBackups = new ArrayList<AtomicBackup>();
+ private BackupPurge backupPurge = new SimpleBackupPurge();
+
+ private Map<String, UserAuthenticator> remoteBases = new HashMap<String, UserAuthenticator>();
+
+ @Override
+ public void run() {
+ if (atomicBackups.size() == 0)
+ throw new MaintenanceException("No atomic backup listed");
+ List<String> failures = new ArrayList<String>();
+
+ SimpleBackupContext backupContext = new SimpleBackupContext(fileSystemManager, backupsBase, systemName);
+
+ // purge older backups
+ FileSystemOptions opts = new FileSystemOptions();
+ try {
+ DefaultFileSystemConfigBuilder.getInstance().setUserAuthenticator(opts, userAuthenticator);
+ } catch (Exception e) {
+ throw new MaintenanceException("Cannot create authentication", e);
+ }
+
+ try {
+
+ backupPurge.purge(fileSystemManager, backupsBase, systemName, backupContext.getDateFormat(), opts);
+ } catch (Exception e) {
+ failures.add("Purge " + backupsBase + " failed: " + e.getMessage());
+ log.error("Purge of " + backupsBase + " failed", e);
+ }
+
+ // perform backup
+ for (AtomicBackup atomickBackup : atomicBackups) {
+ try {
+ String target = atomickBackup.backup(fileSystemManager, backupsBase, backupContext, opts);
+ if (log.isDebugEnabled())
+ log.debug("Performed backup " + target);
+ } catch (Exception e) {
+ String msg = "Atomic backup " + atomickBackup.getName() + " failed: "
+ + LangUtils.chainCausesMessages(e);
+ failures.add(msg);
+ log.error(msg);
+ if (log.isTraceEnabled())
+ log.trace("Stacktrace of atomic backup " + atomickBackup.getName() + " failure.", e);
+ }
+ }
+
+ // dispatch to remote
+ for (String remoteBase : remoteBases.keySet()) {
+ FileObject localBaseFo = null;
+ FileObject remoteBaseFo = null;
+ UserAuthenticator auth = remoteBases.get(remoteBase);
+
+ // authentication
+ FileSystemOptions remoteOpts = new FileSystemOptions();
+ try {
+ DefaultFileSystemConfigBuilder.getInstance().setUserAuthenticator(remoteOpts, auth);
+ backupPurge.purge(fileSystemManager, remoteBase, systemName, backupContext.getDateFormat(), remoteOpts);
+ } catch (Exception e) {
+ failures.add("Purge " + remoteBase + " failed: " + e.getMessage());
+ log.error("Cannot purge " + remoteBase, e);
+ }
+
+ try {
+ localBaseFo = fileSystemManager.resolveFile(backupsBase + '/' + backupContext.getRelativeFolder(),
+ opts);
+ remoteBaseFo = fileSystemManager.resolveFile(remoteBase + '/' + backupContext.getRelativeFolder(),
+ remoteOpts);
+ remoteBaseFo.copyFrom(localBaseFo, Selectors.SELECT_ALL);
+ if (log.isDebugEnabled())
+ log.debug("Copied backup to " + remoteBaseFo + " from " + localBaseFo);
+ // }
+ } catch (Exception e) {
+ failures.add("Dispatch to " + remoteBase + " failed: " + e.getMessage());
+ log.error("Cannot dispatch backups from " + backupContext.getRelativeFolder() + " to " + remoteBase, e);
+ }
+ BackupUtils.closeFOQuietly(localBaseFo);
+ BackupUtils.closeFOQuietly(remoteBaseFo);
+ }
+
+ int failureCount = 0;
+ if (failures.size() > 0) {
+ StringBuffer buf = new StringBuffer();
+ for (String failure : failures) {
+ buf.append('\n').append(failureCount).append(" - ").append(failure);
+ failureCount++;
+ }
+ throw new MaintenanceException(failureCount + " error(s) when running the backup,"
+ + " check the logs and the backups as soon as possible." + buf);
+ }
+ }
+
+ public void setFileSystemManager(FileSystemManager fileSystemManager) {
+ this.fileSystemManager = fileSystemManager;
+ }
+
+ public void setBackupsBase(String backupsBase) {
+ this.backupsBase = backupsBase;
+ }
+
+ public void setSystemName(String name) {
+ this.systemName = name;
+ }
+
+ public void setAtomicBackups(List<AtomicBackup> atomicBackups) {
+ this.atomicBackups = atomicBackups;
+ }
+
+ public void setBackupPurge(BackupPurge backupPurge) {
+ this.backupPurge = backupPurge;
+ }
+
+ public void setUserAuthenticator(UserAuthenticator userAuthenticator) {
+ this.userAuthenticator = userAuthenticator;
+ }
+
+ public void setRemoteBases(Map<String, UserAuthenticator> remoteBases) {
+ this.remoteBases = remoteBases;
+ }
+
+ // public static void main(String args[]) {
+ // while (true) {
+ // try {
+ // StandardFileSystemManager fsm = new StandardFileSystemManager();
+ // fsm.init();
+ //
+ // SystemBackup systemBackup = new SystemBackup();
+ // systemBackup.setSystemName("mySystem");
+ // systemBackup
+ // .setBackupsBase("/home/mbaudier/dev/src/commons/server/runtime/org.argeo.server.core/target");
+ // systemBackup.setFileSystemManager(fsm);
+ //
+ // List<AtomicBackup> atomicBackups = new ArrayList<AtomicBackup>();
+ //
+ // MySqlBackup mySqlBackup = new MySqlBackup("root", "", "test");
+ // atomicBackups.add(mySqlBackup);
+ // PostgreSqlBackup postgreSqlBackup = new PostgreSqlBackup(
+ // "argeo", "argeo", "gis_template");
+ // atomicBackups.add(postgreSqlBackup);
+ // SvnBackup svnBackup = new SvnBackup(
+ // "/home/mbaudier/tmp/testsvnrepo");
+ // atomicBackups.add(svnBackup);
+ //
+ // systemBackup.setAtomicBackups(atomicBackups);
+ //
+ // Map<String, UserAuthenticator> remoteBases = new HashMap<String,
+ // UserAuthenticator>();
+ // StaticUserAuthenticator userAuthenticator = new StaticUserAuthenticator(
+ // null, "demo", "demo");
+ // remoteBases.put("sftp://localhost/home/mbaudier/test",
+ // userAuthenticator);
+ // systemBackup.setRemoteBases(remoteBases);
+ //
+ // systemBackup.run();
+ //
+ // fsm.close();
+ // } catch (FileSystemException e) {
+ // // TODO Auto-generated catch block
+ // e.printStackTrace();
+ // System.exit(1);
+ // }
+ //
+ // // wait
+ // try {
+ // Thread.sleep(120 * 1000);
+ // } catch (InterruptedException e) {
+ // e.printStackTrace();
+ // }
+ // }
+ // }
+}
--- /dev/null
+/** Argeo Node backup utilities based on Apache Commons VFS. */
+package org.argeo.slc.backup.vfs;
\ No newline at end of file