all: osgi
-BUNDLE_PREFIX = org.argeo
-A2_CATEGORY = org.argeo
+BUNDLE_PREFIX = org.argeo.slc
+A2_CATEGORY = org.argeo.slc
BUNDLES = \
org.argeo.slc.api \
-org.argeo.slc.build \
+org.argeo.slc.factory \
-BUILD_CLASSPATH = \
+BUILD_CLASSPATH_FEDORA = \
/usr/share/java/osgi-core/osgi.core.jar:$\
/usr/share/java/osgi-compendium/osgi.cmpn.jar:$\
/usr/share/java/ecj/ecj.jar:$\
/usr/share/java/commons-cli.jar:$\
/usr/share/java/commons-exec.jar:$\
+BUILD_CLASSPATH = \
+/usr/share/java/osgi.core.jar:$\
+/usr/share/java/osgi.cmpn.jar:$\
+/usr/share/java/ecj.jar:$\
+/usr/share/java/bndlib.jar:$\
+/usr/share/java/slf4j-api.jar:$\
+/usr/share/java/commons-io.jar:$\
+/usr/share/java/commons-cli.jar:$\
+/usr/share/java/commons-exec.jar:$\
+
+DISTRIBUTION_CLASSPATH = \
+$(SDK_BUILD_BASE)/a2/org.argeo.slc/org.argeo.slc.api.$(MAJOR).$(MINOR).jar:$\
+$(SDK_BUILD_BASE)/a2/org.argeo.slc/org.argeo.slc.factory.$(MAJOR).$(MINOR).jar:$\
+/usr/share/java/bndlib.jar:$\
+/usr/share/java/slf4j-api.jar
+
# TODO relativize from SDK_SRC_BASE
BUILD_BASE = $(SDK_BUILD_BASE)
+distribution: osgi
+ $(JVM) -cp $(DISTRIBUTION_CLASSPATH) tp/Make.java
+
#
# GENERIC
#
-JVM := /usr/lib/jvm/jre-11/bin/java
-JAVADOC := /usr/lib/jvm/jre-11/bin/javadoc
-ECJ_JAR := /usr/share/java/ecj/ecj.jar
+JVM := /usr/bin/java
+JAVADOC := /usr/bin/javadoc
+ECJ_JAR := /usr/share/java/ecj.jar
BND_TOOL := /usr/bin/bnd
WORKSPACE_BNDS := $(shell cd $(SDK_SRC_BASE) && find cnf -name '*.bnd')
mv $(dir $@)generated/*.jar $(dir $@)bundle.jar
$(BUILD_BASE)/java-compiled : $(JAVA_SRCS)
- $(JVM) -jar $(ECJ_JAR) -11 -nowarn -time -cp $(BUILD_CLASSPATH) \
+ $(JVM) -cp $(ECJ_JAR) org.eclipse.jdt.internal.compiler.batch.Main -11 -nowarn -time -cp $(BUILD_CLASSPATH) \
$(ECJ_SRCS)
touch $@
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11"/>
- <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
- <classpathentry kind="src" path="src"/>
- <classpathentry kind="output" path="bin"/>
-</classpath>
+++ /dev/null
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>org.argeo.slc.build</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.jdt.core.javabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.ManifestBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.pde.SchemaBuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.eclipse.pde.PluginNature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- </natures>
-</projectDescription>
+++ /dev/null
-source.. = src/
-output.. = bin/
-bin.includes = META-INF/,\
- .
-additional.bundles = org.argeo.init,\
- org.slf4j.api,\
- org.argeo.cms.tp
+++ /dev/null
-package org.argeo.slc.build;
-
-import org.argeo.slc.DefaultCategoryNameVersion;
-
-public class A2DistributionUnit extends DefaultCategoryNameVersion {
- private String originVersion;
-
-
-}
+++ /dev/null
-package org.argeo.slc.build;
-
-import static java.lang.System.Logger.Level.DEBUG;
-import static org.argeo.slc.ManifestConstants.BUNDLE_LICENSE;
-import static org.argeo.slc.ManifestConstants.BUNDLE_SYMBOLICNAME;
-import static org.argeo.slc.ManifestConstants.BUNDLE_VERSION;
-import static org.argeo.slc.ManifestConstants.EXPORT_PACKAGE;
-import static org.argeo.slc.ManifestConstants.SLC_ORIGIN_M2;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Writer;
-import java.lang.System.Logger;
-import java.lang.System.Logger.Level;
-import java.net.URL;
-import java.nio.file.DirectoryStream;
-import java.nio.file.FileSystem;
-import java.nio.file.FileSystems;
-import java.nio.file.FileVisitResult;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.PathMatcher;
-import java.nio.file.Paths;
-import java.nio.file.SimpleFileVisitor;
-import java.nio.file.attribute.BasicFileAttributes;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.TreeMap;
-import java.util.jar.Attributes;
-import java.util.jar.JarEntry;
-import java.util.jar.JarInputStream;
-import java.util.jar.JarOutputStream;
-import java.util.jar.Manifest;
-
-import org.argeo.slc.DefaultNameVersion;
-import org.argeo.slc.ManifestConstants;
-import org.argeo.slc.NameVersion;
-import org.argeo.slc.build.m2.DefaultArtifact;
-import org.argeo.slc.build.m2.MavenConventionsUtils;
-
-import aQute.bnd.osgi.Analyzer;
-import aQute.bnd.osgi.Jar;
-
-public class A2Factory {
- private final static Logger logger = System.getLogger(A2Factory.class.getName());
-
- private final static String COMMON_BND = "common.bnd";
-
- private Path originBase;
- private Path factoryBase;
-
- /** key is URI prefix, value list of base URLs */
- private Map<String, List<String>> mirrors = new HashMap<String, List<String>>();
-
- public A2Factory(Path originBase, Path factoryBase) {
- super();
- this.originBase = originBase;
- this.factoryBase = factoryBase;
-
- // TODO make it configurable
- List<String> eclipseMirrors = new ArrayList<>();
- eclipseMirrors.add("https://archive.eclipse.org/");
-
- mirrors.put("http://www.eclipse.org/downloads", eclipseMirrors);
- }
-
- public void processCategory(Path targetCategoryBase) {
- try {
- DirectoryStream<Path> bnds = Files.newDirectoryStream(targetCategoryBase,
- (p) -> p.getFileName().toString().endsWith(".bnd")
- && !p.getFileName().toString().equals(COMMON_BND));
- for (Path p : bnds) {
- processSingleM2ArtifactDistributionUnit(p);
- }
-
- DirectoryStream<Path> dus = Files.newDirectoryStream(targetCategoryBase, (p) -> Files.isDirectory(p));
- for (Path duDir : dus) {
- processM2BasedDistributionUnit(duDir);
- }
- } catch (IOException e) {
- throw new RuntimeException("Cannot process category " + targetCategoryBase, e);
- }
- }
-
- public void processSingleM2ArtifactDistributionUnit(Path bndFile) {
- try {
- String category = bndFile.getParent().getFileName().toString();
- Path targetCategoryBase = factoryBase.resolve(category);
- Properties fileProps = new Properties();
- try (InputStream in = Files.newInputStream(bndFile)) {
- fileProps.load(in);
- }
-
- String m2Coordinates = fileProps.getProperty(SLC_ORIGIN_M2.toString());
- if (m2Coordinates == null)
- throw new IllegalArgumentException("No M2 coordinates available for " + bndFile);
- DefaultArtifact artifact = new DefaultArtifact(m2Coordinates);
- URL url = MavenConventionsUtils.mavenCentralUrl(artifact);
- Path downloaded = download(url, originBase, artifact.toM2Coordinates() + ".jar");
-
- Path targetBundleDir = processBndJar(downloaded, targetCategoryBase, fileProps, artifact);
-
- downloadAndProcessM2Sources(artifact, targetBundleDir);
-
- createJar(targetBundleDir);
- } catch (Exception e) {
- throw new RuntimeException("Cannot process " + bndFile, e);
- }
- }
-
- public void processM2BasedDistributionUnit(Path duDir) {
- try {
- String category = duDir.getParent().getFileName().toString();
- Path targetCategoryBase = factoryBase.resolve(category);
- Path commonBnd = duDir.resolve(COMMON_BND);
- Properties commonProps = new Properties();
- try (InputStream in = Files.newInputStream(commonBnd)) {
- commonProps.load(in);
- }
-
- String m2Version = commonProps.getProperty(SLC_ORIGIN_M2.toString());
- if (!m2Version.startsWith(":")) {
- throw new IllegalStateException("Only the M2 version can be specified: " + m2Version);
- }
- m2Version = m2Version.substring(1);
-
- // String license = commonProps.getProperty(BUNDLE_LICENSE.toString());
-
- DirectoryStream<Path> ds = Files.newDirectoryStream(duDir,
- (p) -> p.getFileName().toString().endsWith(".bnd")
- && !p.getFileName().toString().equals(COMMON_BND));
- for (Path p : ds) {
- Properties fileProps = new Properties();
- try (InputStream in = Files.newInputStream(p)) {
- fileProps.load(in);
- }
- String m2Coordinates = fileProps.getProperty(SLC_ORIGIN_M2.toString());
- DefaultArtifact artifact = new DefaultArtifact(m2Coordinates);
-
- // temporary rewrite, for migration
- String localLicense = fileProps.getProperty(BUNDLE_LICENSE.toString());
- if (localLicense != null || artifact.getVersion() != null) {
- fileProps.remove(BUNDLE_LICENSE.toString());
- fileProps.put(SLC_ORIGIN_M2.toString(), artifact.getGroupId() + ":" + artifact.getArtifactId());
- try (Writer writer = Files.newBufferedWriter(p)) {
- for (Object key : fileProps.keySet()) {
- String value = fileProps.getProperty(key.toString());
- writer.write(key + ": " + value + '\n');
- }
- logger.log(DEBUG, () -> "Migrated " + p);
- }
- }
-
- artifact.setVersion(m2Version);
- URL url = MavenConventionsUtils.mavenCentralUrl(artifact);
- Path downloaded = download(url, originBase, artifact.toM2Coordinates() + ".jar");
-
- // prepare manifest entries
- Properties mergeProps = new Properties();
- mergeProps.putAll(commonProps);
-
- // Map<String, String> entries = new HashMap<>();
-// for (Object key : commonProps.keySet()) {
-// entries.put(key.toString(), commonProps.getProperty(key.toString()));
-// }
- fileEntries: for (Object key : fileProps.keySet()) {
- if (ManifestConstants.SLC_ORIGIN_M2.toString().equals(key))
- continue fileEntries;
- String value = fileProps.getProperty(key.toString());
- Object previousValue = mergeProps.put(key.toString(), value);
- if (previousValue != null) {
- logger.log(Level.WARNING,
- downloaded + ": " + key + " was " + previousValue + ", overridden with " + value);
- }
- }
- mergeProps.put(ManifestConstants.SLC_ORIGIN_M2.toString(), artifact.toM2Coordinates());
- Path targetBundleDir = processBndJar(downloaded, targetCategoryBase, mergeProps, artifact);
-// logger.log(Level.DEBUG, () -> "Processed " + downloaded);
-
- // sources
- downloadAndProcessM2Sources(artifact, targetBundleDir);
-
- createJar(targetBundleDir);
- }
- } catch (IOException e) {
- throw new RuntimeException("Cannot process " + duDir, e);
- }
-
- }
-
- protected void downloadAndProcessM2Sources(DefaultArtifact artifact, Path targetBundleDir) throws IOException {
- DefaultArtifact sourcesArtifact = new DefaultArtifact(artifact.toM2Coordinates(), "sources");
- URL sourcesUrl = MavenConventionsUtils.mavenCentralUrl(sourcesArtifact);
- Path sourcesDownloaded = download(sourcesUrl, originBase, artifact.toM2Coordinates() + ".sources.jar");
- processM2SourceJar(sourcesDownloaded, targetBundleDir);
- logger.log(Level.DEBUG, () -> "Processed source " + sourcesDownloaded);
-
- }
-
- protected Path processBndJar(Path downloaded, Path targetCategoryBase, Properties fileProps,
- DefaultArtifact artifact) {
-
- try {
- Map<String, String> additionalEntries = new TreeMap<>();
- boolean doNotModify = Boolean.parseBoolean(fileProps
- .getOrDefault(ManifestConstants.SLC_ORIGIN_MANIFEST_NOT_MODIFIED.toString(), "false").toString());
-
- if (doNotModify) {
- fileEntries: for (Object key : fileProps.keySet()) {
- if (ManifestConstants.SLC_ORIGIN_M2.toString().equals(key))
- continue fileEntries;
- String value = fileProps.getProperty(key.toString());
- additionalEntries.put(key.toString(), value);
- }
- } else {
- if (artifact != null) {
- if (!fileProps.containsKey(BUNDLE_SYMBOLICNAME.toString())) {
- fileProps.put(BUNDLE_SYMBOLICNAME.toString(), artifact.getName());
- }
- if (!fileProps.containsKey(BUNDLE_VERSION.toString())) {
- fileProps.put(BUNDLE_VERSION.toString(), artifact.getVersion());
- }
- }
-
- if (!fileProps.containsKey(EXPORT_PACKAGE.toString())) {
- fileProps.put(EXPORT_PACKAGE.toString(),
- "*;version=\"" + fileProps.getProperty(BUNDLE_VERSION.toString()) + "\"");
- }
-// if (!fileProps.contains(IMPORT_PACKAGE.toString())) {
-// fileProps.put(IMPORT_PACKAGE.toString(), "*");
-// }
-
- try (Analyzer bndAnalyzer = new Analyzer()) {
- bndAnalyzer.setProperties(fileProps);
- Jar jar = new Jar(downloaded.toFile());
- bndAnalyzer.setJar(jar);
- Manifest manifest = bndAnalyzer.calcManifest();
-
- keys: for (Object key : manifest.getMainAttributes().keySet()) {
- Object value = manifest.getMainAttributes().get(key);
-
- switch (key.toString()) {
- case "Tool":
- case "Bnd-LastModified":
- case "Created-By":
- continue keys;
- }
- if("Require-Capability".equals(key.toString()) && value.toString().equals("osgi.ee;filter:=\"(&(osgi.ee=JavaSE)(version=1.1))\""))
- continue keys;// hack for very old classes
- additionalEntries.put(key.toString(), value.toString());
- logger.log(DEBUG, () -> key + "=" + value);
-
- }
- }
-
-// try (Builder bndBuilder = new Builder()) {
-// Jar jar = new Jar(downloaded.toFile());
-// bndBuilder.addClasspath(jar);
-// Path targetBundleDir = targetCategoryBase.resolve(artifact.getName() + "." + artifact.getBranch());
-//
-// Jar target = new Jar(targetBundleDir.toFile());
-// bndBuilder.setJar(target);
-// return targetBundleDir;
-// }
- }
- Path targetBundleDir = processBundleJar(downloaded, targetCategoryBase, additionalEntries);
- logger.log(Level.DEBUG, () -> "Processed " + downloaded);
- return targetBundleDir;
- } catch (Exception e) {
- throw new RuntimeException("Cannot BND process " + downloaded, e);
- }
-
- }
-
- protected void processM2SourceJar(Path file, Path targetBundleDir) throws IOException {
- try (JarInputStream jarIn = new JarInputStream(Files.newInputStream(file), false)) {
- Path targetSourceDir = targetBundleDir.resolve("OSGI-OPT/src");
-
- // TODO make it less dangerous?
- if (Files.exists(targetSourceDir)) {
- deleteDirectory(targetSourceDir);
- } else {
- Files.createDirectories(targetSourceDir);
- }
-
- // copy entries
- JarEntry entry;
- entries: while ((entry = jarIn.getNextJarEntry()) != null) {
- if (entry.isDirectory())
- continue entries;
- if (entry.getName().startsWith("META-INF"))// skip META-INF entries
- continue entries;
- Path target = targetSourceDir.resolve(entry.getName());
- Files.createDirectories(target.getParent());
- Files.copy(jarIn, target);
- logger.log(Level.TRACE, () -> "Copied source " + target);
- }
- }
-
- }
-
- public void processEclipseArchive(Path duDir) {
- try {
- String category = duDir.getParent().getFileName().toString();
- Path targetCategoryBase = factoryBase.resolve(category);
- Files.createDirectories(targetCategoryBase);
- Files.createDirectories(originBase);
-
- Path commonBnd = duDir.resolve(COMMON_BND);
- Properties commonProps = new Properties();
- try (InputStream in = Files.newInputStream(commonBnd)) {
- commonProps.load(in);
- }
- Properties includes = new Properties();
- try (InputStream in = Files.newInputStream(duDir.resolve("includes.properties"))) {
- includes.load(in);
- }
- String url = commonProps.getProperty(ManifestConstants.SLC_ORIGIN_URI.toString());
- Path downloaded = tryDownload(url, originBase);
-
- FileSystem zipFs = FileSystems.newFileSystem(downloaded, null);
-
- List<PathMatcher> pathMatchers = new ArrayList<>();
- for (Object pattern : includes.keySet()) {
- PathMatcher pathMatcher = zipFs.getPathMatcher("glob:/" + pattern);
- pathMatchers.add(pathMatcher);
- }
-
- Files.walkFileTree(zipFs.getRootDirectories().iterator().next(), new SimpleFileVisitor<Path>() {
-
- @Override
- public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- pathMatchers: for (PathMatcher pathMatcher : pathMatchers) {
- if (pathMatcher.matches(file)) {
-// Path target = targetBase.resolve(file.getFileName().toString());
-// if (!Files.exists(target)) {
-// Files.copy(file, target);
-// logger.log(Level.DEBUG, () -> "Copied " + target + " from " + downloaded);
-// } else {
-// logger.log(Level.DEBUG, () -> target + " already exists.");
-//
-// }
- if (file.getFileName().toString().contains(".source_")) {
- processEclipseSourceJar(file, targetCategoryBase);
- logger.log(Level.DEBUG, () -> "Processed source " + file);
-
- } else {
- processBundleJar(file, targetCategoryBase, new HashMap<>());
- logger.log(Level.DEBUG, () -> "Processed " + file);
- }
- continue pathMatchers;
- }
- }
- return super.visitFile(file, attrs);
- }
- });
-
- DirectoryStream<Path> dirs = Files.newDirectoryStream(targetCategoryBase, (p) -> Files.isDirectory(p));
- for (Path dir : dirs) {
- createJar(dir);
- }
- } catch (IOException e) {
- throw new RuntimeException("Cannot process " + duDir, e);
- }
-
- }
-
- protected Path processBundleJar(Path file, Path targetBase, Map<String, String> entries) throws IOException {
- NameVersion nameVersion;
- Path targetBundleDir;
- try (JarInputStream jarIn = new JarInputStream(Files.newInputStream(file), false)) {
- Manifest manifest = new Manifest(jarIn.getManifest());
-
- // remove problematic entries in MANIFEST
- manifest.getEntries().clear();
-// Set<String> entriesToDelete = new HashSet<>();
-// for (String key : manifest.getEntries().keySet()) {
-//// logger.log(DEBUG, "## " + key);
-// Attributes attrs = manifest.getAttributes(key);
-// for (Object attrName : attrs.keySet()) {
-//// logger.log(DEBUG, attrName + "=" + attrs.get(attrName));
-// if ("Specification-Version".equals(attrName.toString())
-// || "Implementation-Version".equals(attrName.toString())) {
-// entriesToDelete.add(key);
-//
-// }
-// }
-// }
-// for (String key : entriesToDelete) {
-// manifest.getEntries().remove(key);
-// }
-
- String symbolicNameFromEntries = entries.get(BUNDLE_SYMBOLICNAME.toString());
- String versionFromEntries = entries.get(BUNDLE_VERSION.toString());
-
- if (symbolicNameFromEntries != null && versionFromEntries != null) {
- nameVersion = new DefaultNameVersion(symbolicNameFromEntries, versionFromEntries);
- } else {
- nameVersion = nameVersionFromManifest(manifest);
- if (versionFromEntries != null && !nameVersion.getVersion().equals(versionFromEntries)) {
- logger.log(Level.WARNING, "Original version is " + nameVersion.getVersion()
- + " while new version is " + versionFromEntries);
- }
- }
- targetBundleDir = targetBase.resolve(nameVersion.getName() + "." + nameVersion.getBranch());
-
- // TODO make it less dangerous?
- if (Files.exists(targetBundleDir)) {
- deleteDirectory(targetBundleDir);
- }
-
- // copy entries
- JarEntry entry;
- entries: while ((entry = jarIn.getNextJarEntry()) != null) {
- if (entry.isDirectory())
- continue entries;
- if (entry.getName().endsWith(".RSA") || entry.getName().endsWith(".SF"))
- continue entries;
- Path target = targetBundleDir.resolve(entry.getName());
- Files.createDirectories(target.getParent());
- Files.copy(jarIn, target);
- logger.log(Level.TRACE, () -> "Copied " + target);
- }
-
- // copy MANIFEST
- Path manifestPath = targetBundleDir.resolve("META-INF/MANIFEST.MF");
- Files.createDirectories(manifestPath.getParent());
- for (String key : entries.keySet()) {
- String value = entries.get(key);
- Object previousValue = manifest.getMainAttributes().putValue(key, value);
- if (previousValue != null && !previousValue.equals(value)) {
- logger.log(Level.WARNING,
- file.getFileName() + ": " + key + " was " + previousValue + ", overridden with " + value);
- }
- }
- try (OutputStream out = Files.newOutputStream(manifestPath)) {
- manifest.write(out);
- }
- }
- return targetBundleDir;
- }
-
- protected void processEclipseSourceJar(Path file, Path targetBase) throws IOException {
- // NameVersion nameVersion;
- Path targetBundleDir;
- try (JarInputStream jarIn = new JarInputStream(Files.newInputStream(file), false)) {
- Manifest manifest = jarIn.getManifest();
- // nameVersion = nameVersionFromManifest(manifest);
-
- String[] relatedBundle = manifest.getMainAttributes().getValue("Eclipse-SourceBundle").split(";");
- String version = relatedBundle[1].substring("version=\"".length());
- version = version.substring(0, version.length() - 1);
- NameVersion nameVersion = new DefaultNameVersion(relatedBundle[0], version);
- targetBundleDir = targetBase.resolve(nameVersion.getName() + "." + nameVersion.getBranch());
-
- Path targetSourceDir = targetBundleDir.resolve("OSGI-OPT/src");
-
- // TODO make it less dangerous?
- if (Files.exists(targetSourceDir)) {
- deleteDirectory(targetSourceDir);
- } else {
- Files.createDirectories(targetSourceDir);
- }
-
- // copy entries
- JarEntry entry;
- entries: while ((entry = jarIn.getNextJarEntry()) != null) {
- if (entry.isDirectory())
- continue entries;
- if (entry.getName().startsWith("META-INF"))// skip META-INF entries
- continue entries;
- Path target = targetSourceDir.resolve(entry.getName());
- Files.createDirectories(target.getParent());
- Files.copy(jarIn, target);
- logger.log(Level.TRACE, () -> "Copied source " + target);
- }
-
- // copy MANIFEST
-// Path manifestPath = targetBundleDir.resolve("META-INF/MANIFEST.MF");
-// Files.createDirectories(manifestPath.getParent());
-// try (OutputStream out = Files.newOutputStream(manifestPath)) {
-// manifest.write(out);
-// }
- }
-
- }
-
- static void deleteDirectory(Path path) throws IOException {
- if (!Files.exists(path))
- return;
- Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
- @Override
- public FileVisitResult postVisitDirectory(Path directory, IOException e) throws IOException {
- if (e != null)
- throw e;
- Files.delete(directory);
- return FileVisitResult.CONTINUE;
- }
-
- @Override
- public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- Files.delete(file);
- return FileVisitResult.CONTINUE;
- }
- });
- }
-
- protected NameVersion nameVersionFromManifest(Manifest manifest) {
- Attributes attrs = manifest.getMainAttributes();
- // symbolic name
- String symbolicName = attrs.getValue(ManifestConstants.BUNDLE_SYMBOLICNAME.toString());
- if (symbolicName == null)
- return null;
- // make sure there is no directive
- symbolicName = symbolicName.split(";")[0];
-
- String version = attrs.getValue(ManifestConstants.BUNDLE_VERSION.toString());
- return new DefaultNameVersion(symbolicName, version);
- }
-
- protected Path tryDownload(String uri, Path dir) throws IOException {
- // find mirror
- List<String> urlBases = null;
- String uriPrefix = null;
- uriPrefixes: for (String uriPref : mirrors.keySet()) {
- if (uri.startsWith(uriPref)) {
- if (mirrors.get(uriPref).size() > 0) {
- urlBases = mirrors.get(uriPref);
- uriPrefix = uriPref;
- break uriPrefixes;
- }
- }
- }
- if (urlBases == null)
- try {
- return download(new URL(uri), dir, null);
- } catch (FileNotFoundException e) {
- throw new FileNotFoundException("Cannot find " + uri);
- }
-
- // try to download
- for (String urlBase : urlBases) {
- String relativePath = uri.substring(uriPrefix.length());
- URL url = new URL(urlBase + relativePath);
- try {
- return download(url, dir, null);
- } catch (FileNotFoundException e) {
- logger.log(Level.WARNING, "Cannot download " + url + ", trying another mirror");
- }
- }
- throw new FileNotFoundException("Cannot find " + uri);
- }
-
-// protected String simplifyName(URL u) {
-// String name = u.getPath().substring(u.getPath().lastIndexOf('/') + 1);
-//
-// }
-
- protected Path download(URL url, Path dir, String name) throws IOException {
-
- Path dest;
- if (name == null) {
- name = url.getPath().substring(url.getPath().lastIndexOf('/') + 1);
- }
-
- dest = dir.resolve(name);
- if (Files.exists(dest)) {
- logger.log(Level.TRACE, () -> "File " + dest + " already exists for " + url + ", not downloading again");
- return dest;
- }
-
- try (InputStream in = url.openStream()) {
- Files.copy(in, dest);
- logger.log(Level.DEBUG, () -> "Downloaded " + dest + " from " + url);
- }
- return dest;
- }
-
- protected Path createJar(Path bundleDir) throws IOException {
- Path jarPath = bundleDir.getParent().resolve(bundleDir.getFileName() + ".jar");
- Path manifestPath = bundleDir.resolve("META-INF/MANIFEST.MF");
- Manifest manifest;
- try (InputStream in = Files.newInputStream(manifestPath)) {
- manifest = new Manifest(in);
- }
- try (JarOutputStream jarOut = new JarOutputStream(Files.newOutputStream(jarPath), manifest)) {
- Files.walkFileTree(bundleDir, new SimpleFileVisitor<Path>() {
-
- @Override
- public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- if (file.getFileName().toString().equals("MANIFEST.MF"))
- return super.visitFile(file, attrs);
- JarEntry entry = new JarEntry(bundleDir.relativize(file).toString());
- jarOut.putNextEntry(entry);
- Files.copy(file, jarOut);
- return super.visitFile(file, attrs);
- }
-
- });
- }
- deleteDirectory(bundleDir);
- return jarPath;
- }
-
- public static void main(String[] args) {
- Path originBase = Paths.get("../output/origin").toAbsolutePath().normalize();
- Path factoryBase = Paths.get("../output/a2").toAbsolutePath().normalize();
- A2Factory factory = new A2Factory(originBase, factoryBase);
-
- Path descriptorsBase = Paths.get("../tp").toAbsolutePath().normalize();
-
-// factory.processSingleM2ArtifactDistributionUnit(descriptorsBase.resolve("org.argeo.tp.apache").resolve("org.apache.xml.resolver.bnd"));
-// factory.processM2BasedDistributionUnit(descriptorsBase.resolve("org.argeo.tp/slf4j"));
-// System.exit(0);
-
- // Eclipse
- factory.processEclipseArchive(
- descriptorsBase.resolve("org.argeo.tp.eclipse.equinox").resolve("eclipse-equinox"));
- factory.processEclipseArchive(descriptorsBase.resolve("org.argeo.tp.eclipse.rap").resolve("eclipse-rap"));
- factory.processEclipseArchive(descriptorsBase.resolve("org.argeo.tp.eclipse.rcp").resolve("eclipse-rcp"));
-
- // Maven
- factory.processCategory(descriptorsBase.resolve("org.argeo.tp.sdk"));
- factory.processCategory(descriptorsBase.resolve("org.argeo.tp"));
- factory.processCategory(descriptorsBase.resolve("org.argeo.tp.apache"));
- factory.processCategory(descriptorsBase.resolve("org.argeo.tp.jetty"));
- factory.processCategory(descriptorsBase.resolve("org.argeo.tp.jcr"));
- }
-}
+++ /dev/null
-package org.argeo.slc.build.bnd;
-
-public class BndManifestFactory {
-
-}
+++ /dev/null
-package org.argeo.slc.build.m2;
-
-public interface Artifact {
- String getGroupId();
-
- String getArtifactId();
-
- String getVersion();
-
- default String getBaseVersion() {
- return getVersion();
- }
-
-// boolean isSnapshot();
-
- default String getClassifier() {
- return "";
- }
-
- default String getExtension() {
- return "jar";
- }
-
-}
+++ /dev/null
-package org.argeo.slc.build.m2;
-
-import org.argeo.slc.DefaultCategoryNameVersion;
-
-/**
- * Simple representation of an M2 artifact, not taking into account classifiers,
- * types, etc.
- */
-public class DefaultArtifact extends DefaultCategoryNameVersion implements Artifact {
- private String classifier;
-
- public DefaultArtifact(String m2coordinates) {
- this(m2coordinates, null);
- }
-
- public DefaultArtifact(String m2coordinates, String classifier) {
- String[] parts = m2coordinates.split(":");
- setCategory(parts[0]);
- setName(parts[1]);
- if (parts.length > 2) {
- setVersion(parts[2]);
- }
- this.classifier = classifier;
- }
-
- @Override
- public String getGroupId() {
- return getCategory();
- }
-
- @Override
- public String getArtifactId() {
- return getName();
- }
-
- public String toM2Coordinates() {
- return getCategory() + ":" + getName() + (getVersion() != null ? ":" + getVersion() : "");
- }
-
- public String getClassifier() {
- return classifier != null ? classifier : "";
- }
-
-}
+++ /dev/null
-package org.argeo.slc.build.m2;
-
-import java.io.File;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.Set;
-
-/**
- * Static utilities around Maven which are NOT using the Maven APIs (conventions
- * based).
- */
-public class MavenConventionsUtils {
- public final static String MAVEN_CENTRAL_BASE_URL = "https://repo1.maven.org/maven2/";
-
- /**
- * Path to the file identified by this artifact <b>without</b> using Maven APIs
- * (convention based). Default location of repository (~/.m2/repository) is used
- * here.
- *
- * @see MavenConventionsUtils#artifactToFile(String, Artifact)
- */
- public static File artifactToFile(Artifact artifact) {
- return artifactToFile(System.getProperty("user.home") + File.separator + ".m2" + File.separator + "repository",
- artifact);
- }
-
- /**
- * Path to the file identified by this artifact <b>without</b> using Maven APIs
- * (convention based).
- *
- * @param repositoryPath path to the related local repository location
- * @param artifact the artifact
- */
- public static File artifactToFile(String repositoryPath, Artifact artifact) {
- return new File(repositoryPath + File.separator + artifact.getGroupId().replace('.', File.separatorChar)
- + File.separator + artifact.getArtifactId() + File.separator + artifact.getVersion() + File.separator
- + artifactFileName(artifact)).getAbsoluteFile();
- }
-
- /** The file name of this artifact when stored */
- public static String artifactFileName(Artifact artifact) {
- return artifact.getArtifactId() + '-' + artifact.getVersion()
- + (artifact.getClassifier().equals("") ? "" : '-' + artifact.getClassifier()) + '.'
- + artifact.getExtension();
- }
-
- /** Absolute path to the file */
- public static String artifactPath(String artifactBasePath, Artifact artifact) {
- return artifactParentPath(artifactBasePath, artifact) + '/' + artifactFileName(artifact);
- }
-
- /** Absolute path to the file */
- public static String artifactUrl(String repoUrl, Artifact artifact) {
- if (repoUrl.endsWith("/"))
- return repoUrl + artifactPath("/", artifact).substring(1);
- else
- return repoUrl + artifactPath("/", artifact);
- }
-
- /** Absolute path to the file */
- public static URL mavenCentralUrl(Artifact artifact) {
- String url = artifactUrl(MAVEN_CENTRAL_BASE_URL, artifact);
- try {
- return new URL(url);
- } catch (MalformedURLException e) {
- // it should not happen
- throw new IllegalStateException(e);
- }
- }
-
- /** Absolute path to the directories where the files will be stored */
- public static String artifactParentPath(String artifactBasePath, Artifact artifact) {
- return artifactBasePath + (artifactBasePath.endsWith("/") ? "" : "/") + artifactParentPath(artifact);
- }
-
- /** Absolute path to the directory of this group */
- public static String groupPath(String artifactBasePath, String groupId) {
- return artifactBasePath + (artifactBasePath.endsWith("/") ? "" : "/") + groupId.replace('.', '/');
- }
-
- /** Relative path to the directories where the files will be stored */
- public static String artifactParentPath(Artifact artifact) {
- return artifact.getGroupId().replace('.', '/') + '/' + artifact.getArtifactId() + '/'
- + artifact.getBaseVersion();
- }
-
- public static String artifactsAsDependencyPom(Artifact pomArtifact, Set<Artifact> artifacts, Artifact parent) {
- StringBuffer p = new StringBuffer();
-
- // XML header
- p.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
- p.append(
- "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n");
- p.append("<modelVersion>4.0.0</modelVersion>\n");
-
- // Artifact
- if (parent != null) {
- p.append("<parent>\n");
- p.append("<groupId>").append(parent.getGroupId()).append("</groupId>\n");
- p.append("<artifactId>").append(parent.getArtifactId()).append("</artifactId>\n");
- p.append("<version>").append(parent.getVersion()).append("</version>\n");
- p.append("</parent>\n");
- }
- p.append("<groupId>").append(pomArtifact.getGroupId()).append("</groupId>\n");
- p.append("<artifactId>").append(pomArtifact.getArtifactId()).append("</artifactId>\n");
- p.append("<version>").append(pomArtifact.getVersion()).append("</version>\n");
- p.append("<packaging>pom</packaging>\n");
-
- // Dependencies
- p.append("<dependencies>\n");
- for (Artifact a : artifacts) {
- p.append("\t<dependency>");
- p.append("<artifactId>").append(a.getArtifactId()).append("</artifactId>");
- p.append("<groupId>").append(a.getGroupId()).append("</groupId>");
- if (!a.getExtension().equals("jar"))
- p.append("<type>").append(a.getExtension()).append("</type>");
- p.append("</dependency>\n");
- }
- p.append("</dependencies>\n");
-
- // Dependency management
- p.append("<dependencyManagement>\n");
- p.append("<dependencies>\n");
- for (Artifact a : artifacts) {
- p.append("\t<dependency>");
- p.append("<artifactId>").append(a.getArtifactId()).append("</artifactId>");
- p.append("<version>").append(a.getVersion()).append("</version>");
- p.append("<groupId>").append(a.getGroupId()).append("</groupId>");
- if (a.getExtension().equals("pom")) {
- p.append("<type>").append(a.getExtension()).append("</type>");
- p.append("<scope>import</scope>");
- }
- p.append("</dependency>\n");
- }
- p.append("</dependencies>\n");
- p.append("</dependencyManagement>\n");
-
- // Repositories
- // p.append("<repositories>\n");
- // p.append("<repository><id>argeo</id><url>http://maven.argeo.org/argeo</url></repository>\n");
- // p.append("</repositories>\n");
-
- p.append("</project>\n");
- return p.toString();
- }
-
- /** Singleton */
- private MavenConventionsUtils() {
- }
-}
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="src" output="target/classes" path="src"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>>>
+ <classpathentry kind="src" path="src"/>
<classpathentry kind="output" path="bin"/>
</classpath>
</buildCommand>
</buildSpec>
<natures>
- <nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.pde.PluginNature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
</natures>
</projectDescription>
+++ /dev/null
-/MANIFEST.MF
-Import-Package: javax.jcr.nodetype,\
-org.argeo.slc.repo,\
-org.osgi.*;version=0.0.0,\
-*
-
\ No newline at end of file
--- /dev/null
+source.. = src/
+output.. = bin/
+bin.includes = META-INF/,\
+ .
+additional.bundles = org.argeo.init,\
+ org.slf4j.api,\
+ org.argeo.cms.tp
+++ /dev/null
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>org.argeo.slc</groupId>
- <artifactId>argeo-slc</artifactId>
- <version>2.3-SNAPSHOT</version>
- <relativePath>..</relativePath>
- </parent>
- <artifactId>org.argeo.slc.factory</artifactId>
- <name>SLC Factory</name>
- <dependencies>
- <!-- SLC -->
- <dependency>
- <groupId>org.argeo.slc</groupId>
- <artifactId>org.argeo.slc.runtime</artifactId>
- <version>2.3-SNAPSHOT</version>
- </dependency>
-
-
- <dependency>
- <groupId>org.argeo.slc</groupId>
- <artifactId>org.argeo.slc.repo</artifactId>
- <version>2.3-SNAPSHOT</version>
- </dependency>
-
- <dependency>
- <groupId>org.argeo.commons</groupId>
- <artifactId>org.argeo.api.cms</artifactId>
- <version>${version.argeo-commons}</version>
- </dependency>
-
- </dependencies>
-</project>
\ No newline at end of file
--- /dev/null
+package org.argeo.slc.factory;
+
+import static java.lang.System.Logger.Level.DEBUG;
+import static org.argeo.slc.ManifestConstants.BUNDLE_LICENSE;
+import static org.argeo.slc.ManifestConstants.BUNDLE_SYMBOLICNAME;
+import static org.argeo.slc.ManifestConstants.BUNDLE_VERSION;
+import static org.argeo.slc.ManifestConstants.EXPORT_PACKAGE;
+import static org.argeo.slc.ManifestConstants.SLC_ORIGIN_M2;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.Writer;
+import java.lang.System.Logger;
+import java.lang.System.Logger.Level;
+import java.net.URL;
+import java.nio.file.DirectoryStream;
+import java.nio.file.FileSystem;
+import java.nio.file.FileSystems;
+import java.nio.file.FileVisitResult;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.PathMatcher;
+import java.nio.file.Paths;
+import java.nio.file.SimpleFileVisitor;
+import java.nio.file.attribute.BasicFileAttributes;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.TreeMap;
+import java.util.jar.Attributes;
+import java.util.jar.JarEntry;
+import java.util.jar.JarInputStream;
+import java.util.jar.JarOutputStream;
+import java.util.jar.Manifest;
+
+import org.argeo.slc.DefaultNameVersion;
+import org.argeo.slc.ManifestConstants;
+import org.argeo.slc.NameVersion;
+import org.argeo.slc.factory.m2.DefaultArtifact;
+import org.argeo.slc.factory.m2.MavenConventionsUtils;
+
+import aQute.bnd.osgi.Analyzer;
+import aQute.bnd.osgi.Jar;
+
+/** The central class for A2 packaging. */
+public class A2Factory {
+ private final static Logger logger = System.getLogger(A2Factory.class.getName());
+
+ private final static String COMMON_BND = "common.bnd";
+
+ private Path originBase;
+ private Path factoryBase;
+
+ /** key is URI prefix, value list of base URLs */
+ private Map<String, List<String>> mirrors = new HashMap<String, List<String>>();
+
+ public A2Factory(Path originBase, Path factoryBase) {
+ super();
+ this.originBase = originBase;
+ this.factoryBase = factoryBase;
+
+ // TODO make it configurable
+ List<String> eclipseMirrors = new ArrayList<>();
+ eclipseMirrors.add("https://archive.eclipse.org/");
+
+ mirrors.put("http://www.eclipse.org/downloads", eclipseMirrors);
+ }
+
+ public void processCategory(Path targetCategoryBase) {
+ try {
+ DirectoryStream<Path> bnds = Files.newDirectoryStream(targetCategoryBase,
+ (p) -> p.getFileName().toString().endsWith(".bnd")
+ && !p.getFileName().toString().equals(COMMON_BND));
+ for (Path p : bnds) {
+ processSingleM2ArtifactDistributionUnit(p);
+ }
+
+ DirectoryStream<Path> dus = Files.newDirectoryStream(targetCategoryBase, (p) -> Files.isDirectory(p));
+ for (Path duDir : dus) {
+ processM2BasedDistributionUnit(duDir);
+ }
+ } catch (IOException e) {
+ throw new RuntimeException("Cannot process category " + targetCategoryBase, e);
+ }
+ }
+
+ public void processSingleM2ArtifactDistributionUnit(Path bndFile) {
+ try {
+ String category = bndFile.getParent().getFileName().toString();
+ Path targetCategoryBase = factoryBase.resolve(category);
+ Properties fileProps = new Properties();
+ try (InputStream in = Files.newInputStream(bndFile)) {
+ fileProps.load(in);
+ }
+
+ String m2Coordinates = fileProps.getProperty(SLC_ORIGIN_M2.toString());
+ if (m2Coordinates == null)
+ throw new IllegalArgumentException("No M2 coordinates available for " + bndFile);
+ DefaultArtifact artifact = new DefaultArtifact(m2Coordinates);
+ URL url = MavenConventionsUtils.mavenCentralUrl(artifact);
+ Path downloaded = download(url, originBase, artifact.toM2Coordinates() + ".jar");
+
+ Path targetBundleDir = processBndJar(downloaded, targetCategoryBase, fileProps, artifact);
+
+ downloadAndProcessM2Sources(artifact, targetBundleDir);
+
+ createJar(targetBundleDir);
+ } catch (Exception e) {
+ throw new RuntimeException("Cannot process " + bndFile, e);
+ }
+ }
+
+ public void processM2BasedDistributionUnit(Path duDir) {
+ try {
+ String category = duDir.getParent().getFileName().toString();
+ Path targetCategoryBase = factoryBase.resolve(category);
+ Path commonBnd = duDir.resolve(COMMON_BND);
+ Properties commonProps = new Properties();
+ try (InputStream in = Files.newInputStream(commonBnd)) {
+ commonProps.load(in);
+ }
+
+ String m2Version = commonProps.getProperty(SLC_ORIGIN_M2.toString());
+ if (!m2Version.startsWith(":")) {
+ throw new IllegalStateException("Only the M2 version can be specified: " + m2Version);
+ }
+ m2Version = m2Version.substring(1);
+
+ // String license = commonProps.getProperty(BUNDLE_LICENSE.toString());
+
+ DirectoryStream<Path> ds = Files.newDirectoryStream(duDir,
+ (p) -> p.getFileName().toString().endsWith(".bnd")
+ && !p.getFileName().toString().equals(COMMON_BND));
+ for (Path p : ds) {
+ Properties fileProps = new Properties();
+ try (InputStream in = Files.newInputStream(p)) {
+ fileProps.load(in);
+ }
+ String m2Coordinates = fileProps.getProperty(SLC_ORIGIN_M2.toString());
+ DefaultArtifact artifact = new DefaultArtifact(m2Coordinates);
+
+ // temporary rewrite, for migration
+ String localLicense = fileProps.getProperty(BUNDLE_LICENSE.toString());
+ if (localLicense != null || artifact.getVersion() != null) {
+ fileProps.remove(BUNDLE_LICENSE.toString());
+ fileProps.put(SLC_ORIGIN_M2.toString(), artifact.getGroupId() + ":" + artifact.getArtifactId());
+ try (Writer writer = Files.newBufferedWriter(p)) {
+ for (Object key : fileProps.keySet()) {
+ String value = fileProps.getProperty(key.toString());
+ writer.write(key + ": " + value + '\n');
+ }
+ logger.log(DEBUG, () -> "Migrated " + p);
+ }
+ }
+
+ artifact.setVersion(m2Version);
+ URL url = MavenConventionsUtils.mavenCentralUrl(artifact);
+ Path downloaded = download(url, originBase, artifact.toM2Coordinates() + ".jar");
+
+ // prepare manifest entries
+ Properties mergeProps = new Properties();
+ mergeProps.putAll(commonProps);
+
+ // Map<String, String> entries = new HashMap<>();
+// for (Object key : commonProps.keySet()) {
+// entries.put(key.toString(), commonProps.getProperty(key.toString()));
+// }
+ fileEntries: for (Object key : fileProps.keySet()) {
+ if (ManifestConstants.SLC_ORIGIN_M2.toString().equals(key))
+ continue fileEntries;
+ String value = fileProps.getProperty(key.toString());
+ Object previousValue = mergeProps.put(key.toString(), value);
+ if (previousValue != null) {
+ logger.log(Level.WARNING,
+ downloaded + ": " + key + " was " + previousValue + ", overridden with " + value);
+ }
+ }
+ mergeProps.put(ManifestConstants.SLC_ORIGIN_M2.toString(), artifact.toM2Coordinates());
+ Path targetBundleDir = processBndJar(downloaded, targetCategoryBase, mergeProps, artifact);
+// logger.log(Level.DEBUG, () -> "Processed " + downloaded);
+
+ // sources
+ downloadAndProcessM2Sources(artifact, targetBundleDir);
+
+ createJar(targetBundleDir);
+ }
+ } catch (IOException e) {
+ throw new RuntimeException("Cannot process " + duDir, e);
+ }
+
+ }
+
+ protected void downloadAndProcessM2Sources(DefaultArtifact artifact, Path targetBundleDir) throws IOException {
+ DefaultArtifact sourcesArtifact = new DefaultArtifact(artifact.toM2Coordinates(), "sources");
+ URL sourcesUrl = MavenConventionsUtils.mavenCentralUrl(sourcesArtifact);
+ Path sourcesDownloaded = download(sourcesUrl, originBase, artifact.toM2Coordinates() + ".sources.jar");
+ processM2SourceJar(sourcesDownloaded, targetBundleDir);
+ logger.log(Level.DEBUG, () -> "Processed source " + sourcesDownloaded);
+
+ }
+
+ protected Path processBndJar(Path downloaded, Path targetCategoryBase, Properties fileProps,
+ DefaultArtifact artifact) {
+
+ try {
+ Map<String, String> additionalEntries = new TreeMap<>();
+ boolean doNotModify = Boolean.parseBoolean(fileProps
+ .getOrDefault(ManifestConstants.SLC_ORIGIN_MANIFEST_NOT_MODIFIED.toString(), "false").toString());
+
+ if (doNotModify) {
+ fileEntries: for (Object key : fileProps.keySet()) {
+ if (ManifestConstants.SLC_ORIGIN_M2.toString().equals(key))
+ continue fileEntries;
+ String value = fileProps.getProperty(key.toString());
+ additionalEntries.put(key.toString(), value);
+ }
+ } else {
+ if (artifact != null) {
+ if (!fileProps.containsKey(BUNDLE_SYMBOLICNAME.toString())) {
+ fileProps.put(BUNDLE_SYMBOLICNAME.toString(), artifact.getName());
+ }
+ if (!fileProps.containsKey(BUNDLE_VERSION.toString())) {
+ fileProps.put(BUNDLE_VERSION.toString(), artifact.getVersion());
+ }
+ }
+
+ if (!fileProps.containsKey(EXPORT_PACKAGE.toString())) {
+ fileProps.put(EXPORT_PACKAGE.toString(),
+ "*;version=\"" + fileProps.getProperty(BUNDLE_VERSION.toString()) + "\"");
+ }
+// if (!fileProps.contains(IMPORT_PACKAGE.toString())) {
+// fileProps.put(IMPORT_PACKAGE.toString(), "*");
+// }
+
+ try (Analyzer bndAnalyzer = new Analyzer()) {
+ bndAnalyzer.setProperties(fileProps);
+ Jar jar = new Jar(downloaded.toFile());
+ bndAnalyzer.setJar(jar);
+ Manifest manifest = bndAnalyzer.calcManifest();
+
+ keys: for (Object key : manifest.getMainAttributes().keySet()) {
+ Object value = manifest.getMainAttributes().get(key);
+
+ switch (key.toString()) {
+ case "Tool":
+ case "Bnd-LastModified":
+ case "Created-By":
+ continue keys;
+ }
+ if ("Require-Capability".equals(key.toString())
+ && value.toString().equals("osgi.ee;filter:=\"(&(osgi.ee=JavaSE)(version=1.1))\""))
+ continue keys;// hack for very old classes
+ additionalEntries.put(key.toString(), value.toString());
+ logger.log(DEBUG, () -> key + "=" + value);
+
+ }
+ }
+
+// try (Builder bndBuilder = new Builder()) {
+// Jar jar = new Jar(downloaded.toFile());
+// bndBuilder.addClasspath(jar);
+// Path targetBundleDir = targetCategoryBase.resolve(artifact.getName() + "." + artifact.getBranch());
+//
+// Jar target = new Jar(targetBundleDir.toFile());
+// bndBuilder.setJar(target);
+// return targetBundleDir;
+// }
+ }
+ Path targetBundleDir = processBundleJar(downloaded, targetCategoryBase, additionalEntries);
+ logger.log(Level.DEBUG, () -> "Processed " + downloaded);
+ return targetBundleDir;
+ } catch (Exception e) {
+ throw new RuntimeException("Cannot BND process " + downloaded, e);
+ }
+
+ }
+
+ protected void processM2SourceJar(Path file, Path targetBundleDir) throws IOException {
+ try (JarInputStream jarIn = new JarInputStream(Files.newInputStream(file), false)) {
+ Path targetSourceDir = targetBundleDir.resolve("OSGI-OPT/src");
+
+ // TODO make it less dangerous?
+ if (Files.exists(targetSourceDir)) {
+ deleteDirectory(targetSourceDir);
+ } else {
+ Files.createDirectories(targetSourceDir);
+ }
+
+ // copy entries
+ JarEntry entry;
+ entries: while ((entry = jarIn.getNextJarEntry()) != null) {
+ if (entry.isDirectory())
+ continue entries;
+ if (entry.getName().startsWith("META-INF"))// skip META-INF entries
+ continue entries;
+ Path target = targetSourceDir.resolve(entry.getName());
+ Files.createDirectories(target.getParent());
+ Files.copy(jarIn, target);
+ logger.log(Level.TRACE, () -> "Copied source " + target);
+ }
+ }
+
+ }
+
+ public void processEclipseArchive(Path duDir) {
+ try {
+ String category = duDir.getParent().getFileName().toString();
+ Path targetCategoryBase = factoryBase.resolve(category);
+ Files.createDirectories(targetCategoryBase);
+ Files.createDirectories(originBase);
+
+ Path commonBnd = duDir.resolve(COMMON_BND);
+ Properties commonProps = new Properties();
+ try (InputStream in = Files.newInputStream(commonBnd)) {
+ commonProps.load(in);
+ }
+ Properties includes = new Properties();
+ try (InputStream in = Files.newInputStream(duDir.resolve("includes.properties"))) {
+ includes.load(in);
+ }
+ String url = commonProps.getProperty(ManifestConstants.SLC_ORIGIN_URI.toString());
+ Path downloaded = tryDownload(url, originBase);
+
+ FileSystem zipFs = FileSystems.newFileSystem(downloaded, (ClassLoader) null);
+
+ List<PathMatcher> pathMatchers = new ArrayList<>();
+ for (Object pattern : includes.keySet()) {
+ PathMatcher pathMatcher = zipFs.getPathMatcher("glob:/" + pattern);
+ pathMatchers.add(pathMatcher);
+ }
+
+ Files.walkFileTree(zipFs.getRootDirectories().iterator().next(), new SimpleFileVisitor<Path>() {
+
+ @Override
+ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
+ pathMatchers: for (PathMatcher pathMatcher : pathMatchers) {
+ if (pathMatcher.matches(file)) {
+// Path target = targetBase.resolve(file.getFileName().toString());
+// if (!Files.exists(target)) {
+// Files.copy(file, target);
+// logger.log(Level.DEBUG, () -> "Copied " + target + " from " + downloaded);
+// } else {
+// logger.log(Level.DEBUG, () -> target + " already exists.");
+//
+// }
+ if (file.getFileName().toString().contains(".source_")) {
+ processEclipseSourceJar(file, targetCategoryBase);
+ logger.log(Level.DEBUG, () -> "Processed source " + file);
+
+ } else {
+ processBundleJar(file, targetCategoryBase, new HashMap<>());
+ logger.log(Level.DEBUG, () -> "Processed " + file);
+ }
+ continue pathMatchers;
+ }
+ }
+ return super.visitFile(file, attrs);
+ }
+ });
+
+ DirectoryStream<Path> dirs = Files.newDirectoryStream(targetCategoryBase, (p) -> Files.isDirectory(p));
+ for (Path dir : dirs) {
+ createJar(dir);
+ }
+ } catch (IOException e) {
+ throw new RuntimeException("Cannot process " + duDir, e);
+ }
+
+ }
+
+ protected Path processBundleJar(Path file, Path targetBase, Map<String, String> entries) throws IOException {
+ NameVersion nameVersion;
+ Path targetBundleDir;
+ try (JarInputStream jarIn = new JarInputStream(Files.newInputStream(file), false)) {
+ Manifest manifest = new Manifest(jarIn.getManifest());
+
+ // remove problematic entries in MANIFEST
+ manifest.getEntries().clear();
+// Set<String> entriesToDelete = new HashSet<>();
+// for (String key : manifest.getEntries().keySet()) {
+//// logger.log(DEBUG, "## " + key);
+// Attributes attrs = manifest.getAttributes(key);
+// for (Object attrName : attrs.keySet()) {
+//// logger.log(DEBUG, attrName + "=" + attrs.get(attrName));
+// if ("Specification-Version".equals(attrName.toString())
+// || "Implementation-Version".equals(attrName.toString())) {
+// entriesToDelete.add(key);
+//
+// }
+// }
+// }
+// for (String key : entriesToDelete) {
+// manifest.getEntries().remove(key);
+// }
+
+ String symbolicNameFromEntries = entries.get(BUNDLE_SYMBOLICNAME.toString());
+ String versionFromEntries = entries.get(BUNDLE_VERSION.toString());
+
+ if (symbolicNameFromEntries != null && versionFromEntries != null) {
+ nameVersion = new DefaultNameVersion(symbolicNameFromEntries, versionFromEntries);
+ } else {
+ nameVersion = nameVersionFromManifest(manifest);
+ if (versionFromEntries != null && !nameVersion.getVersion().equals(versionFromEntries)) {
+ logger.log(Level.WARNING, "Original version is " + nameVersion.getVersion()
+ + " while new version is " + versionFromEntries);
+ }
+ }
+ targetBundleDir = targetBase.resolve(nameVersion.getName() + "." + nameVersion.getBranch());
+
+ // TODO make it less dangerous?
+ if (Files.exists(targetBundleDir)) {
+ deleteDirectory(targetBundleDir);
+ }
+
+ // copy entries
+ JarEntry entry;
+ entries: while ((entry = jarIn.getNextJarEntry()) != null) {
+ if (entry.isDirectory())
+ continue entries;
+ if (entry.getName().endsWith(".RSA") || entry.getName().endsWith(".SF"))
+ continue entries;
+ Path target = targetBundleDir.resolve(entry.getName());
+ Files.createDirectories(target.getParent());
+ Files.copy(jarIn, target);
+ logger.log(Level.TRACE, () -> "Copied " + target);
+ }
+
+ // copy MANIFEST
+ Path manifestPath = targetBundleDir.resolve("META-INF/MANIFEST.MF");
+ Files.createDirectories(manifestPath.getParent());
+ for (String key : entries.keySet()) {
+ String value = entries.get(key);
+ Object previousValue = manifest.getMainAttributes().putValue(key, value);
+ if (previousValue != null && !previousValue.equals(value)) {
+ logger.log(Level.WARNING,
+ file.getFileName() + ": " + key + " was " + previousValue + ", overridden with " + value);
+ }
+ }
+ try (OutputStream out = Files.newOutputStream(manifestPath)) {
+ manifest.write(out);
+ }
+ }
+ return targetBundleDir;
+ }
+
+ protected void processEclipseSourceJar(Path file, Path targetBase) throws IOException {
+ // NameVersion nameVersion;
+ Path targetBundleDir;
+ try (JarInputStream jarIn = new JarInputStream(Files.newInputStream(file), false)) {
+ Manifest manifest = jarIn.getManifest();
+ // nameVersion = nameVersionFromManifest(manifest);
+
+ String[] relatedBundle = manifest.getMainAttributes().getValue("Eclipse-SourceBundle").split(";");
+ String version = relatedBundle[1].substring("version=\"".length());
+ version = version.substring(0, version.length() - 1);
+ NameVersion nameVersion = new DefaultNameVersion(relatedBundle[0], version);
+ targetBundleDir = targetBase.resolve(nameVersion.getName() + "." + nameVersion.getBranch());
+
+ Path targetSourceDir = targetBundleDir.resolve("OSGI-OPT/src");
+
+ // TODO make it less dangerous?
+ if (Files.exists(targetSourceDir)) {
+ deleteDirectory(targetSourceDir);
+ } else {
+ Files.createDirectories(targetSourceDir);
+ }
+
+ // copy entries
+ JarEntry entry;
+ entries: while ((entry = jarIn.getNextJarEntry()) != null) {
+ if (entry.isDirectory())
+ continue entries;
+ if (entry.getName().startsWith("META-INF"))// skip META-INF entries
+ continue entries;
+ Path target = targetSourceDir.resolve(entry.getName());
+ Files.createDirectories(target.getParent());
+ Files.copy(jarIn, target);
+ logger.log(Level.TRACE, () -> "Copied source " + target);
+ }
+
+ // copy MANIFEST
+// Path manifestPath = targetBundleDir.resolve("META-INF/MANIFEST.MF");
+// Files.createDirectories(manifestPath.getParent());
+// try (OutputStream out = Files.newOutputStream(manifestPath)) {
+// manifest.write(out);
+// }
+ }
+
+ }
+
+ static void deleteDirectory(Path path) throws IOException {
+ if (!Files.exists(path))
+ return;
+ Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
+ @Override
+ public FileVisitResult postVisitDirectory(Path directory, IOException e) throws IOException {
+ if (e != null)
+ throw e;
+ Files.delete(directory);
+ return FileVisitResult.CONTINUE;
+ }
+
+ @Override
+ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
+ Files.delete(file);
+ return FileVisitResult.CONTINUE;
+ }
+ });
+ }
+
+ protected NameVersion nameVersionFromManifest(Manifest manifest) {
+ Attributes attrs = manifest.getMainAttributes();
+ // symbolic name
+ String symbolicName = attrs.getValue(ManifestConstants.BUNDLE_SYMBOLICNAME.toString());
+ if (symbolicName == null)
+ return null;
+ // make sure there is no directive
+ symbolicName = symbolicName.split(";")[0];
+
+ String version = attrs.getValue(ManifestConstants.BUNDLE_VERSION.toString());
+ return new DefaultNameVersion(symbolicName, version);
+ }
+
+ protected Path tryDownload(String uri, Path dir) throws IOException {
+ // find mirror
+ List<String> urlBases = null;
+ String uriPrefix = null;
+ uriPrefixes: for (String uriPref : mirrors.keySet()) {
+ if (uri.startsWith(uriPref)) {
+ if (mirrors.get(uriPref).size() > 0) {
+ urlBases = mirrors.get(uriPref);
+ uriPrefix = uriPref;
+ break uriPrefixes;
+ }
+ }
+ }
+ if (urlBases == null)
+ try {
+ return download(new URL(uri), dir, null);
+ } catch (FileNotFoundException e) {
+ throw new FileNotFoundException("Cannot find " + uri);
+ }
+
+ // try to download
+ for (String urlBase : urlBases) {
+ String relativePath = uri.substring(uriPrefix.length());
+ URL url = new URL(urlBase + relativePath);
+ try {
+ return download(url, dir, null);
+ } catch (FileNotFoundException e) {
+ logger.log(Level.WARNING, "Cannot download " + url + ", trying another mirror");
+ }
+ }
+ throw new FileNotFoundException("Cannot find " + uri);
+ }
+
+// protected String simplifyName(URL u) {
+// String name = u.getPath().substring(u.getPath().lastIndexOf('/') + 1);
+//
+// }
+
+ protected Path download(URL url, Path dir, String name) throws IOException {
+
+ Path dest;
+ if (name == null) {
+ name = url.getPath().substring(url.getPath().lastIndexOf('/') + 1);
+ }
+
+ dest = dir.resolve(name);
+ if (Files.exists(dest)) {
+ logger.log(Level.TRACE, () -> "File " + dest + " already exists for " + url + ", not downloading again");
+ return dest;
+ }
+
+ try (InputStream in = url.openStream()) {
+ Files.copy(in, dest);
+ logger.log(Level.DEBUG, () -> "Downloaded " + dest + " from " + url);
+ }
+ return dest;
+ }
+
+ protected Path createJar(Path bundleDir) throws IOException {
+ Path jarPath = bundleDir.getParent().resolve(bundleDir.getFileName() + ".jar");
+ Path manifestPath = bundleDir.resolve("META-INF/MANIFEST.MF");
+ Manifest manifest;
+ try (InputStream in = Files.newInputStream(manifestPath)) {
+ manifest = new Manifest(in);
+ }
+ try (JarOutputStream jarOut = new JarOutputStream(Files.newOutputStream(jarPath), manifest)) {
+ Files.walkFileTree(bundleDir, new SimpleFileVisitor<Path>() {
+
+ @Override
+ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
+ if (file.getFileName().toString().equals("MANIFEST.MF"))
+ return super.visitFile(file, attrs);
+ JarEntry entry = new JarEntry(bundleDir.relativize(file).toString());
+ jarOut.putNextEntry(entry);
+ Files.copy(file, jarOut);
+ return super.visitFile(file, attrs);
+ }
+
+ });
+ }
+ deleteDirectory(bundleDir);
+ return jarPath;
+ }
+
+ public static void main(String[] args) {
+ Path originBase = Paths.get("../output/origin").toAbsolutePath().normalize();
+ Path factoryBase = Paths.get("../output/a2").toAbsolutePath().normalize();
+ A2Factory factory = new A2Factory(originBase, factoryBase);
+
+ Path descriptorsBase = Paths.get("../tp").toAbsolutePath().normalize();
+
+// factory.processSingleM2ArtifactDistributionUnit(descriptorsBase.resolve("org.argeo.tp.apache").resolve("org.apache.xml.resolver.bnd"));
+// factory.processM2BasedDistributionUnit(descriptorsBase.resolve("org.argeo.tp/slf4j"));
+// System.exit(0);
+
+ // Eclipse
+ factory.processEclipseArchive(
+ descriptorsBase.resolve("org.argeo.tp.eclipse.equinox").resolve("eclipse-equinox"));
+ factory.processEclipseArchive(descriptorsBase.resolve("org.argeo.tp.eclipse.rap").resolve("eclipse-rap"));
+ factory.processEclipseArchive(descriptorsBase.resolve("org.argeo.tp.eclipse.rcp").resolve("eclipse-rcp"));
+
+ // Maven
+ factory.processCategory(descriptorsBase.resolve("org.argeo.tp.sdk"));
+ factory.processCategory(descriptorsBase.resolve("org.argeo.tp"));
+ factory.processCategory(descriptorsBase.resolve("org.argeo.tp.apache"));
+ factory.processCategory(descriptorsBase.resolve("org.argeo.tp.jetty"));
+ factory.processCategory(descriptorsBase.resolve("org.argeo.tp.jcr"));
+ }
+}
--- /dev/null
+package org.argeo.slc.factory.m2;
+
+public interface Artifact {
+ String getGroupId();
+
+ String getArtifactId();
+
+ String getVersion();
+
+ default String getBaseVersion() {
+ return getVersion();
+ }
+
+// boolean isSnapshot();
+
+ default String getClassifier() {
+ return "";
+ }
+
+ default String getExtension() {
+ return "jar";
+ }
+
+}
--- /dev/null
+package org.argeo.slc.factory.m2;
+
+import org.argeo.slc.DefaultCategoryNameVersion;
+
+/**
+ * Simple representation of an M2 artifact, not taking into account classifiers,
+ * types, etc.
+ */
+public class DefaultArtifact extends DefaultCategoryNameVersion implements Artifact {
+ private String classifier;
+
+ public DefaultArtifact(String m2coordinates) {
+ this(m2coordinates, null);
+ }
+
+ public DefaultArtifact(String m2coordinates, String classifier) {
+ String[] parts = m2coordinates.split(":");
+ setCategory(parts[0]);
+ setName(parts[1]);
+ if (parts.length > 2) {
+ setVersion(parts[2]);
+ }
+ this.classifier = classifier;
+ }
+
+ @Override
+ public String getGroupId() {
+ return getCategory();
+ }
+
+ @Override
+ public String getArtifactId() {
+ return getName();
+ }
+
+ public String toM2Coordinates() {
+ return getCategory() + ":" + getName() + (getVersion() != null ? ":" + getVersion() : "");
+ }
+
+ public String getClassifier() {
+ return classifier != null ? classifier : "";
+ }
+
+}
--- /dev/null
+package org.argeo.slc.factory.m2;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Set;
+
+/**
+ * Static utilities around Maven which are NOT using the Maven APIs (conventions
+ * based).
+ */
+public class MavenConventionsUtils {
+ public final static String MAVEN_CENTRAL_BASE_URL = "https://repo1.maven.org/maven2/";
+
+ /**
+ * Path to the file identified by this artifact <b>without</b> using Maven APIs
+ * (convention based). Default location of repository (~/.m2/repository) is used
+ * here.
+ *
+ * @see MavenConventionsUtils#artifactToFile(String, Artifact)
+ */
+ public static File artifactToFile(Artifact artifact) {
+ return artifactToFile(System.getProperty("user.home") + File.separator + ".m2" + File.separator + "repository",
+ artifact);
+ }
+
+ /**
+ * Path to the file identified by this artifact <b>without</b> using Maven APIs
+ * (convention based).
+ *
+ * @param repositoryPath path to the related local repository location
+ * @param artifact the artifact
+ */
+ public static File artifactToFile(String repositoryPath, Artifact artifact) {
+ return new File(repositoryPath + File.separator + artifact.getGroupId().replace('.', File.separatorChar)
+ + File.separator + artifact.getArtifactId() + File.separator + artifact.getVersion() + File.separator
+ + artifactFileName(artifact)).getAbsoluteFile();
+ }
+
+ /** The file name of this artifact when stored */
+ public static String artifactFileName(Artifact artifact) {
+ return artifact.getArtifactId() + '-' + artifact.getVersion()
+ + (artifact.getClassifier().equals("") ? "" : '-' + artifact.getClassifier()) + '.'
+ + artifact.getExtension();
+ }
+
+ /** Absolute path to the file */
+ public static String artifactPath(String artifactBasePath, Artifact artifact) {
+ return artifactParentPath(artifactBasePath, artifact) + '/' + artifactFileName(artifact);
+ }
+
+ /** Absolute path to the file */
+ public static String artifactUrl(String repoUrl, Artifact artifact) {
+ if (repoUrl.endsWith("/"))
+ return repoUrl + artifactPath("/", artifact).substring(1);
+ else
+ return repoUrl + artifactPath("/", artifact);
+ }
+
+ /** Absolute path to the file */
+ public static URL mavenCentralUrl(Artifact artifact) {
+ String url = artifactUrl(MAVEN_CENTRAL_BASE_URL, artifact);
+ try {
+ return new URL(url);
+ } catch (MalformedURLException e) {
+ // it should not happen
+ throw new IllegalStateException(e);
+ }
+ }
+
+ /** Absolute path to the directories where the files will be stored */
+ public static String artifactParentPath(String artifactBasePath, Artifact artifact) {
+ return artifactBasePath + (artifactBasePath.endsWith("/") ? "" : "/") + artifactParentPath(artifact);
+ }
+
+ /** Absolute path to the directory of this group */
+ public static String groupPath(String artifactBasePath, String groupId) {
+ return artifactBasePath + (artifactBasePath.endsWith("/") ? "" : "/") + groupId.replace('.', '/');
+ }
+
+ /** Relative path to the directories where the files will be stored */
+ public static String artifactParentPath(Artifact artifact) {
+ return artifact.getGroupId().replace('.', '/') + '/' + artifact.getArtifactId() + '/'
+ + artifact.getBaseVersion();
+ }
+
+ public static String artifactsAsDependencyPom(Artifact pomArtifact, Set<Artifact> artifacts, Artifact parent) {
+ StringBuffer p = new StringBuffer();
+
+ // XML header
+ p.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
+ p.append(
+ "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n");
+ p.append("<modelVersion>4.0.0</modelVersion>\n");
+
+ // Artifact
+ if (parent != null) {
+ p.append("<parent>\n");
+ p.append("<groupId>").append(parent.getGroupId()).append("</groupId>\n");
+ p.append("<artifactId>").append(parent.getArtifactId()).append("</artifactId>\n");
+ p.append("<version>").append(parent.getVersion()).append("</version>\n");
+ p.append("</parent>\n");
+ }
+ p.append("<groupId>").append(pomArtifact.getGroupId()).append("</groupId>\n");
+ p.append("<artifactId>").append(pomArtifact.getArtifactId()).append("</artifactId>\n");
+ p.append("<version>").append(pomArtifact.getVersion()).append("</version>\n");
+ p.append("<packaging>pom</packaging>\n");
+
+ // Dependencies
+ p.append("<dependencies>\n");
+ for (Artifact a : artifacts) {
+ p.append("\t<dependency>");
+ p.append("<artifactId>").append(a.getArtifactId()).append("</artifactId>");
+ p.append("<groupId>").append(a.getGroupId()).append("</groupId>");
+ if (!a.getExtension().equals("jar"))
+ p.append("<type>").append(a.getExtension()).append("</type>");
+ p.append("</dependency>\n");
+ }
+ p.append("</dependencies>\n");
+
+ // Dependency management
+ p.append("<dependencyManagement>\n");
+ p.append("<dependencies>\n");
+ for (Artifact a : artifacts) {
+ p.append("\t<dependency>");
+ p.append("<artifactId>").append(a.getArtifactId()).append("</artifactId>");
+ p.append("<version>").append(a.getVersion()).append("</version>");
+ p.append("<groupId>").append(a.getGroupId()).append("</groupId>");
+ if (a.getExtension().equals("pom")) {
+ p.append("<type>").append(a.getExtension()).append("</type>");
+ p.append("<scope>import</scope>");
+ }
+ p.append("</dependency>\n");
+ }
+ p.append("</dependencies>\n");
+ p.append("</dependencyManagement>\n");
+
+ // Repositories
+ // p.append("<repositories>\n");
+ // p.append("<repository><id>argeo</id><url>http://maven.argeo.org/argeo</url></repository>\n");
+ // p.append("</repositories>\n");
+
+ p.append("</project>\n");
+ return p.toString();
+ }
+
+	/** Static utility class: private constructor prevents instantiation. */
+ private MavenConventionsUtils() {
+ }
+}
+++ /dev/null
-package org.argeo.slc.rpmfactory;
-
-import java.io.File;
-import java.util.List;
-
-import javax.jcr.Node;
-
-/**
- * Defines a build environment. This information is typically used by other
- * components performing the various actions related to RPM build.
- */
-public interface RpmFactory {
- //
- // DIRECT ACTIONS ON JCR REPOSITORY
- //
- public void indexWorkspace(String workspace);
-
- public Node newDistribution(String distributionId);
-
- //
- // CONFIG FILES GENERATION
- //
- /** Creates a mock config file. */
- public File getMockConfigFile(String arch, String branch);
-
- /** Creates a yum config file. */
- public File getYumRepoFile(String arch);
-
- //
- // WORKSPACES
- //
- public String getStagingWorkspace();
-
- /**
- * @return the name of the testing workspace, or null if and only if the
- * testing workspace was not enabled.
- */
- public String getTestingWorkspace();
-
- public String getStableWorkspace();
-
- public File getWorkspaceDir(String workspace);
-
- //
- // ARCH DEPENDENT INFOS
- //
- public List<String> getArchs();
-
- public String getMockConfig(String arch);
-
- public String getIdWithArch(String arch);
-
- public File getResultDir(String arch);
-
- //
- // DEPLOYMENT
- //
- public String getGitBaseUrl();
-
- public Boolean isDeveloperInstance();
-
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory;
-
-import org.argeo.jcr.proxy.ResourceProxy;
-
-/** Marker interface (useful for OSGi services references), may be extended later */
-public interface RpmProxyService extends ResourceProxy {
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory;
-
-/** A YUM compatible repository of RPM packages. */
-public interface RpmRepository {
- public String getId();
-
- public String getUrl();
-
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-import org.argeo.slc.rpmfactory.RpmRepository;
-
-/** Common method to RPM repositories. */
-public abstract class AbstractRpmRepository implements RpmRepository {
- private String id;
- private String url;
-
- @Override
- public String getId() {
- return id;
- }
-
- @Override
- public String getUrl() {
- return url;
- }
-
- public void setId(String id) {
- this.id = id;
- }
-
- public void setUrl(String url) {
- this.url = url;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
-import org.apache.commons.exec.Executor;
-import org.apache.commons.io.FileUtils;
-import org.argeo.api.cms.CmsLog;
-import org.argeo.slc.SlcException;
-import org.argeo.slc.rpmfactory.RpmFactory;
-import org.argeo.slc.runtime.tasks.SystemCall;
-
-/** Build an RPM in mock. */
-public class BuildInMock implements Runnable {
- private final static CmsLog log = CmsLog.getLog(BuildInMock.class);
- private final static String NOARCH = "noarch";
-
- private String rpmPackage = null;
- private String branch = null;
- private String arch = NOARCH;
-
- private RpmFactory rpmFactory;
- private Executor executor;
-
- private String debuginfoDirName = "debuginfo";
- private String mockExecutable = "/usr/bin/mock";
-
- private List<String> preBuildCommands = new ArrayList<String>();
-
- public void run() {
- if (!rpmFactory.isDeveloperInstance()) {
- // clean/init
- SystemCall mockClean = createBaseMockCall();
- mockClean.arg("--init");
- mockClean.run();
- }
-
- // pre build commands
- for (String preBuildCmd : preBuildCommands) {
- SystemCall mockClean = createBaseMockCall();
- mockClean.arg("--chroot").arg(preBuildCmd);
- mockClean.run();
- }
-
- // actual build
- SystemCall mockBuild = createBaseMockCall();
- mockBuild.arg("--scm-enable");
- mockBuild.arg("--scm-option").arg("package=" + rpmPackage);
- mockBuild.arg("--no-clean");
- //
- //
- mockBuild.run();
- //
-
- // copy RPMs to target directories
- File stagingDir = rpmFactory.getWorkspaceDir(rpmFactory
- .getStagingWorkspace());
- File srpmDir = new File(stagingDir, "SRPMS");
- srpmDir.mkdirs();
- File archDir = null;
- File debuginfoDir = null;
- if (!arch.equals(NOARCH)) {
- archDir = new File(stagingDir, arch);
- debuginfoDir = new File(archDir, debuginfoDirName);
- debuginfoDir.mkdirs();
- }
-
- Set<File> reposToRecreate = new HashSet<File>();
- File resultDir = rpmFactory.getResultDir(arch);
- if (resultDir.exists())
- rpms: for (File file : resultDir.listFiles()) {
- if (file.isDirectory())
- continue rpms;
-
- File[] targetDirs;
- if (file.getName().contains(".src.rpm"))
- targetDirs = new File[] { srpmDir };
- else if (file.getName().contains("-debuginfo-"))
- targetDirs = new File[] { debuginfoDir };
- else if (!arch.equals(NOARCH)
- && file.getName().contains("." + arch + ".rpm"))
- targetDirs = new File[] { archDir };
- else if (file.getName().contains(".noarch.rpm")) {
- List<File> dirs = new ArrayList<File>();
- for (String arch : rpmFactory.getArchs())
- dirs.add(new File(stagingDir, arch));
- targetDirs = dirs.toArray(new File[dirs.size()]);
- } else if (file.getName().contains(".rpm"))
- throw new SlcException("Don't know where to copy " + file);
- else {
- if (log.isTraceEnabled())
- log.trace("Skip " + file);
- continue rpms;
- }
-
- reposToRecreate.addAll(Arrays.asList(targetDirs));
- copyToDirs(file, targetDirs);
- }
-
- // recreate changed repos
- for (File repoToRecreate : reposToRecreate) {
- SystemCall createrepo = new SystemCall();
- createrepo.arg("createrepo");
- // sqllite db
- createrepo.arg("-d");
- // debuginfo
- if (!repoToRecreate.getName().equals(debuginfoDirName))
- createrepo.arg("-x").arg(debuginfoDirName + "/*");
- // quiet
- createrepo.arg("-q");
- createrepo.arg(repoToRecreate.getAbsolutePath());
-
- createrepo.setExecutor(executor);
- createrepo.run();
- log.info("Updated repo " + repoToRecreate);
- }
-
- // index staging workspace
- rpmFactory.indexWorkspace(rpmFactory.getStagingWorkspace());
- }
-
- /** Creates a mock call with all the common options such as config file etc. */
- protected SystemCall createBaseMockCall() {
- String mockCfg = rpmFactory.getMockConfig(arch);
- File mockConfigFile = rpmFactory.getMockConfigFile(arch, branch);
-
- // prepare mock call
- SystemCall mock = new SystemCall();
-
- if (arch != null)
- mock.arg("setarch").arg(arch);
- mock.arg(mockExecutable);
- mock.arg("-v");
- mock.arg("--configdir=" + mockConfigFile.getAbsoluteFile().getParent());
- if (arch != null)
- mock.arg("--arch=" + arch);
- mock.arg("-r").arg(mockCfg);
-
- mock.setLogCommand(true);
- mock.setExecutor(executor);
-
- return mock;
- }
-
- protected void copyToDirs(File file, File[] dirs) {
- for (File dir : dirs) {
- try {
- FileUtils.copyFileToDirectory(file, dir);
- if (log.isDebugEnabled())
- log.debug(file + " => " + dir);
- } catch (IOException e) {
- throw new SlcException("Cannot copy " + file + " to " + dir, e);
- }
- }
- }
-
- public void setArch(String arch) {
- this.arch = arch;
- }
-
- public void setRpmPackage(String rpmPackage) {
- this.rpmPackage = rpmPackage;
- }
-
- public void setBranch(String branch) {
- this.branch = branch;
- }
-
- public void setRpmFactory(RpmFactory env) {
- this.rpmFactory = env;
- }
-
- public void setExecutor(Executor executor) {
- this.executor = executor;
- }
-
- public void setMockExecutable(String mockExecutable) {
- this.mockExecutable = mockExecutable;
- }
-
- public void setPreBuildCommands(List<String> preBuildCommands) {
- this.preBuildCommands = preBuildCommands;
- }
-
-}
\ No newline at end of file
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-import java.io.File;
-import java.io.InputStream;
-import java.net.URL;
-import java.util.StringTokenizer;
-
-import javax.jcr.Node;
-import javax.jcr.Session;
-import javax.jcr.nodetype.NodeType;
-
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.io.IOUtils;
-import org.argeo.api.cms.CmsLog;
-import org.argeo.jcr.JcrUtils;
-import org.argeo.slc.SlcException;
-import org.argeo.slc.rpmfactory.RpmFactory;
-import org.argeo.slc.runtime.tasks.SystemCall;
-
-/**
- * Gather RPMs from various sources (local builds or third party) into a
- * consistent distributable set (typically to be used to generate an ISO).
- */
-public class CreateRpmDistribution implements Runnable {
- private final static CmsLog log = CmsLog
- .getLog(CreateRpmDistribution.class);
-
- private RpmFactory rpmFactory;
- private RpmDistribution rpmDistribution;
-
- private String arch = "x86_64";
-
- private String repoqueryExecutable = "/usr/bin/repoquery";
-
- @Override
- public void run() {
- Session session = null;
- // Reader reader = null;
- try {
- Node baseFolder = rpmFactory.newDistribution(rpmDistribution
- .getId());
- session = baseFolder.getSession();
- Node targetFolder = baseFolder.addNode(arch, NodeType.NT_FOLDER);
-
- SystemCall repoquery = new SystemCall();
- repoquery.arg(repoqueryExecutable);
-
- File yumConfigFile = rpmFactory.getYumRepoFile(arch);
- repoquery.arg("-c", yumConfigFile.getAbsolutePath());
- repoquery.arg("--requires");
- repoquery.arg("--resolve");
- repoquery.arg("--location");
- repoquery.arg("--archlist=" + arch);
-
- for (String rpmPackage : rpmDistribution.getPackages())
- repoquery.arg(rpmPackage);
-
- if (log.isDebugEnabled())
- log.debug("Command:\n" + repoquery.asCommand());
-
- String output = repoquery.function();
-
- if (log.isDebugEnabled())
- log.debug(output + "\n");
- // reader = new StringReader(output);
- StringTokenizer lines = new StringTokenizer(output, "\n");
- // List<String> dependencies = IOUtils.readLines(reader);
- dependencies: while (lines.hasMoreTokens()) {
- String urlStr = lines.nextToken();
- InputStream in = null;
- try {
- URL url = new URL(urlStr);
- String fileName = FilenameUtils.getName(url.getFile());
- String[] tokens = fileName.split("-");
- if (tokens.length < 3)
- continue dependencies;
- StringBuilder buf = new StringBuilder();
- for (int i = 0; i < tokens.length - 2; i++) {
- if (i != 0)
- buf.append('-');
- buf.append(tokens[i]);
-
- }
- String packageName = buf.toString();
- for (RpmPackageSet excluded : rpmDistribution
- .getExcludedPackages()) {
- if (excluded.contains(packageName)) {
- if (log.isDebugEnabled())
- log.debug("Skipped " + packageName);
- continue dependencies;// skip
- }
- }
- in = url.openStream();
- JcrUtils.copyStreamAsFile(targetFolder, fileName, in);
- targetFolder.getSession().save();
- if (log.isDebugEnabled())
- log.debug("Copied " + packageName);
- } catch (Exception e) {
- log.error("Cannot copy " + urlStr, e);
- } finally {
- IOUtils.closeQuietly(in);
- }
- }
-
- // createrepo
- File workspaceDir = rpmFactory.getWorkspaceDir(rpmDistribution
- .getId());
- SystemCall createrepo = new SystemCall();
- createrepo.arg("createrepo");
- createrepo.arg("-q");
- createrepo.arg("-d");
- File archDir = new File(workspaceDir.getPath()
- + targetFolder.getPath());
- createrepo.arg(archDir.getAbsolutePath());
- createrepo.run();
- } catch (Exception e) {
- throw new SlcException("Cannot generate distribution "
- + rpmDistribution.getId(), e);
- } finally {
- JcrUtils.logoutQuietly(session);
- // IOUtils.closeQuietly(reader);
- }
- }
-
- public void setRpmDistribution(RpmDistribution rpmDistribution) {
- this.rpmDistribution = rpmDistribution;
- }
-
- public void setRpmFactory(RpmFactory rpmFactory) {
- this.rpmFactory = rpmFactory;
- }
-
- public void setArch(String arch) {
- this.arch = arch;
- }
-
- public void setRepoqueryExecutable(String yumdownloaderExecutable) {
- this.repoqueryExecutable = yumdownloaderExecutable;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.exec.Executor;
-import org.apache.commons.io.FileUtils;
-import org.argeo.api.cms.CmsLog;
-import org.argeo.slc.SlcException;
-import org.argeo.slc.rpmfactory.RpmFactory;
-import org.argeo.slc.runtime.tasks.SystemCall;
-
-/** Releases the content of staging to a public repository. */
-public class ReleaseStaging implements Runnable {
- private final static CmsLog log = CmsLog.getLog(ReleaseStaging.class);
-
- private RpmFactory rpmFactory;
- private Executor executor;
-
- private String debuginfoDirName = "debuginfo";
-
- @Override
- public void run() {
- String sourceWorkspace = rpmFactory.getStagingWorkspace();
- File sourceRepoDir = rpmFactory.getWorkspaceDir(sourceWorkspace);
- String targetWorkspace = rpmFactory.getTestingWorkspace() != null ? rpmFactory
- .getTestingWorkspace() : rpmFactory.getStableWorkspace();
- File targetRepoDir = rpmFactory.getWorkspaceDir(targetWorkspace);
- List<File> reposToRecreate = new ArrayList<File>();
-
- stagingChildren: for (File dir : sourceRepoDir.listFiles()) {
- if (!dir.isDirectory())
- continue stagingChildren;
- if (dir.getName().equals("lost+found"))
- continue stagingChildren;
-
- File targetDir = new File(targetRepoDir, dir.getName());
- try {
- FileUtils.copyDirectory(dir, targetDir);
- if (log.isDebugEnabled())
- log.debug(dir + " => " + targetDir);
- } catch (IOException e) {
- throw new SlcException(sourceRepoDir
- + " could not be copied properly, check it manually."
- + " Metadata have NOT been updated.", e);
- }
-
- reposToRecreate.add(dir);
- reposToRecreate.add(targetDir);
- File debugInfoDir = new File(dir, debuginfoDirName);
- if (debugInfoDir.exists())
- reposToRecreate.add(debugInfoDir);
- File targetDebugInfoDir = new File(targetDir, debuginfoDirName);
- if (targetDebugInfoDir.exists())
- reposToRecreate.add(targetDebugInfoDir);
-
- }
-
- // clear staging
- for (File dir : sourceRepoDir.listFiles()) {
- try {
- if (dir.getName().equals("lost+found"))
- continue;
- if (dir.isDirectory())
- FileUtils.deleteDirectory(dir);
- } catch (IOException e) {
- log.error("Could not delete " + dir + ". " + e);
- }
- }
-
- // recreate changed repos
- for (File repoToRecreate : reposToRecreate) {
- repoToRecreate.mkdirs();
- SystemCall createrepo = new SystemCall();
- createrepo.arg("createrepo");
- // sqllite db
- createrepo.arg("-d");
- // debuginfo
- if (!repoToRecreate.getName().equals(debuginfoDirName))
- createrepo.arg("-x").arg(debuginfoDirName + "/*");
- // quiet
- createrepo.arg("-q");
- createrepo.arg(repoToRecreate.getAbsolutePath());
-
- createrepo.setExecutor(executor);
- createrepo.run();
- log.info("Updated repo " + repoToRecreate);
- }
-
- rpmFactory.indexWorkspace(sourceWorkspace);
- rpmFactory.indexWorkspace(targetWorkspace);
- }
-
- public void setRpmFactory(RpmFactory rpmFactory) {
- this.rpmFactory = rpmFactory;
- }
-
- public void setExecutor(Executor executor) {
- this.executor = executor;
- }
-
- public void setDebuginfoDirName(String debuginfoDirName) {
- this.debuginfoDirName = debuginfoDirName;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-import java.util.List;
-
-/** A consistent distributable set of RPM. */
-public class RpmDistribution {
- private String id;
- private List<String> packages;
- private List<RpmPackageSet> excludedPackages;
-
- public List<String> getPackages() {
- return packages;
- }
-
- public void setPackages(List<String> packages) {
- this.packages = packages;
- }
-
- public String getId() {
- return id;
- }
-
- public void setId(String id) {
- this.id = id;
- }
-
- public List<RpmPackageSet> getExcludedPackages() {
- return excludedPackages;
- }
-
- public void setExcludedPackages(List<RpmPackageSet> excludedPackages) {
- this.excludedPackages = excludedPackages;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.GregorianCalendar;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import javax.jcr.Node;
-import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-
-import org.apache.commons.io.FileUtils;
-import org.argeo.api.cms.CmsLog;
-import org.argeo.jcr.JcrUtils;
-import org.argeo.slc.SlcConstants;
-import org.argeo.slc.SlcException;
-import org.argeo.slc.repo.NodeIndexerVisitor;
-import org.argeo.slc.rpmfactory.RpmFactory;
-import org.argeo.slc.rpmfactory.RpmRepository;
-import org.argeo.slc.runtime.tasks.SystemCall;
-
-/**
- * Defines a build environment. This information is typically used by other
- * components performing the various actions related to RPM build.
- */
-public class RpmFactoryImpl implements RpmFactory {
- private CmsLog log = CmsLog.getLog(RpmFactoryImpl.class);
-
- private Repository rpmRepository;
- private Repository distRepository;
-
- private String id;
- private List<RpmRepository> repositories = new ArrayList<RpmRepository>();
- private List<String> archs = new ArrayList<String>();
-
- private String rpmBase = "/mnt/slc/repos/rpm";
- private String distBase = "/mnt/slc/repos/dist";
- private String mockVar = "/var/lib/mock";
- private String mockEtc = "/etc/mock";
-
- private DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_HHmm");
-
- private String gitWorkspace = "git";
-
- private String localUrlBase = "http://localhost:7070/";
- /** If not null or empty, this is a developer instance. */
- private String gitDevBaseUrl = null;
-
- private Boolean withTestingRepository = false;
-
- private String yumConfigMainSection = "cachedir=/var/cache/yum\n"
- + "debuglevel=1\n" + "reposdir=/dev/null\n"
- + "logfile=/var/log/yum.log\n" + "retries=20\n" + "obsoletes=1\n"
- + "gpgcheck=0\n" + "assumeyes=1\n" + "syslog_ident=mock\n"
- + "syslog_device=\n" + "http_caching=none\n";
-
- private String defaultMacroFiles = "/usr/lib/rpm/macros:"
- + "/usr/lib/rpm/ia32e-linux/macros:"
- + "/usr/lib/rpm/redhat/macros:" + "/etc/rpm/macros.*:"
- + "/etc/rpm/macros:" + "/etc/rpm/ia32e-linux/macros:"
- + "~/.rpmmacros";
- private Map<String, String> rpmmacros = new HashMap<String, String>();
-
- // set by init
- private String proxiedReposBase;
- private String managedReposBase;
-
- private String stagingWorkspace;
- private String testingWorkspace;
- private String stableWorkspace;
-
- private File rpmFactoryBaseDir;
- private File mockConfDir;
- private File yumConfDir;
-
- public void init() {
- // local URL bases
- proxiedReposBase = localUrlBase + "repo/rpm/";
- managedReposBase = localUrlBase + "data/public/rpm/";
-
- // local directories
- rpmFactoryBaseDir.mkdirs();
- mockConfDir = new File(rpmFactoryBaseDir.getPath() + "/conf/mock");
- mockConfDir.mkdirs();
- yumConfDir = new File(rpmFactoryBaseDir.getPath() + "/conf/yum");
- yumConfDir.mkdirs();
-
- // managed repositories
- stagingWorkspace = id + "-staging";
- if (withTestingRepository)
- testingWorkspace = id + "-testing";
- stableWorkspace = id;
-
- initDistWorkspace(stableWorkspace);
- initGitWorkspace();
- initRpmWorkspace(stagingWorkspace);
- if (withTestingRepository)
- initRpmWorkspace(testingWorkspace);
- initRpmWorkspace(stableWorkspace);
- }
-
- protected void initRpmWorkspace(String workspace) {
- Session session = null;
- try {
- session = JcrUtils.loginOrCreateWorkspace(rpmRepository, workspace);
- JcrUtils.addPrivilege(session, "/", "anonymous", "jcr:read");
- JcrUtils.addPrivilege(session, "/", SlcConstants.ROLE_SLC,
- "jcr:all");
-
- for (String arch : archs) {
- Node archFolder = JcrUtils.mkfolders(session, "/" + arch);
- session.save();
- File workspaceDir = getWorkspaceDir(workspace);
- try {
- if (!archFolder.hasNode("repodata")) {
- // touch a file in order to make sure this is properly
- // mounted.
- File touch = new File(workspaceDir, ".touch");
- touch.createNewFile();
- touch.delete();
-
- SystemCall createrepo = new SystemCall();
- createrepo.arg("createrepo");
- createrepo.arg("-q");
- File archDir = new File(workspaceDir, arch);
- createrepo.arg(archDir.getAbsolutePath());
- createrepo.run();
- }
- } catch (IOException e) {
- log.error(workspaceDir + " not properly mounted.", e);
- }
- }
- } catch (Exception e) {
- throw new SlcException("Cannot initialize workspace " + workspace,
- e);
- } finally {
- JcrUtils.logoutQuietly(session);
- }
- }
-
- /** Caller must logout the underlying session. */
- public Node newDistribution(String distributionId) {
- Session session = null;
- try {
- session = JcrUtils.loginOrCreateWorkspace(rpmRepository,
- distributionId);
- JcrUtils.addPrivilege(session, "/", "anonymous", "jcr:read");
- JcrUtils.addPrivilege(session, "/", SlcConstants.ROLE_SLC,
- "jcr:all");
-
- Calendar now = new GregorianCalendar();
- String folderName = dateFormat.format(now.getTime());
- return JcrUtils.mkfolders(session, "/" + folderName);
- } catch (Exception e) {
- JcrUtils.logoutQuietly(session);
- throw new SlcException("Cannot initialize distribution workspace "
- + distributionId, e);
- }
- }
-
- protected void initGitWorkspace() {
- Session session = null;
- try {
- session = JcrUtils.loginOrCreateWorkspace(rpmRepository,
- gitWorkspace);
- JcrUtils.addPrivilege(session, "/", "anonymous", "jcr:read");
- JcrUtils.addPrivilege(session, "/", SlcConstants.ROLE_SLC,
- "jcr:all");
- } catch (Exception e) {
- throw new SlcException("Cannot initialize workspace "
- + gitWorkspace, e);
- } finally {
- JcrUtils.logoutQuietly(session);
- }
- }
-
- protected void initDistWorkspace(String workspace) {
- Session session = null;
- try {
- session = JcrUtils
- .loginOrCreateWorkspace(distRepository, workspace);
- JcrUtils.addPrivilege(session, "/", "anonymous", "jcr:read");
- } catch (RepositoryException e) {
- throw new SlcException("Cannot initialize workspace " + workspace,
- e);
- } finally {
- JcrUtils.logoutQuietly(session);
- }
- }
-
- public void destroy() {
-
- }
-
- public String generateMockConfigFile(String arch, String branch) {
- StringBuffer buf = new StringBuffer();
-
- buf.append("config_opts['root'] = '" + getIdWithArch(arch) + "'\n");
- buf.append("config_opts['target_arch'] = '" + arch + "'\n");
- buf.append("config_opts['legal_host_arches'] = ('" + arch + "',)\n");
- buf.append("config_opts['chroot_setup_cmd'] = 'groupinstall buildsys-build'\n");
- // buf.append("config_opts['dist'] = 'el6'\n");
- buf.append("config_opts['plugin_conf']['yum_cache_enable'] = False\n");
-
- buf.append("config_opts['scm'] = False\n");
- buf.append("config_opts['scm_opts']['method'] = 'git'\n");
- buf.append("config_opts['scm_opts']['spec'] = 'SCM_PKG.spec'\n");
- buf.append("config_opts['scm_opts']['ext_src_dir'] = '"
- + getSourcesDir().getAbsolutePath() + "'\n");
- buf.append("config_opts['scm_opts']['git_timestamps'] = True\n");
-
- // development
- if (gitDevBaseUrl != null && !gitDevBaseUrl.trim().equals(""))
- buf.append("config_opts['scm_opts']['git_get'] = 'git clone "
- + (branch != null ? "-b " + branch : "") + " "
- + gitDevBaseUrl + "/SCM_PKG SCM_PKG'\n");
- else
- buf.append("config_opts['scm_opts']['git_get'] = 'git clone "
- + (branch != null ? "-b " + branch : "") + " "
- + getGitBaseUrl() + "/SCM_PKG.git SCM_PKG'\n");
-
- buf.append("\nconfig_opts['yum.conf'] = \"\"\"\n");
- buf.append(generateYumConfigFile(arch)).append('\n');
- buf.append("\"\"\"\n");
- return buf.toString();
- }
-
- public String generateYumConfigFile(String arch) {
- StringBuffer buf = new StringBuffer();
- buf.append("[main]\n");
- buf.append(yumConfigMainSection).append('\n');
-
- for (RpmRepository repository : repositories) {
- buf.append('[').append(repository.getId()).append("]\n");
- buf.append("name=").append(repository.getId()).append('\n');
- if (repository instanceof ThirdPartyRpmRepository) {
- buf.append("#baseurl=").append(repository.getUrl())
- .append(arch).append('/').append("\n");
- buf.append("baseurl=").append(proxiedReposBase)
- .append(repository.getId()).append('/').append(arch)
- .append('/').append("\n");
- if (((ThirdPartyRpmRepository) repository).getYumConf() != null)
- buf.append(
- ((ThirdPartyRpmRepository) repository).getYumConf()
- .trim()).append('\n');
- }
- }
-
- // managed repos
- addManagedRepository(buf, stagingWorkspace, arch);
- if (withTestingRepository)
- addManagedRepository(buf, testingWorkspace, arch);
- addManagedRepository(buf, stableWorkspace, arch);
- return buf.toString();
- }
-
- protected void addManagedRepository(StringBuffer buf, String workspace,
- String arch) {
- buf.append('[').append(workspace).append("]\n");
- buf.append("baseurl=").append(managedReposBase).append(workspace)
- .append('/').append(arch).append('/').append("\n");
- buf.append("gpgcheck=0").append("\n");
- }
-
- /** Creates a mock config file. */
- public File getMockConfigFile(String arch, String branch) {
- File mockSiteDefaultsFile = new File(mockConfDir, "site-defaults.cfg");
- File mockLoggingFile = new File(mockConfDir, "logging.ini");
- File mockConfigFile = new File(mockConfDir, getIdWithArch(arch)
- + ".cfg");
- try {
- if (!mockSiteDefaultsFile.exists())
- mockSiteDefaultsFile.createNewFile();
- if (!mockLoggingFile.exists())
- FileUtils.copyFile(new File(mockEtc + "/logging.ini"),
- mockLoggingFile);
-
- FileUtils.writeStringToFile(mockConfigFile,
- generateMockConfigFile(arch, branch));
- return mockConfigFile;
- } catch (IOException e) {
- throw new SlcException("Cannot write mock config file to "
- + mockConfigFile, e);
- }
- }
-
- /** Creates a yum config file. */
- public File getYumRepoFile(String arch) {
- File yumConfigFile = new File(yumConfDir, getIdWithArch(arch) + ".repo");
- try {
- FileUtils.writeStringToFile(yumConfigFile,
- generateYumConfigFile(arch));
- return yumConfigFile;
- } catch (IOException e) {
- throw new SlcException("Cannot write yum config file to "
- + yumConfigFile, e);
- }
- }
-
- public File getResultDir(String arch) {
- return new File(mockVar + "/" + getIdWithArch(arch) + "/result");
- }
-
- public File getWorkspaceDir(String workspace) {
- return new File(rpmBase + "/" + workspace);
- }
-
- public File getSourcesDir() {
- return new File(distBase + "/" + stableWorkspace);
- }
-
- public String getMockConfig(String arch) {
- return getIdWithArch(arch);
- }
-
- public String getIdWithArch(String arch) {
- return id + "-" + arch;
- }
-
- public String getGitBaseUrl() {
- return managedReposBase + gitWorkspace;
- }
-
- public void indexWorkspace(String workspace) {
- Session session = null;
- try {
- session = rpmRepository.login(workspace);
- session.getRootNode().accept(
- new NodeIndexerVisitor(new RpmIndexer()));
- if (log.isDebugEnabled())
- log.debug("Indexed workspace " + workspace);
- } catch (RepositoryException e) {
- throw new SlcException("Cannot index workspace " + workspace, e);
- } finally {
- JcrUtils.logoutQuietly(session);
- }
- }
-
- public Boolean isDeveloperInstance() {
- return gitDevBaseUrl != null;
- }
-
- /** Write (topdir)/rpmmacros and (topdir)/rpmrc */
- public void writeRpmbuildConfigFiles(File topdir) {
- writeRpmbuildConfigFiles(topdir, new File(topdir, "rpmmacros"),
- new File(topdir, "rpmrc"));
- }
-
- public void writeRpmbuildConfigFiles(File topdir, File rpmmacroFile,
- File rpmrcFile) {
- try {
- List<String> macroLines = new ArrayList<String>();
- macroLines.add("%_topdir " + topdir.getCanonicalPath());
- for (String macroKey : rpmmacros.keySet()) {
- macroLines.add(macroKey + " " + rpmmacros.get(macroKey));
- }
- FileUtils.writeLines(rpmmacroFile, macroLines);
-
- List<String> rpmrcLines = new ArrayList<String>();
- rpmrcLines.add("include: /usr/lib/rpm/rpmrc");
- rpmrcLines.add("macrofiles: " + defaultMacroFiles + ":"
- + rpmmacroFile.getCanonicalPath());
- FileUtils.writeLines(rpmrcFile, rpmrcLines);
- } catch (IOException e) {
- throw new SlcException("Cannot write rpmbuild config files", e);
- }
-
- }
-
- public Map<String, String> getRpmmacros() {
- return rpmmacros;
- }
-
- public void setRpmmacros(Map<String, String> rpmmacros) {
- this.rpmmacros = rpmmacros;
- }
-
- public String getDefaultMacroFiles() {
- return defaultMacroFiles;
- }
-
- public void setDefaultMacroFiles(String defaultMacroFiles) {
- this.defaultMacroFiles = defaultMacroFiles;
- }
-
- public void setArchs(List<String> archs) {
- this.archs = archs;
- }
-
- public List<String> getArchs() {
- return archs;
- }
-
- public void setRpmBase(String stagingBase) {
- this.rpmBase = stagingBase;
- }
-
- public void setId(String id) {
- this.id = id;
- }
-
- public void setMockVar(String mockVar) {
- this.mockVar = mockVar;
- }
-
- public void setRpmRepository(Repository rpmRepository) {
- this.rpmRepository = rpmRepository;
- }
-
- public void setDistRepository(Repository distRepository) {
- this.distRepository = distRepository;
- }
-
- public void setLocalUrlBase(String localUrlBase) {
- this.localUrlBase = localUrlBase;
- }
-
- public void setYumConfigMainSection(String yumConfigMainSection) {
- this.yumConfigMainSection = yumConfigMainSection;
- }
-
- public void setRepositories(List<RpmRepository> repositories) {
- this.repositories = repositories;
- }
-
- public void setRpmFactoryBaseDir(File rpmFactoryBaseDir) {
- this.rpmFactoryBaseDir = rpmFactoryBaseDir;
- }
-
- public String getStagingWorkspace() {
- return stagingWorkspace;
- }
-
- public String getTestingWorkspace() {
- return testingWorkspace;
- }
-
- public String getStableWorkspace() {
- return stableWorkspace;
- }
-
- public void setWithTestingRepository(Boolean withTestingRepository) {
- this.withTestingRepository = withTestingRepository;
- }
-
- public void setGitDevBaseUrl(String gitBaseUrl) {
- this.gitDevBaseUrl = gitBaseUrl;
- }
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-import static org.redline_rpm.header.Header.HeaderTag.HEADERIMMUTABLE;
-import static org.redline_rpm.header.Signature.SignatureTag.SIGNATURES;
-
-import java.io.InputStream;
-import java.nio.ByteBuffer;
-import java.nio.channels.Channels;
-
-import javax.jcr.Node;
-import javax.jcr.Property;
-import javax.jcr.nodetype.NodeType;
-
-import org.apache.commons.io.FilenameUtils;
-import org.argeo.slc.SlcException;
-import org.argeo.slc.SlcNames;
-import org.argeo.slc.SlcTypes;
-import org.argeo.slc.repo.NodeIndexer;
-import org.redline_rpm.ChannelWrapper.Key;
-import org.redline_rpm.ReadableChannelWrapper;
-import org.redline_rpm.header.AbstractHeader;
-import org.redline_rpm.header.Format;
-import org.redline_rpm.header.Header;
-
-/** Indexes an RPM file. */
-public class RpmIndexer implements NodeIndexer, SlcNames {
- private Boolean force = false;
-
- @Override
- public Boolean support(String path) {
- return FilenameUtils.getExtension(path).equals("rpm");
- }
-
- @Override
- public void index(Node node) {
- try {
- if (!support(node.getPath()))
- return;
-
- // Already indexed
- if (!force && node.isNodeType(SlcTypes.SLC_RPM))
- return;
-
- if (!node.isNodeType(NodeType.NT_FILE))
- return;
-
- InputStream in = node.getNode(Node.JCR_CONTENT)
- .getProperty(Property.JCR_DATA).getBinary().getStream();
- ReadableChannelWrapper channel = new ReadableChannelWrapper(
- Channels.newChannel(in));
- Format format = readRpmInfo(channel);
-
- node.addMixin(SlcTypes.SLC_RPM);
- node.setProperty(SLC_NAME, readTag(format, Header.HeaderTag.NAME));
- String rpmVersion = readTag(format, Header.HeaderTag.VERSION);
- String rpmRelease = readTag(format, Header.HeaderTag.RELEASE);
- node.setProperty(SLC_RPM_VERSION, rpmVersion);
- node.setProperty(SLC_RPM_RELEASE, rpmRelease);
- node.setProperty(SLC_VERSION, rpmVersion + "-" + rpmRelease);
-
- String arch = readTag(format, Header.HeaderTag.ARCH);
- if (arch != null)
- node.setProperty(SLC_RPM_ARCH, arch);
-
- String archiveSize = readTag(format, Header.HeaderTag.ARCHIVESIZE);
- if (archiveSize != null)
- node.setProperty(SLC_RPM_ARCHIVE_SIZE,
- Long.parseLong(archiveSize));
-
- node.getSession().save();
- } catch (Exception e) {
- throw new SlcException("Cannot index " + node, e);
- }
-
- }
-
- @SuppressWarnings("unused")
- public Format readRpmInfo(ReadableChannelWrapper channel) throws Exception {
- Format format = new Format();
-
- Key<Integer> lead = channel.start();
- format.getLead().read(channel);
- // System.out.println( "Lead ended at '" + in.finish( lead) + "'.");
-
- Key<Integer> signature = channel.start();
- int count = format.getSignature().read(channel);
- int expected = ByteBuffer
- .wrap((byte[]) format.getSignature().getEntry(SIGNATURES)
- .getValues(), 8, 4).getInt()
- / -16;
- // System.out.println( "Signature ended at '" + in.finish( signature) +
- // "' and contained '" + count + "' headers (expected '" + expected +
- // "').");
-
- Key<Integer> header = channel.start();
- count = format.getHeader().read(channel);
- expected = ByteBuffer.wrap(
- (byte[]) format.getHeader().getEntry(HEADERIMMUTABLE)
- .getValues(), 8, 4).getInt()
- / -16;
- // System.out.println( "Header ended at '" + in.finish( header) +
- // " and contained '" + count + "' headers (expected '" + expected +
- // "').");
-
- return format;
- }
-
- private String readTag(Format format, Header.HeaderTag tag) {
- AbstractHeader.Entry<?> entry = format.getHeader().getEntry(tag);
- if (entry == null)
- return null;
- if (entry.getValues() == null)
- return null;
- Object[] values = (Object[]) entry.getValues();
- return values[0].toString().trim();
- }
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-/** Set of RPM packages */
-public interface RpmPackageSet {
- public Boolean contains(String rpmPackage);
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.Set;
-
-import javax.jcr.Node;
-import javax.jcr.RepositoryException;
-import javax.jcr.Session;
-import javax.jcr.nodetype.NodeType;
-import javax.jcr.security.AccessControlException;
-
-import org.argeo.api.cms.CmsLog;
-import org.argeo.cms.ArgeoNames;
-import org.argeo.jcr.JcrUtils;
-import org.argeo.jcr.proxy.AbstractUrlProxy;
-import org.argeo.slc.SlcConstants;
-import org.argeo.slc.SlcException;
-import org.argeo.slc.SlcNames;
-import org.argeo.slc.SlcTypes;
-import org.argeo.slc.repo.RepoConstants;
-import org.argeo.slc.rpmfactory.RpmProxyService;
-import org.argeo.slc.rpmfactory.RpmRepository;
-
-/** Synchronises the node repository with remote Maven repositories */
-public class RpmProxyServiceImpl extends AbstractUrlProxy implements
- RpmProxyService, ArgeoNames, SlcNames {
- private final static CmsLog log = CmsLog.getLog(RpmProxyServiceImpl.class);
-
- private Set<RpmRepository> defaultRepositories = new HashSet<RpmRepository>();
-
- @Override
- protected void beforeInitSessionSave(Session session)
- throws RepositoryException {
- JcrUtils.addPrivilege(session, "/", "anonymous", "jcr:read");
- try {
- JcrUtils.addPrivilege(session, "/", SlcConstants.ROLE_SLC,
- "jcr:all");
- } catch (AccessControlException e) {
- if (log.isTraceEnabled())
- log.trace("Cannot give jcr:all privileges to "+SlcConstants.ROLE_SLC);
- }
-
- JcrUtils.mkdirsSafe(session, RepoConstants.PROXIED_REPOSITORIES);
- }
-
- /**
- * Retrieve and add this file to the repository
- */
- @Override
- protected Node retrieve(Session session, String path) {
- StringBuilder relativePathBuilder = new StringBuilder();
- String repoId = extractRepoId(path, relativePathBuilder);
- // remove starting '/'
- String relativePath = relativePathBuilder.toString().substring(1);
-
- RpmRepository sourceRepo = null;
- for (Iterator<RpmRepository> reposIt = defaultRepositories.iterator(); reposIt
- .hasNext();) {
- RpmRepository rpmRepo = reposIt.next();
- if (rpmRepo.getId().equals(repoId)) {
- sourceRepo = rpmRepo;
- break;
- }
- }
-
- if (sourceRepo == null)
- throw new SlcException("No RPM repository found for " + path);
-
- try {
- String baseUrl = sourceRepo.getUrl();
- String remoteUrl = baseUrl + relativePath;
- Node node = proxyUrl(session, remoteUrl, path);
- if (node != null) {
- registerSource(sourceRepo, node, remoteUrl);
- if (log.isDebugEnabled())
- log.debug("Imported " + remoteUrl + " to " + node);
- return node;
- }
- } catch (Exception e) {
- throw new SlcException("Cannot proxy " + path, e);
- }
- JcrUtils.discardQuietly(session);
- throw new SlcException("No proxy found for " + path);
- }
-
- protected void registerSource(RpmRepository sourceRepo, Node node,
- String remoteUrl) throws RepositoryException {
- node.addMixin(SlcTypes.SLC_KNOWN_ORIGIN);
- Node origin;
- if (!node.hasNode(SLC_ORIGIN))
- origin = node.addNode(SLC_ORIGIN, SlcTypes.SLC_PROXIED);
- else
- origin = node.getNode(SLC_ORIGIN);
-
- // proxied repository
- Node proxiedRepository;
- String proxiedRepositoryPath = RepoConstants.PROXIED_REPOSITORIES + '/'
- + sourceRepo.getId();
- Session session = node.getSession();
- if (session.itemExists(proxiedRepositoryPath)) {
- proxiedRepository = session.getNode(proxiedRepositoryPath);
- } else {
- proxiedRepository = session.getNode(
- RepoConstants.PROXIED_REPOSITORIES).addNode(
- sourceRepo.getId());
- proxiedRepository.addMixin(NodeType.MIX_REFERENCEABLE);
- JcrUtils.urlToAddressProperties(proxiedRepository,
- sourceRepo.getUrl());
- proxiedRepository.setProperty(SLC_URL, sourceRepo.getUrl());
- }
-
- origin.setProperty(SLC_PROXY, proxiedRepository);
- JcrUtils.urlToAddressProperties(origin, remoteUrl);
- }
-
- /** Returns the first token of the path */
- protected String extractRepoId(String path, StringBuilder relativePath) {
- StringBuilder workspace = new StringBuilder();
- StringBuilder buf = workspace;
- for (int i = 1; i < path.length(); i++) {
- char c = path.charAt(i);
- if (c == '/') {
- buf = relativePath;
- }
- buf.append(c);
- }
- return workspace.toString();
- }
-
- @Override
- protected Boolean shouldUpdate(Session clientSession, String nodePath) {
- // if (nodePath.contains("/repodata/"))
- // return true;
- return super.shouldUpdate(clientSession, nodePath);
- }
-
- public void setDefaultRepositories(Set<RpmRepository> defaultRepositories) {
- this.defaultRepositories = defaultRepositories;
- }
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class RpmSpecFile {
- private Path specFile;
-
- private String name;
- private String version;
- private String release;
- private Map<String, String> sources = new HashMap<String, String>();
- private Map<String, String> patches = new HashMap<String, String>();
-
- public RpmSpecFile(Path specFile) {
- this.specFile = specFile;
- parseSpecFile();
- }
-
- public void init() {
- parseSpecFile();
- }
-
- protected void parseSpecFile() {
- try {
- List<String> lines = (List<String>) Files.readAllLines(specFile);
-
- lines: for (String line : lines) {
- int indexSemiColon = line.indexOf(':');
- if (indexSemiColon <= 0)
- continue lines;
- String directive = line.substring(0, indexSemiColon).trim();
- String value = line.substring(indexSemiColon + 1).trim();
- if ("name".equals(directive.toLowerCase()))
- name = value;
- else if ("version".equals(directive.toLowerCase()))
- version = value;
- else if ("release".equals(directive.toLowerCase()))
- release = value;
- else if (directive.toLowerCase().startsWith("source"))
- sources.put(directive, interpret(value));
- else if (directive.toLowerCase().startsWith("patch"))
- patches.put(directive, interpret(value));
- }
-
- } catch (IOException e) {
- throw new RuntimeException("Cannot parse spec file " + specFile, e);
- }
- }
-
- protected String interpret(String value) {
- StringBuffer buf = new StringBuffer(value.length());
- StringBuffer currKey = null;
- boolean mayBeKey = false;
- chars: for (char c : value.toCharArray()) {
- if (c == '%')
- mayBeKey = true;
- else if (c == '{') {
- if (mayBeKey)
- currKey = new StringBuffer();
- } else if (c == '}') {
- if (currKey == null)
- continue chars;
- String key = currKey.toString();
- if ("name".equals(key.toLowerCase()))
- buf.append(name);
- else if ("version".equals(key.toLowerCase()))
- buf.append(version);
- else
- buf.append("%{").append(key).append('}');
- currKey = null;
- } else {
- if (currKey != null)
- currKey.append(c);
- else
- buf.append(c);
- }
- }
- return buf.toString();
- }
-
- public Path getSpecFile() {
- return specFile;
- }
-
- public String getName() {
- return name;
- }
-
- public String getVersion() {
- return version;
- }
-
- public String getRelease() {
- return release;
- }
-
- public Map<String, String> getSources() {
- return sources;
- }
-
- public Map<String, String> getPatches() {
- return patches;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-/** Local build repository, used only for builds. */
-public class StagingRpmRepository extends AbstractRpmRepository {
-
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-/**
- * A repository of third party RPMs used for the build. RPM used by the builds
- * will be cached within the system.
- */
-public class ThirdPartyRpmRepository extends AbstractRpmRepository {
- private String yumConf;
-
- public String getYumConf() {
- return yumConf;
- }
-
- public void setYumConf(String yumConf) {
- this.yumConf = yumConf;
- }
-
-}
+++ /dev/null
-package org.argeo.slc.rpmfactory.core;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.Set;
-import java.util.StringTokenizer;
-import java.util.TreeSet;
-
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.io.LineIterator;
-import org.argeo.api.cms.CmsLog;
-import org.argeo.slc.SlcException;
-
-/**
- * Reads the output of a 'yum list all' command and interpret the list of
- * packages.
- */
-public class YumListParser implements RpmPackageSet {
- private final static CmsLog log = CmsLog.getLog(YumListParser.class);
-
- private Set<String> installed = new TreeSet<String>();
- /** Not installed but available */
- private Set<String> installable = new TreeSet<String>();
-
- private Path yumListOutput;
-
- public void init() {
- if (yumListOutput != null) {
- try (InputStream in = Files.newInputStream(yumListOutput)) {
- load(in);
- if (log.isDebugEnabled())
- log.debug(installed.size() + " installed, " + installable.size() + " installable, from "
- + yumListOutput);
- } catch (IOException e) {
- throw new SlcException("Cannot initialize yum list parser", e);
- }
- }
- }
-
- public Boolean contains(String packageName) {
- if (installed.contains(packageName))
- return true;
- else
- return installable.contains(packageName);
- }
-
- protected void load(InputStream in) throws IOException {
- Boolean readingInstalled = false;
- Boolean readingAvailable = false;
- LineIterator it = IOUtils.lineIterator(in, "UTF-8");
- while (it.hasNext()) {
- String line = it.nextLine();
- if (line.trim().equals("Installed Packages")) {
- readingInstalled = true;
- } else if (line.trim().equals("Available Packages")) {
- readingAvailable = true;
- readingInstalled = false;
- } else if (readingAvailable) {
- if (Character.isLetterOrDigit(line.charAt(0))) {
- installable.add(extractRpmName(line));
- }
- } else if (readingInstalled) {
- if (Character.isLetterOrDigit(line.charAt(0))) {
- installed.add(extractRpmName(line));
- }
- }
- }
- }
-
- protected String extractRpmName(String line) {
- StringTokenizer st = new StringTokenizer(line, " \t");
- String packageName = st.nextToken();
- // consider the arch as an extension
- return FilenameUtils.getBaseName(packageName);
- // return packageName.split("\\.")[0];
- }
-
- public Set<String> getInstalled() {
- return installed;
- }
-
- public Set<String> getInstallable() {
- return installable;
- }
-
- public void setYumListOutput(Path yumListOutput) {
- this.yumListOutput = yumListOutput;
- }
-
-}
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" output="target/classes" path="src"/>
+ <classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-11"/>
+ <classpathentry kind="output" path="bin"/>
+</classpath>
--- /dev/null
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>org.argeo.slc.rpmfactory</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.ManifestBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.pde.SchemaBuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ <nature>org.eclipse.pde.PluginNature</nature>
+ </natures>
+</projectDescription>
--- /dev/null
+/MANIFEST.MF
--- /dev/null
+Import-Package: javax.jcr.nodetype,\
+org.argeo.slc.repo,\
+org.osgi.*;version=0.0.0,\
+*
+
\ No newline at end of file
--- /dev/null
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <parent>
+ <groupId>org.argeo.slc</groupId>
+ <artifactId>argeo-slc</artifactId>
+ <version>2.3-SNAPSHOT</version>
+ <relativePath>..</relativePath>
+ </parent>
+ <artifactId>org.argeo.slc.factory</artifactId>
+ <name>SLC Factory</name>
+ <dependencies>
+ <!-- SLC -->
+ <dependency>
+ <groupId>org.argeo.slc</groupId>
+ <artifactId>org.argeo.slc.runtime</artifactId>
+ <version>2.3-SNAPSHOT</version>
+ </dependency>
+
+
+ <dependency>
+ <groupId>org.argeo.slc</groupId>
+ <artifactId>org.argeo.slc.repo</artifactId>
+ <version>2.3-SNAPSHOT</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.argeo.commons</groupId>
+ <artifactId>org.argeo.api.cms</artifactId>
+ <version>${version.argeo-commons}</version>
+ </dependency>
+
+ </dependencies>
+</project>
\ No newline at end of file
--- /dev/null
+package org.argeo.slc.rpmfactory;
+
+import java.io.File;
+import java.util.List;
+
+import javax.jcr.Node;
+
+/**
+ * Defines a build environment. This information is typically used by other
+ * components performing the various actions related to RPM build.
+ */
+public interface RpmFactory {
+ //
+ // DIRECT ACTIONS ON JCR REPOSITORY
+ //
+ public void indexWorkspace(String workspace);
+
+ public Node newDistribution(String distributionId);
+
+ //
+ // CONFIG FILES GENERATION
+ //
+ /** Creates a mock config file. */
+ public File getMockConfigFile(String arch, String branch);
+
+ /** Creates a yum config file. */
+ public File getYumRepoFile(String arch);
+
+ //
+ // WORKSPACES
+ //
+ public String getStagingWorkspace();
+
+ /**
+ * @return the name of the testing workspace, or null if and only if the
+ * testing workspace was not enabled.
+ */
+ public String getTestingWorkspace();
+
+ public String getStableWorkspace();
+
+ public File getWorkspaceDir(String workspace);
+
+ //
+ // ARCH DEPENDENT INFOS
+ //
+ public List<String> getArchs();
+
+ public String getMockConfig(String arch);
+
+ public String getIdWithArch(String arch);
+
+ public File getResultDir(String arch);
+
+ //
+ // DEPLOYMENT
+ //
+ public String getGitBaseUrl();
+
+ public Boolean isDeveloperInstance();
+
+}
--- /dev/null
+package org.argeo.slc.rpmfactory;
+
+import org.argeo.jcr.proxy.ResourceProxy;
+
+/** Marker interface (useful for OSGi service references); may be extended later. */
+public interface RpmProxyService extends ResourceProxy {
+}
--- /dev/null
+package org.argeo.slc.rpmfactory;
+
+/** A YUM compatible repository of RPM packages. */
+public interface RpmRepository {
+ public String getId();
+
+ public String getUrl();
+
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+import org.argeo.slc.rpmfactory.RpmRepository;
+
+/** Common methods for RPM repositories. */
+public abstract class AbstractRpmRepository implements RpmRepository {
+ private String id;
+ private String url;
+
+ @Override
+ public String getId() {
+ return id;
+ }
+
+ @Override
+ public String getUrl() {
+ return url;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public void setUrl(String url) {
+ this.url = url;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import org.apache.commons.exec.Executor;
+import org.apache.commons.io.FileUtils;
+import org.argeo.api.cms.CmsLog;
+import org.argeo.slc.SlcException;
+import org.argeo.slc.rpmfactory.RpmFactory;
+import org.argeo.slc.runtime.tasks.SystemCall;
+
+/** Build an RPM in mock. */
+public class BuildInMock implements Runnable {
+ private final static CmsLog log = CmsLog.getLog(BuildInMock.class);
+ private final static String NOARCH = "noarch";
+
+ private String rpmPackage = null;
+ private String branch = null;
+ private String arch = NOARCH;
+
+ private RpmFactory rpmFactory;
+ private Executor executor;
+
+ private String debuginfoDirName = "debuginfo";
+ private String mockExecutable = "/usr/bin/mock";
+
+ private List<String> preBuildCommands = new ArrayList<String>();
+
+ public void run() {
+ if (!rpmFactory.isDeveloperInstance()) {
+ // clean/init
+ SystemCall mockClean = createBaseMockCall();
+ mockClean.arg("--init");
+ mockClean.run();
+ }
+
+ // pre build commands
+ for (String preBuildCmd : preBuildCommands) {
+ SystemCall mockClean = createBaseMockCall();
+ mockClean.arg("--chroot").arg(preBuildCmd);
+ mockClean.run();
+ }
+
+ // actual build
+ SystemCall mockBuild = createBaseMockCall();
+ mockBuild.arg("--scm-enable");
+ mockBuild.arg("--scm-option").arg("package=" + rpmPackage);
+ mockBuild.arg("--no-clean");
+ //
+ //
+ mockBuild.run();
+ //
+
+ // copy RPMs to target directories
+ File stagingDir = rpmFactory.getWorkspaceDir(rpmFactory
+ .getStagingWorkspace());
+ File srpmDir = new File(stagingDir, "SRPMS");
+ srpmDir.mkdirs();
+ File archDir = null;
+ File debuginfoDir = null;
+ if (!arch.equals(NOARCH)) {
+ archDir = new File(stagingDir, arch);
+ debuginfoDir = new File(archDir, debuginfoDirName);
+ debuginfoDir.mkdirs();
+ }
+
+ Set<File> reposToRecreate = new HashSet<File>();
+ File resultDir = rpmFactory.getResultDir(arch);
+ if (resultDir.exists())
+ rpms: for (File file : resultDir.listFiles()) {
+ if (file.isDirectory())
+ continue rpms;
+
+ File[] targetDirs;
+ if (file.getName().contains(".src.rpm"))
+ targetDirs = new File[] { srpmDir };
+ else if (file.getName().contains("-debuginfo-"))
+ targetDirs = new File[] { debuginfoDir };
+ else if (!arch.equals(NOARCH)
+ && file.getName().contains("." + arch + ".rpm"))
+ targetDirs = new File[] { archDir };
+ else if (file.getName().contains(".noarch.rpm")) {
+ List<File> dirs = new ArrayList<File>();
+ for (String arch : rpmFactory.getArchs())
+ dirs.add(new File(stagingDir, arch));
+ targetDirs = dirs.toArray(new File[dirs.size()]);
+ } else if (file.getName().contains(".rpm"))
+ throw new SlcException("Don't know where to copy " + file);
+ else {
+ if (log.isTraceEnabled())
+ log.trace("Skip " + file);
+ continue rpms;
+ }
+
+ reposToRecreate.addAll(Arrays.asList(targetDirs));
+ copyToDirs(file, targetDirs);
+ }
+
+ // recreate changed repos
+ for (File repoToRecreate : reposToRecreate) {
+ SystemCall createrepo = new SystemCall();
+ createrepo.arg("createrepo");
+		// sqlite db
+ createrepo.arg("-d");
+ // debuginfo
+ if (!repoToRecreate.getName().equals(debuginfoDirName))
+ createrepo.arg("-x").arg(debuginfoDirName + "/*");
+ // quiet
+ createrepo.arg("-q");
+ createrepo.arg(repoToRecreate.getAbsolutePath());
+
+ createrepo.setExecutor(executor);
+ createrepo.run();
+ log.info("Updated repo " + repoToRecreate);
+ }
+
+ // index staging workspace
+ rpmFactory.indexWorkspace(rpmFactory.getStagingWorkspace());
+ }
+
+ /** Creates a mock call with all the common options such as config file etc. */
+ protected SystemCall createBaseMockCall() {
+ String mockCfg = rpmFactory.getMockConfig(arch);
+ File mockConfigFile = rpmFactory.getMockConfigFile(arch, branch);
+
+ // prepare mock call
+ SystemCall mock = new SystemCall();
+
+ if (arch != null)
+ mock.arg("setarch").arg(arch);
+ mock.arg(mockExecutable);
+ mock.arg("-v");
+ mock.arg("--configdir=" + mockConfigFile.getAbsoluteFile().getParent());
+ if (arch != null)
+ mock.arg("--arch=" + arch);
+ mock.arg("-r").arg(mockCfg);
+
+ mock.setLogCommand(true);
+ mock.setExecutor(executor);
+
+ return mock;
+ }
+
+ protected void copyToDirs(File file, File[] dirs) {
+ for (File dir : dirs) {
+ try {
+ FileUtils.copyFileToDirectory(file, dir);
+ if (log.isDebugEnabled())
+ log.debug(file + " => " + dir);
+ } catch (IOException e) {
+ throw new SlcException("Cannot copy " + file + " to " + dir, e);
+ }
+ }
+ }
+
+ public void setArch(String arch) {
+ this.arch = arch;
+ }
+
+ public void setRpmPackage(String rpmPackage) {
+ this.rpmPackage = rpmPackage;
+ }
+
+ public void setBranch(String branch) {
+ this.branch = branch;
+ }
+
+ public void setRpmFactory(RpmFactory env) {
+ this.rpmFactory = env;
+ }
+
+ public void setExecutor(Executor executor) {
+ this.executor = executor;
+ }
+
+ public void setMockExecutable(String mockExecutable) {
+ this.mockExecutable = mockExecutable;
+ }
+
+ public void setPreBuildCommands(List<String> preBuildCommands) {
+ this.preBuildCommands = preBuildCommands;
+ }
+
+}
\ No newline at end of file
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+import java.io.File;
+import java.io.InputStream;
+import java.net.URL;
+import java.util.StringTokenizer;
+
+import javax.jcr.Node;
+import javax.jcr.Session;
+import javax.jcr.nodetype.NodeType;
+
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.IOUtils;
+import org.argeo.api.cms.CmsLog;
+import org.argeo.jcr.JcrUtils;
+import org.argeo.slc.SlcException;
+import org.argeo.slc.rpmfactory.RpmFactory;
+import org.argeo.slc.runtime.tasks.SystemCall;
+
+/**
+ * Gather RPMs from various sources (local builds or third party) into a
+ * consistent distributable set (typically to be used to generate an ISO).
+ */
+public class CreateRpmDistribution implements Runnable {
+ private final static CmsLog log = CmsLog
+ .getLog(CreateRpmDistribution.class);
+
+ private RpmFactory rpmFactory;
+ private RpmDistribution rpmDistribution;
+
+ private String arch = "x86_64";
+
+ private String repoqueryExecutable = "/usr/bin/repoquery";
+
+ @Override
+ public void run() {
+ Session session = null;
+ // Reader reader = null;
+ try {
+ Node baseFolder = rpmFactory.newDistribution(rpmDistribution
+ .getId());
+ session = baseFolder.getSession();
+ Node targetFolder = baseFolder.addNode(arch, NodeType.NT_FOLDER);
+
+ SystemCall repoquery = new SystemCall();
+ repoquery.arg(repoqueryExecutable);
+
+ File yumConfigFile = rpmFactory.getYumRepoFile(arch);
+ repoquery.arg("-c", yumConfigFile.getAbsolutePath());
+ repoquery.arg("--requires");
+ repoquery.arg("--resolve");
+ repoquery.arg("--location");
+ repoquery.arg("--archlist=" + arch);
+
+ for (String rpmPackage : rpmDistribution.getPackages())
+ repoquery.arg(rpmPackage);
+
+ if (log.isDebugEnabled())
+ log.debug("Command:\n" + repoquery.asCommand());
+
+ String output = repoquery.function();
+
+ if (log.isDebugEnabled())
+ log.debug(output + "\n");
+ // reader = new StringReader(output);
+ StringTokenizer lines = new StringTokenizer(output, "\n");
+ // List<String> dependencies = IOUtils.readLines(reader);
+ dependencies: while (lines.hasMoreTokens()) {
+ String urlStr = lines.nextToken();
+ InputStream in = null;
+ try {
+ URL url = new URL(urlStr);
+ String fileName = FilenameUtils.getName(url.getFile());
+ String[] tokens = fileName.split("-");
+ if (tokens.length < 3)
+ continue dependencies;
+ StringBuilder buf = new StringBuilder();
+ for (int i = 0; i < tokens.length - 2; i++) {
+ if (i != 0)
+ buf.append('-');
+ buf.append(tokens[i]);
+
+ }
+ String packageName = buf.toString();
+ for (RpmPackageSet excluded : rpmDistribution
+ .getExcludedPackages()) {
+ if (excluded.contains(packageName)) {
+ if (log.isDebugEnabled())
+ log.debug("Skipped " + packageName);
+ continue dependencies;// skip
+ }
+ }
+ in = url.openStream();
+ JcrUtils.copyStreamAsFile(targetFolder, fileName, in);
+ targetFolder.getSession().save();
+ if (log.isDebugEnabled())
+ log.debug("Copied " + packageName);
+ } catch (Exception e) {
+ log.error("Cannot copy " + urlStr, e);
+ } finally {
+ IOUtils.closeQuietly(in);
+ }
+ }
+
+ // createrepo
+ File workspaceDir = rpmFactory.getWorkspaceDir(rpmDistribution
+ .getId());
+ SystemCall createrepo = new SystemCall();
+ createrepo.arg("createrepo");
+ createrepo.arg("-q");
+ createrepo.arg("-d");
+ File archDir = new File(workspaceDir.getPath()
+ + targetFolder.getPath());
+ createrepo.arg(archDir.getAbsolutePath());
+ createrepo.run();
+ } catch (Exception e) {
+ throw new SlcException("Cannot generate distribution "
+ + rpmDistribution.getId(), e);
+ } finally {
+ JcrUtils.logoutQuietly(session);
+ // IOUtils.closeQuietly(reader);
+ }
+ }
+
+ public void setRpmDistribution(RpmDistribution rpmDistribution) {
+ this.rpmDistribution = rpmDistribution;
+ }
+
+ public void setRpmFactory(RpmFactory rpmFactory) {
+ this.rpmFactory = rpmFactory;
+ }
+
+ public void setArch(String arch) {
+ this.arch = arch;
+ }
+
+ public void setRepoqueryExecutable(String yumdownloaderExecutable) {
+ this.repoqueryExecutable = yumdownloaderExecutable;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.exec.Executor;
+import org.apache.commons.io.FileUtils;
+import org.argeo.api.cms.CmsLog;
+import org.argeo.slc.SlcException;
+import org.argeo.slc.rpmfactory.RpmFactory;
+import org.argeo.slc.runtime.tasks.SystemCall;
+
+/** Releases the content of staging to a public repository. */
+public class ReleaseStaging implements Runnable {
+ private final static CmsLog log = CmsLog.getLog(ReleaseStaging.class);
+
+ private RpmFactory rpmFactory;
+ private Executor executor;
+
+ private String debuginfoDirName = "debuginfo";
+
+ @Override
+ public void run() {
+ String sourceWorkspace = rpmFactory.getStagingWorkspace();
+ File sourceRepoDir = rpmFactory.getWorkspaceDir(sourceWorkspace);
+ String targetWorkspace = rpmFactory.getTestingWorkspace() != null ? rpmFactory
+ .getTestingWorkspace() : rpmFactory.getStableWorkspace();
+ File targetRepoDir = rpmFactory.getWorkspaceDir(targetWorkspace);
+ List<File> reposToRecreate = new ArrayList<File>();
+
+ stagingChildren: for (File dir : sourceRepoDir.listFiles()) {
+ if (!dir.isDirectory())
+ continue stagingChildren;
+ if (dir.getName().equals("lost+found"))
+ continue stagingChildren;
+
+ File targetDir = new File(targetRepoDir, dir.getName());
+ try {
+ FileUtils.copyDirectory(dir, targetDir);
+ if (log.isDebugEnabled())
+ log.debug(dir + " => " + targetDir);
+ } catch (IOException e) {
+ throw new SlcException(sourceRepoDir
+ + " could not be copied properly, check it manually."
+ + " Metadata have NOT been updated.", e);
+ }
+
+ reposToRecreate.add(dir);
+ reposToRecreate.add(targetDir);
+ File debugInfoDir = new File(dir, debuginfoDirName);
+ if (debugInfoDir.exists())
+ reposToRecreate.add(debugInfoDir);
+ File targetDebugInfoDir = new File(targetDir, debuginfoDirName);
+ if (targetDebugInfoDir.exists())
+ reposToRecreate.add(targetDebugInfoDir);
+
+ }
+
+ // clear staging
+ for (File dir : sourceRepoDir.listFiles()) {
+ try {
+ if (dir.getName().equals("lost+found"))
+ continue;
+ if (dir.isDirectory())
+ FileUtils.deleteDirectory(dir);
+ } catch (IOException e) {
+ log.error("Could not delete " + dir + ". " + e);
+ }
+ }
+
+ // recreate changed repos
+ for (File repoToRecreate : reposToRecreate) {
+ repoToRecreate.mkdirs();
+ SystemCall createrepo = new SystemCall();
+ createrepo.arg("createrepo");
+		// sqlite db
+ createrepo.arg("-d");
+ // debuginfo
+ if (!repoToRecreate.getName().equals(debuginfoDirName))
+ createrepo.arg("-x").arg(debuginfoDirName + "/*");
+ // quiet
+ createrepo.arg("-q");
+ createrepo.arg(repoToRecreate.getAbsolutePath());
+
+ createrepo.setExecutor(executor);
+ createrepo.run();
+ log.info("Updated repo " + repoToRecreate);
+ }
+
+ rpmFactory.indexWorkspace(sourceWorkspace);
+ rpmFactory.indexWorkspace(targetWorkspace);
+ }
+
+ public void setRpmFactory(RpmFactory rpmFactory) {
+ this.rpmFactory = rpmFactory;
+ }
+
+ public void setExecutor(Executor executor) {
+ this.executor = executor;
+ }
+
+ public void setDebuginfoDirName(String debuginfoDirName) {
+ this.debuginfoDirName = debuginfoDirName;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+import java.util.List;
+
+/** A consistent distributable set of RPM. */
+public class RpmDistribution {
+ private String id;
+ private List<String> packages;
+ private List<RpmPackageSet> excludedPackages;
+
+ public List<String> getPackages() {
+ return packages;
+ }
+
+ public void setPackages(List<String> packages) {
+ this.packages = packages;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public void setId(String id) {
+ this.id = id;
+ }
+
+ public List<RpmPackageSet> getExcludedPackages() {
+ return excludedPackages;
+ }
+
+ public void setExcludedPackages(List<RpmPackageSet> excludedPackages) {
+ this.excludedPackages = excludedPackages;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+import java.io.File;
+import java.io.IOException;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.GregorianCalendar;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.jcr.Node;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+import org.apache.commons.io.FileUtils;
+import org.argeo.api.cms.CmsLog;
+import org.argeo.jcr.JcrUtils;
+import org.argeo.slc.SlcConstants;
+import org.argeo.slc.SlcException;
+import org.argeo.slc.repo.NodeIndexerVisitor;
+import org.argeo.slc.rpmfactory.RpmFactory;
+import org.argeo.slc.rpmfactory.RpmRepository;
+import org.argeo.slc.runtime.tasks.SystemCall;
+
+/**
+ * Defines a build environment. This information is typically used by other
+ * components performing the various actions related to RPM build.
+ */
+public class RpmFactoryImpl implements RpmFactory {
+	private CmsLog log = CmsLog.getLog(RpmFactoryImpl.class);
+
+	// JCR repositories backing the RPM workspaces and the distribution
+	// (sources) workspaces
+	private Repository rpmRepository;
+	private Repository distRepository;
+
+	// identifier of this build environment; also the name of the stable
+	// workspace (see init())
+	private String id;
+	private List<RpmRepository> repositories = new ArrayList<RpmRepository>();
+	private List<String> archs = new ArrayList<String>();
+
+	// file-system locations where the JCR workspaces are expected to be
+	// mounted (see getWorkspaceDir() / getSourcesDir())
+	private String rpmBase = "/mnt/slc/repos/rpm";
+	private String distBase = "/mnt/slc/repos/dist";
+	private String mockVar = "/var/lib/mock";
+	private String mockEtc = "/etc/mock";
+
+	// timestamp used as folder name by newDistribution()
+	private DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_HHmm");
+
+	private String gitWorkspace = "git";
+
+	// base URL under which this factory's repositories are served locally
+	private String localUrlBase = "http://localhost:7070/";
+	/** If not null or empty, this is a developer instance. */
+	private String gitDevBaseUrl = null;
+
+	// whether an intermediate <id>-testing workspace is managed
+	private Boolean withTestingRepository = false;
+
+	// [main] section written at the top of generated yum configurations
+	private String yumConfigMainSection = "cachedir=/var/cache/yum\n"
+			+ "debuglevel=1\n" + "reposdir=/dev/null\n"
+			+ "logfile=/var/log/yum.log\n" + "retries=20\n" + "obsoletes=1\n"
+			+ "gpgcheck=0\n" + "assumeyes=1\n" + "syslog_ident=mock\n"
+			+ "syslog_device=\n" + "http_caching=none\n";
+
+	// macrofiles list written to rpmrc by writeRpmbuildConfigFiles()
+	private String defaultMacroFiles = "/usr/lib/rpm/macros:"
+			+ "/usr/lib/rpm/ia32e-linux/macros:"
+			+ "/usr/lib/rpm/redhat/macros:" + "/etc/rpm/macros.*:"
+			+ "/etc/rpm/macros:" + "/etc/rpm/ia32e-linux/macros:"
+			+ "~/.rpmmacros";
+	private Map<String, String> rpmmacros = new HashMap<String, String>();
+
+	// set by init
+	private String proxiedReposBase;
+	private String managedReposBase;
+
+	private String stagingWorkspace;
+	private String testingWorkspace;
+	private String stableWorkspace;
+
+	private File rpmFactoryBaseDir;
+	private File mockConfDir;
+	private File yumConfDir;
+
+	/**
+	 * Computes the local URL bases, creates the mock/yum configuration
+	 * directories, and initializes the git, staging, (optional) testing and
+	 * stable workspaces. Must be called before any other method.
+	 */
+	public void init() {
+		// local URL bases
+		proxiedReposBase = localUrlBase + "repo/rpm/";
+		managedReposBase = localUrlBase + "data/public/rpm/";
+
+		// local directories
+		rpmFactoryBaseDir.mkdirs();
+		mockConfDir = new File(rpmFactoryBaseDir.getPath() + "/conf/mock");
+		mockConfDir.mkdirs();
+		yumConfDir = new File(rpmFactoryBaseDir.getPath() + "/conf/yum");
+		yumConfDir.mkdirs();
+
+		// managed repositories
+		stagingWorkspace = id + "-staging";
+		if (withTestingRepository)
+			testingWorkspace = id + "-testing";
+		stableWorkspace = id;
+
+		initDistWorkspace(stableWorkspace);
+		initGitWorkspace();
+		initRpmWorkspace(stagingWorkspace);
+		if (withTestingRepository)
+			initRpmWorkspace(testingWorkspace);
+		initRpmWorkspace(stableWorkspace);
+	}
+
+	/**
+	 * Creates or logs into an RPM workspace, grants read access to anonymous
+	 * and full rights to the SLC role, and makes sure a yum repository
+	 * structure (repodata) exists for each configured arch, running
+	 * 'createrepo' on the mounted directory when it does not.
+	 */
+	protected void initRpmWorkspace(String workspace) {
+		Session session = null;
+		try {
+			session = JcrUtils.loginOrCreateWorkspace(rpmRepository, workspace);
+			JcrUtils.addPrivilege(session, "/", "anonymous", "jcr:read");
+			JcrUtils.addPrivilege(session, "/", SlcConstants.ROLE_SLC,
+					"jcr:all");
+
+			for (String arch : archs) {
+				Node archFolder = JcrUtils.mkfolders(session, "/" + arch);
+				session.save();
+				File workspaceDir = getWorkspaceDir(workspace);
+				try {
+					if (!archFolder.hasNode("repodata")) {
+						// touch a file in order to make sure this is properly
+						// mounted.
+						File touch = new File(workspaceDir, ".touch");
+						touch.createNewFile();
+						touch.delete();
+
+						SystemCall createrepo = new SystemCall();
+						createrepo.arg("createrepo");
+						createrepo.arg("-q");
+						File archDir = new File(workspaceDir, arch);
+						createrepo.arg(archDir.getAbsolutePath());
+						createrepo.run();
+					}
+				} catch (IOException e) {
+					// only logged: the workspace stays usable for other archs
+					log.error(workspaceDir + " not properly mounted.", e);
+				}
+			}
+		} catch (Exception e) {
+			throw new SlcException("Cannot initialize workspace " + workspace,
+					e);
+		} finally {
+			JcrUtils.logoutQuietly(session);
+		}
+	}
+
+	/** Caller must logout the underlying session. */
+	public Node newDistribution(String distributionId) {
+		Session session = null;
+		try {
+			session = JcrUtils.loginOrCreateWorkspace(rpmRepository,
+					distributionId);
+			JcrUtils.addPrivilege(session, "/", "anonymous", "jcr:read");
+			JcrUtils.addPrivilege(session, "/", SlcConstants.ROLE_SLC,
+					"jcr:all");
+
+			// a timestamped folder hosts this distribution's content
+			Calendar now = new GregorianCalendar();
+			String folderName = dateFormat.format(now.getTime());
+			return JcrUtils.mkfolders(session, "/" + folderName);
+		} catch (Exception e) {
+			// logout only on failure: on success the caller owns the session
+			JcrUtils.logoutQuietly(session);
+			throw new SlcException("Cannot initialize distribution workspace "
+					+ distributionId, e);
+		}
+	}
+
+	/**
+	 * Creates or logs into the git workspace, granting read access to
+	 * anonymous and full rights to the SLC role.
+	 */
+	protected void initGitWorkspace() {
+		Session session = null;
+		try {
+			session = JcrUtils.loginOrCreateWorkspace(rpmRepository,
+					gitWorkspace);
+			JcrUtils.addPrivilege(session, "/", "anonymous", "jcr:read");
+			JcrUtils.addPrivilege(session, "/", SlcConstants.ROLE_SLC,
+					"jcr:all");
+		} catch (Exception e) {
+			throw new SlcException("Cannot initialize workspace "
+					+ gitWorkspace, e);
+		} finally {
+			JcrUtils.logoutQuietly(session);
+		}
+	}
+
+	/**
+	 * Creates or logs into a distribution (sources) workspace, granting read
+	 * access to anonymous.
+	 */
+	protected void initDistWorkspace(String workspace) {
+		Session session = null;
+		try {
+			session = JcrUtils
+					.loginOrCreateWorkspace(distRepository, workspace);
+			JcrUtils.addPrivilege(session, "/", "anonymous", "jcr:read");
+		} catch (RepositoryException e) {
+			throw new SlcException("Cannot initialize workspace " + workspace,
+					e);
+		} finally {
+			JcrUtils.logoutQuietly(session);
+		}
+	}
+
+	/** Lifecycle callback; currently nothing to release. */
+	public void destroy() {
+
+	}
+
+	/**
+	 * Generates the content of the mock configuration file for this arch (and
+	 * optional git branch), embedding the yum configuration generated by
+	 * {@link #generateYumConfigFile(String)}.
+	 */
+	public String generateMockConfigFile(String arch, String branch) {
+		StringBuffer buf = new StringBuffer();
+
+		buf.append("config_opts['root'] = '" + getIdWithArch(arch) + "'\n");
+		buf.append("config_opts['target_arch'] = '" + arch + "'\n");
+		buf.append("config_opts['legal_host_arches'] = ('" + arch + "',)\n");
+		buf.append("config_opts['chroot_setup_cmd'] = 'groupinstall buildsys-build'\n");
+		// buf.append("config_opts['dist'] = 'el6'\n");
+		buf.append("config_opts['plugin_conf']['yum_cache_enable'] = False\n");
+
+		buf.append("config_opts['scm'] = False\n");
+		buf.append("config_opts['scm_opts']['method'] = 'git'\n");
+		buf.append("config_opts['scm_opts']['spec'] = 'SCM_PKG.spec'\n");
+		buf.append("config_opts['scm_opts']['ext_src_dir'] = '"
+				+ getSourcesDir().getAbsolutePath() + "'\n");
+		buf.append("config_opts['scm_opts']['git_timestamps'] = True\n");
+
+		// development
+		if (gitDevBaseUrl != null && !gitDevBaseUrl.trim().equals(""))
+			buf.append("config_opts['scm_opts']['git_get'] = 'git clone "
+					+ (branch != null ? "-b " + branch : "") + " "
+					+ gitDevBaseUrl + "/SCM_PKG SCM_PKG'\n");
+		else
+			buf.append("config_opts['scm_opts']['git_get'] = 'git clone "
+					+ (branch != null ? "-b " + branch : "") + " "
+					+ getGitBaseUrl() + "/SCM_PKG.git SCM_PKG'\n");
+
+		buf.append("\nconfig_opts['yum.conf'] = \"\"\"\n");
+		buf.append(generateYumConfigFile(arch)).append('\n');
+		buf.append("\"\"\"\n");
+		return buf.toString();
+	}
+
+	/**
+	 * Generates a yum configuration listing the third-party repositories
+	 * (proxied locally) and the managed staging/(testing)/stable repositories
+	 * for this arch.
+	 */
+	public String generateYumConfigFile(String arch) {
+		StringBuffer buf = new StringBuffer();
+		buf.append("[main]\n");
+		buf.append(yumConfigMainSection).append('\n');
+
+		for (RpmRepository repository : repositories) {
+			buf.append('[').append(repository.getId()).append("]\n");
+			buf.append("name=").append(repository.getId()).append('\n');
+			if (repository instanceof ThirdPartyRpmRepository) {
+				// original URL kept as a comment; actual access goes through
+				// the local proxy
+				buf.append("#baseurl=").append(repository.getUrl())
+						.append(arch).append('/').append("\n");
+				buf.append("baseurl=").append(proxiedReposBase)
+						.append(repository.getId()).append('/').append(arch)
+						.append('/').append("\n");
+				if (((ThirdPartyRpmRepository) repository).getYumConf() != null)
+					buf.append(
+							((ThirdPartyRpmRepository) repository).getYumConf()
+									.trim()).append('\n');
+			}
+		}
+
+		// managed repos
+		addManagedRepository(buf, stagingWorkspace, arch);
+		if (withTestingRepository)
+			addManagedRepository(buf, testingWorkspace, arch);
+		addManagedRepository(buf, stableWorkspace, arch);
+		return buf.toString();
+	}
+
+	/** Appends a yum section for an internally managed repository. */
+	protected void addManagedRepository(StringBuffer buf, String workspace,
+			String arch) {
+		buf.append('[').append(workspace).append("]\n");
+		buf.append("baseurl=").append(managedReposBase).append(workspace)
+				.append('/').append(arch).append('/').append("\n");
+		buf.append("gpgcheck=0").append("\n");
+	}
+
+	/** Creates a mock config file. */
+	public File getMockConfigFile(String arch, String branch) {
+		File mockSiteDefaultsFile = new File(mockConfDir, "site-defaults.cfg");
+		File mockLoggingFile = new File(mockConfDir, "logging.ini");
+		File mockConfigFile = new File(mockConfDir, getIdWithArch(arch)
+				+ ".cfg");
+		try {
+			// mock requires site-defaults.cfg and logging.ini to exist next
+			// to the config file
+			if (!mockSiteDefaultsFile.exists())
+				mockSiteDefaultsFile.createNewFile();
+			if (!mockLoggingFile.exists())
+				FileUtils.copyFile(new File(mockEtc + "/logging.ini"),
+						mockLoggingFile);
+
+			FileUtils.writeStringToFile(mockConfigFile,
+					generateMockConfigFile(arch, branch));
+			return mockConfigFile;
+		} catch (IOException e) {
+			throw new SlcException("Cannot write mock config file to "
+					+ mockConfigFile, e);
+		}
+	}
+
+	/** Creates a yum config file. */
+	public File getYumRepoFile(String arch) {
+		File yumConfigFile = new File(yumConfDir, getIdWithArch(arch) + ".repo");
+		try {
+			FileUtils.writeStringToFile(yumConfigFile,
+					generateYumConfigFile(arch));
+			return yumConfigFile;
+		} catch (IOException e) {
+			throw new SlcException("Cannot write yum config file to "
+					+ yumConfigFile, e);
+		}
+	}
+
+	/** Directory where mock puts build results for this arch. */
+	public File getResultDir(String arch) {
+		return new File(mockVar + "/" + getIdWithArch(arch) + "/result");
+	}
+
+	/** Local directory where this workspace is expected to be mounted. */
+	public File getWorkspaceDir(String workspace) {
+		return new File(rpmBase + "/" + workspace);
+	}
+
+	/** Local directory holding the sources (stable distribution mount). */
+	public File getSourcesDir() {
+		return new File(distBase + "/" + stableWorkspace);
+	}
+
+	/** Name of the mock configuration to use for this arch. */
+	public String getMockConfig(String arch) {
+		return getIdWithArch(arch);
+	}
+
+	/** Factory id qualified by arch, e.g. "myid-x86_64". */
+	public String getIdWithArch(String arch) {
+		return id + "-" + arch;
+	}
+
+	/** Base URL of the managed git repositories. */
+	public String getGitBaseUrl() {
+		return managedReposBase + gitWorkspace;
+	}
+
+	/** Runs the RPM indexer on every node of this workspace. */
+	public void indexWorkspace(String workspace) {
+		Session session = null;
+		try {
+			session = rpmRepository.login(workspace);
+			session.getRootNode().accept(
+					new NodeIndexerVisitor(new RpmIndexer()));
+			if (log.isDebugEnabled())
+				log.debug("Indexed workspace " + workspace);
+		} catch (RepositoryException e) {
+			throw new SlcException("Cannot index workspace " + workspace, e);
+		} finally {
+			JcrUtils.logoutQuietly(session);
+		}
+	}
+
+	// NOTE(review): an empty (non-null) gitDevBaseUrl is treated as a
+	// developer instance here, whereas generateMockConfigFile() additionally
+	// requires it to be non-empty — confirm which check is intended.
+	public Boolean isDeveloperInstance() {
+		return gitDevBaseUrl != null;
+	}
+
+	/** Write (topdir)/rpmmacros and (topdir)/rpmrc */
+	public void writeRpmbuildConfigFiles(File topdir) {
+		writeRpmbuildConfigFiles(topdir, new File(topdir, "rpmmacros"),
+				new File(topdir, "rpmrc"));
+	}
+
+	/**
+	 * Writes the rpmbuild macro file (%_topdir plus configured macros) and an
+	 * rpmrc referencing it after the default macro files.
+	 */
+	public void writeRpmbuildConfigFiles(File topdir, File rpmmacroFile,
+			File rpmrcFile) {
+		try {
+			List<String> macroLines = new ArrayList<String>();
+			macroLines.add("%_topdir " + topdir.getCanonicalPath());
+			for (String macroKey : rpmmacros.keySet()) {
+				macroLines.add(macroKey + " " + rpmmacros.get(macroKey));
+			}
+			FileUtils.writeLines(rpmmacroFile, macroLines);
+
+			List<String> rpmrcLines = new ArrayList<String>();
+			rpmrcLines.add("include: /usr/lib/rpm/rpmrc");
+			rpmrcLines.add("macrofiles: " + defaultMacroFiles + ":"
+					+ rpmmacroFile.getCanonicalPath());
+			FileUtils.writeLines(rpmrcFile, rpmrcLines);
+		} catch (IOException e) {
+			throw new SlcException("Cannot write rpmbuild config files", e);
+		}
+
+	}
+
+	public Map<String, String> getRpmmacros() {
+		return rpmmacros;
+	}
+
+	public void setRpmmacros(Map<String, String> rpmmacros) {
+		this.rpmmacros = rpmmacros;
+	}
+
+	public String getDefaultMacroFiles() {
+		return defaultMacroFiles;
+	}
+
+	public void setDefaultMacroFiles(String defaultMacroFiles) {
+		this.defaultMacroFiles = defaultMacroFiles;
+	}
+
+	public void setArchs(List<String> archs) {
+		this.archs = archs;
+	}
+
+	public List<String> getArchs() {
+		return archs;
+	}
+
+	public void setRpmBase(String stagingBase) {
+		this.rpmBase = stagingBase;
+	}
+
+	public void setId(String id) {
+		this.id = id;
+	}
+
+	public void setMockVar(String mockVar) {
+		this.mockVar = mockVar;
+	}
+
+	public void setRpmRepository(Repository rpmRepository) {
+		this.rpmRepository = rpmRepository;
+	}
+
+	public void setDistRepository(Repository distRepository) {
+		this.distRepository = distRepository;
+	}
+
+	public void setLocalUrlBase(String localUrlBase) {
+		this.localUrlBase = localUrlBase;
+	}
+
+	public void setYumConfigMainSection(String yumConfigMainSection) {
+		this.yumConfigMainSection = yumConfigMainSection;
+	}
+
+	public void setRepositories(List<RpmRepository> repositories) {
+		this.repositories = repositories;
+	}
+
+	public void setRpmFactoryBaseDir(File rpmFactoryBaseDir) {
+		this.rpmFactoryBaseDir = rpmFactoryBaseDir;
+	}
+
+	public String getStagingWorkspace() {
+		return stagingWorkspace;
+	}
+
+	public String getTestingWorkspace() {
+		return testingWorkspace;
+	}
+
+	public String getStableWorkspace() {
+		return stableWorkspace;
+	}
+
+	public void setWithTestingRepository(Boolean withTestingRepository) {
+		this.withTestingRepository = withTestingRepository;
+	}
+
+	public void setGitDevBaseUrl(String gitBaseUrl) {
+		this.gitDevBaseUrl = gitBaseUrl;
+	}
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+import static org.redline_rpm.header.Header.HeaderTag.HEADERIMMUTABLE;
+import static org.redline_rpm.header.Signature.SignatureTag.SIGNATURES;
+
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.channels.Channels;
+
+import javax.jcr.Node;
+import javax.jcr.Property;
+import javax.jcr.nodetype.NodeType;
+
+import org.apache.commons.io.FilenameUtils;
+import org.argeo.slc.SlcException;
+import org.argeo.slc.SlcNames;
+import org.argeo.slc.SlcTypes;
+import org.argeo.slc.repo.NodeIndexer;
+import org.redline_rpm.ChannelWrapper.Key;
+import org.redline_rpm.ReadableChannelWrapper;
+import org.redline_rpm.header.AbstractHeader;
+import org.redline_rpm.header.Format;
+import org.redline_rpm.header.Header;
+
+/** Indexes an RPM file. */
+public class RpmIndexer implements NodeIndexer, SlcNames {
+	// if true, re-index nodes already marked with the slc:rpm mixin
+	private Boolean force = false;
+
+	/** Supports only paths with an 'rpm' extension. */
+	@Override
+	public Boolean support(String path) {
+		return FilenameUtils.getExtension(path).equals("rpm");
+	}
+
+	/**
+	 * Reads the RPM metadata from the file node's binary content and stores
+	 * name, version, release, arch and archive size as SLC properties, adding
+	 * the slc:rpm mixin. Non-RPM paths, already-indexed nodes (unless force)
+	 * and non nt:file nodes are silently skipped.
+	 */
+	@Override
+	public void index(Node node) {
+		try {
+			if (!support(node.getPath()))
+				return;
+
+			// Already indexed
+			if (!force && node.isNodeType(SlcTypes.SLC_RPM))
+				return;
+
+			if (!node.isNodeType(NodeType.NT_FILE))
+				return;
+
+			InputStream in = node.getNode(Node.JCR_CONTENT)
+					.getProperty(Property.JCR_DATA).getBinary().getStream();
+			ReadableChannelWrapper channel = new ReadableChannelWrapper(
+					Channels.newChannel(in));
+			Format format = readRpmInfo(channel);
+
+			node.addMixin(SlcTypes.SLC_RPM);
+			node.setProperty(SLC_NAME, readTag(format, Header.HeaderTag.NAME));
+			String rpmVersion = readTag(format, Header.HeaderTag.VERSION);
+			String rpmRelease = readTag(format, Header.HeaderTag.RELEASE);
+			node.setProperty(SLC_RPM_VERSION, rpmVersion);
+			node.setProperty(SLC_RPM_RELEASE, rpmRelease);
+			node.setProperty(SLC_VERSION, rpmVersion + "-" + rpmRelease);
+
+			String arch = readTag(format, Header.HeaderTag.ARCH);
+			if (arch != null)
+				node.setProperty(SLC_RPM_ARCH, arch);
+
+			String archiveSize = readTag(format, Header.HeaderTag.ARCHIVESIZE);
+			if (archiveSize != null)
+				node.setProperty(SLC_RPM_ARCHIVE_SIZE,
+						Long.parseLong(archiveSize));
+
+			node.getSession().save();
+		} catch (Exception e) {
+			throw new SlcException("Cannot index " + node, e);
+		}
+
+	}
+
+	/**
+	 * Reads the lead, signature and header sections of the RPM stream, in
+	 * that order, consuming the channel. The count/expected locals are unused
+	 * by the logic but kept for debugging (hence the suppression).
+	 */
+	@SuppressWarnings("unused")
+	public Format readRpmInfo(ReadableChannelWrapper channel) throws Exception {
+		Format format = new Format();
+
+		Key<Integer> lead = channel.start();
+		format.getLead().read(channel);
+		// System.out.println( "Lead ended at '" + in.finish( lead) + "'.");
+
+		Key<Integer> signature = channel.start();
+		int count = format.getSignature().read(channel);
+		int expected = ByteBuffer
+				.wrap((byte[]) format.getSignature().getEntry(SIGNATURES)
+						.getValues(), 8, 4).getInt()
+				/ -16;
+		// System.out.println( "Signature ended at '" + in.finish( signature) +
+		// "' and contained '" + count + "' headers (expected '" + expected +
+		// "').");
+
+		Key<Integer> header = channel.start();
+		count = format.getHeader().read(channel);
+		expected = ByteBuffer.wrap(
+				(byte[]) format.getHeader().getEntry(HEADERIMMUTABLE)
+						.getValues(), 8, 4).getInt()
+				/ -16;
+		// System.out.println( "Header ended at '" + in.finish( header) +
+		// " and contained '" + count + "' headers (expected '" + expected +
+		// "').");
+
+		return format;
+	}
+
+	/**
+	 * First value of this header tag as a trimmed string, or null when the
+	 * tag is absent or has no values.
+	 */
+	private String readTag(Format format, Header.HeaderTag tag) {
+		AbstractHeader.Entry<?> entry = format.getHeader().getEntry(tag);
+		if (entry == null)
+			return null;
+		if (entry.getValues() == null)
+			return null;
+		Object[] values = (Object[]) entry.getValues();
+		return values[0].toString().trim();
+	}
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+/** Set of RPM packages */
+public interface RpmPackageSet {
+	/** Whether this set contains the given RPM package name. */
+	public Boolean contains(String rpmPackage);
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Set;
+
+import javax.jcr.Node;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+import javax.jcr.nodetype.NodeType;
+import javax.jcr.security.AccessControlException;
+
+import org.argeo.api.cms.CmsLog;
+import org.argeo.cms.ArgeoNames;
+import org.argeo.jcr.JcrUtils;
+import org.argeo.jcr.proxy.AbstractUrlProxy;
+import org.argeo.slc.SlcConstants;
+import org.argeo.slc.SlcException;
+import org.argeo.slc.SlcNames;
+import org.argeo.slc.SlcTypes;
+import org.argeo.slc.repo.RepoConstants;
+import org.argeo.slc.rpmfactory.RpmProxyService;
+import org.argeo.slc.rpmfactory.RpmRepository;
+
+/** Synchronises the node repository with remote RPM repositories. */
+public class RpmProxyServiceImpl extends AbstractUrlProxy implements
+		RpmProxyService, ArgeoNames, SlcNames {
+	private final static CmsLog log = CmsLog.getLog(RpmProxyServiceImpl.class);
+
+	// remote RPM repositories that can be proxied
+	private Set<RpmRepository> defaultRepositories = new HashSet<RpmRepository>();
+
+	/**
+	 * Grants read access to anonymous (and, best effort, full rights to the
+	 * SLC role) and creates the proxied-repositories area.
+	 */
+	@Override
+	protected void beforeInitSessionSave(Session session)
+			throws RepositoryException {
+		JcrUtils.addPrivilege(session, "/", "anonymous", "jcr:read");
+		try {
+			JcrUtils.addPrivilege(session, "/", SlcConstants.ROLE_SLC,
+					"jcr:all");
+		} catch (AccessControlException e) {
+			// best effort: the role may not exist in this deployment
+			if (log.isTraceEnabled())
+				log.trace("Cannot give jcr:all privileges to "+SlcConstants.ROLE_SLC);
+		}
+
+		JcrUtils.mkdirsSafe(session, RepoConstants.PROXIED_REPOSITORIES);
+	}
+
+	/**
+	 * Retrieve and add this file to the repository
+	 */
+	@Override
+	protected Node retrieve(Session session, String path) {
+		// first path segment selects the source repository by id
+		StringBuilder relativePathBuilder = new StringBuilder();
+		String repoId = extractRepoId(path, relativePathBuilder);
+		// remove starting '/'
+		String relativePath = relativePathBuilder.toString().substring(1);
+
+		RpmRepository sourceRepo = null;
+		for (Iterator<RpmRepository> reposIt = defaultRepositories.iterator(); reposIt
+				.hasNext();) {
+			RpmRepository rpmRepo = reposIt.next();
+			if (rpmRepo.getId().equals(repoId)) {
+				sourceRepo = rpmRepo;
+				break;
+			}
+		}
+
+		if (sourceRepo == null)
+			throw new SlcException("No RPM repository found for " + path);
+
+		try {
+			String baseUrl = sourceRepo.getUrl();
+			String remoteUrl = baseUrl + relativePath;
+			Node node = proxyUrl(session, remoteUrl, path);
+			if (node != null) {
+				registerSource(sourceRepo, node, remoteUrl);
+				if (log.isDebugEnabled())
+					log.debug("Imported " + remoteUrl + " to " + node);
+				return node;
+			}
+		} catch (Exception e) {
+			throw new SlcException("Cannot proxy " + path, e);
+		}
+		// proxyUrl() returned null: discard partial changes
+		JcrUtils.discardQuietly(session);
+		throw new SlcException("No proxy found for " + path);
+	}
+
+	/**
+	 * Marks the node as having a known origin, recording the source
+	 * repository (created under the proxied-repositories area on first use)
+	 * and the exact remote URL it was fetched from.
+	 */
+	protected void registerSource(RpmRepository sourceRepo, Node node,
+			String remoteUrl) throws RepositoryException {
+		node.addMixin(SlcTypes.SLC_KNOWN_ORIGIN);
+		Node origin;
+		if (!node.hasNode(SLC_ORIGIN))
+			origin = node.addNode(SLC_ORIGIN, SlcTypes.SLC_PROXIED);
+		else
+			origin = node.getNode(SLC_ORIGIN);
+
+		// proxied repository
+		Node proxiedRepository;
+		String proxiedRepositoryPath = RepoConstants.PROXIED_REPOSITORIES + '/'
+				+ sourceRepo.getId();
+		Session session = node.getSession();
+		if (session.itemExists(proxiedRepositoryPath)) {
+			proxiedRepository = session.getNode(proxiedRepositoryPath);
+		} else {
+			proxiedRepository = session.getNode(
+					RepoConstants.PROXIED_REPOSITORIES).addNode(
+					sourceRepo.getId());
+			proxiedRepository.addMixin(NodeType.MIX_REFERENCEABLE);
+			JcrUtils.urlToAddressProperties(proxiedRepository,
+					sourceRepo.getUrl());
+			proxiedRepository.setProperty(SLC_URL, sourceRepo.getUrl());
+		}
+
+		origin.setProperty(SLC_PROXY, proxiedRepository);
+		JcrUtils.urlToAddressProperties(origin, remoteUrl);
+	}
+
+	/**
+	 * Returns the first token of the path (the repository id); the remainder,
+	 * including its leading '/', is appended to relativePath.
+	 */
+	protected String extractRepoId(String path, StringBuilder relativePath) {
+		StringBuilder workspace = new StringBuilder();
+		StringBuilder buf = workspace;
+		for (int i = 1; i < path.length(); i++) {
+			char c = path.charAt(i);
+			if (c == '/') {
+				buf = relativePath;
+			}
+			buf.append(c);
+		}
+		return workspace.toString();
+	}
+
+	@Override
+	protected Boolean shouldUpdate(Session clientSession, String nodePath) {
+		// NOTE(review): forcing updates of repodata was considered but is
+		// currently disabled — confirm cached yum metadata staleness is
+		// acceptable.
+		// if (nodePath.contains("/repodata/"))
+		// return true;
+		return super.shouldUpdate(clientSession, nodePath);
+	}
+
+	public void setDefaultRepositories(Set<RpmRepository> defaultRepositories) {
+		this.defaultRepositories = defaultRepositories;
+	}
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class RpmSpecFile {
+ private Path specFile;
+
+ private String name;
+ private String version;
+ private String release;
+ private Map<String, String> sources = new HashMap<String, String>();
+ private Map<String, String> patches = new HashMap<String, String>();
+
+ public RpmSpecFile(Path specFile) {
+ this.specFile = specFile;
+ parseSpecFile();
+ }
+
+ public void init() {
+ parseSpecFile();
+ }
+
+ protected void parseSpecFile() {
+ try {
+ List<String> lines = (List<String>) Files.readAllLines(specFile);
+
+ lines: for (String line : lines) {
+ int indexSemiColon = line.indexOf(':');
+ if (indexSemiColon <= 0)
+ continue lines;
+ String directive = line.substring(0, indexSemiColon).trim();
+ String value = line.substring(indexSemiColon + 1).trim();
+ if ("name".equals(directive.toLowerCase()))
+ name = value;
+ else if ("version".equals(directive.toLowerCase()))
+ version = value;
+ else if ("release".equals(directive.toLowerCase()))
+ release = value;
+ else if (directive.toLowerCase().startsWith("source"))
+ sources.put(directive, interpret(value));
+ else if (directive.toLowerCase().startsWith("patch"))
+ patches.put(directive, interpret(value));
+ }
+
+ } catch (IOException e) {
+ throw new RuntimeException("Cannot parse spec file " + specFile, e);
+ }
+ }
+
+ protected String interpret(String value) {
+ StringBuffer buf = new StringBuffer(value.length());
+ StringBuffer currKey = null;
+ boolean mayBeKey = false;
+ chars: for (char c : value.toCharArray()) {
+ if (c == '%')
+ mayBeKey = true;
+ else if (c == '{') {
+ if (mayBeKey)
+ currKey = new StringBuffer();
+ } else if (c == '}') {
+ if (currKey == null)
+ continue chars;
+ String key = currKey.toString();
+ if ("name".equals(key.toLowerCase()))
+ buf.append(name);
+ else if ("version".equals(key.toLowerCase()))
+ buf.append(version);
+ else
+ buf.append("%{").append(key).append('}');
+ currKey = null;
+ } else {
+ if (currKey != null)
+ currKey.append(c);
+ else
+ buf.append(c);
+ }
+ }
+ return buf.toString();
+ }
+
+ public Path getSpecFile() {
+ return specFile;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public String getVersion() {
+ return version;
+ }
+
+ public String getRelease() {
+ return release;
+ }
+
+ public Map<String, String> getSources() {
+ return sources;
+ }
+
+ public Map<String, String> getPatches() {
+ return patches;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+/**
+ * Local build repository, used only for builds. Marker type: all behaviour is
+ * inherited from AbstractRpmRepository.
+ */
+public class StagingRpmRepository extends AbstractRpmRepository {
+
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+/**
+ * A repository of third party RPMs used for the build. RPM used by the builds
+ * will be cached within the system.
+ */
+public class ThirdPartyRpmRepository extends AbstractRpmRepository {
+	// extra yum configuration lines appended to this repository's section
+	// when the yum config is generated (see RpmFactoryImpl)
+	private String yumConf;
+
+	public String getYumConf() {
+		return yumConf;
+	}
+
+	public void setYumConf(String yumConf) {
+		this.yumConf = yumConf;
+	}
+
+}
--- /dev/null
+package org.argeo.slc.rpmfactory.core;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Set;
+import java.util.StringTokenizer;
+import java.util.TreeSet;
+
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.io.LineIterator;
+import org.argeo.api.cms.CmsLog;
+import org.argeo.slc.SlcException;
+
+/**
+ * Reads the output of a 'yum list all' command and interpret the list of
+ * packages.
+ */
+public class YumListParser implements RpmPackageSet {
+ private final static CmsLog log = CmsLog.getLog(YumListParser.class);
+
+ private Set<String> installed = new TreeSet<String>();
+ /** Not installed but available */
+ private Set<String> installable = new TreeSet<String>();
+
+ private Path yumListOutput;
+
+ public void init() {
+ if (yumListOutput != null) {
+ try (InputStream in = Files.newInputStream(yumListOutput)) {
+ load(in);
+ if (log.isDebugEnabled())
+ log.debug(installed.size() + " installed, " + installable.size() + " installable, from "
+ + yumListOutput);
+ } catch (IOException e) {
+ throw new SlcException("Cannot initialize yum list parser", e);
+ }
+ }
+ }
+
+ public Boolean contains(String packageName) {
+ if (installed.contains(packageName))
+ return true;
+ else
+ return installable.contains(packageName);
+ }
+
+ protected void load(InputStream in) throws IOException {
+ Boolean readingInstalled = false;
+ Boolean readingAvailable = false;
+ LineIterator it = IOUtils.lineIterator(in, "UTF-8");
+ while (it.hasNext()) {
+ String line = it.nextLine();
+ if (line.trim().equals("Installed Packages")) {
+ readingInstalled = true;
+ } else if (line.trim().equals("Available Packages")) {
+ readingAvailable = true;
+ readingInstalled = false;
+ } else if (readingAvailable) {
+ if (Character.isLetterOrDigit(line.charAt(0))) {
+ installable.add(extractRpmName(line));
+ }
+ } else if (readingInstalled) {
+ if (Character.isLetterOrDigit(line.charAt(0))) {
+ installed.add(extractRpmName(line));
+ }
+ }
+ }
+ }
+
+ protected String extractRpmName(String line) {
+ StringTokenizer st = new StringTokenizer(line, " \t");
+ String packageName = st.nextToken();
+ // consider the arch as an extension
+ return FilenameUtils.getBaseName(packageName);
+ // return packageName.split("\\.")[0];
+ }
+
+ public Set<String> getInstalled() {
+ return installed;
+ }
+
+ public Set<String> getInstallable() {
+ return installable;
+ }
+
+ public void setYumListOutput(Path yumListOutput) {
+ this.yumListOutput = yumListOutput;
+ }
+
+}
<module>org.argeo.slc.runtime</module>
<module>org.argeo.slc.jcr</module>
<module>org.argeo.slc.repo</module>
- <module>org.argeo.slc.factory</module>
+ <module>org.argeo.slc.rpmfactory</module>
<!-- CMS extensions -->
<module>cms</module>
<?pde version="3.8"?>
<target name="argeo-tp-rap">
<locations>
- <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp" type="Directory"/>
- <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.apache" type="Directory"/>
- <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.eclipse.equinox" type="Directory"/>
- <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.eclipse.rap" type="Directory"/>
- <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.jetty" type="Directory"/>
- <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.sdk" type="Directory"/>
- <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.jcr" type="Directory"/>
+ <location path="/usr/share/a2/org.argeo.tp" type="Directory"/>
+ <location path="/usr/share/a2/org.argeo.tp.apache" type="Directory"/>
+ <location path="/usr/share/a2/org.argeo.tp.eclipse.equinox" type="Directory"/>
+ <location path="/usr/share/a2/org.argeo.tp.eclipse.rap" type="Directory"/>
+ <location path="/usr/share/a2/org.argeo.tp.jetty" type="Directory"/>
+ <location path="/usr/share/a2/org.argeo.tp.sdk" type="Directory"/>
+ <location path="/usr/share/a2/org.argeo.tp.jcr" type="Directory"/>
</locations>
</target>
\ No newline at end of file
--- /dev/null
+# Debian packages wrapping the A2 third-party categories.
+DIST_PKGS := \
+argeo-tp-base \
+argeo-tp-equinox \
+argeo-tp-jetty \
+argeo-tp-rap \
+argeo-tp-jcr \
+argeo-tp-sdk
+
+# staged package trees and final .deb files
+DEB_DIRS := $(DIST_PKGS:%=$(SDK_BUILD_BASE)/build/deb/%)
+DEB_PKGS := $(DIST_PKGS:%=$(SDK_BUILD_BASE)/deb/%.deb)
+
+.PHONY: deb prepare-deb
+
+# Build all packages, then generate the repository index.
+deb: $(DEB_PKGS)
+	cd $(SDK_BUILD_BASE)/deb && dpkg-scanpackages . | gzip > Packages.gz
+
+# dpkg-deb names the package after the control file, so this recipe creates
+# exactly $@ in $(SDK_BUILD_BASE)/deb (the previous recipe only echoed and
+# never created its target, while the control rule built the .deb as a side
+# effect).
+$(SDK_BUILD_BASE)/deb/%.deb : $(SDK_BUILD_BASE)/build/deb/%/DEBIAN/control
+	mkdir -p $(dir $@)
+	dpkg-deb --build --root-owner-group $(SDK_BUILD_BASE)/build/deb/$* $(SDK_BUILD_BASE)/deb
+
+# Stage the control file; prepare-deb is order-only so that a phony
+# prerequisite does not force a rebuild on every run.
+$(SDK_BUILD_BASE)/build/deb/%/DEBIAN/control : $(SDK_SRC_BASE)/sdk/deb/%.control | prepare-deb
+	cp $< $@
+
+# Populate the per-package trees from the built A2 categories.
+prepare-deb:
+	mkdir -p $(foreach deb_dir, $(DEB_DIRS), $(deb_dir)/DEBIAN)
+	mkdir -p $(foreach deb_dir, $(DEB_DIRS), $(deb_dir)/usr/share/a2)
+	rsync -av $(SDK_BUILD_BASE)/a2/org.argeo.tp $(SDK_BUILD_BASE)/build/deb/argeo-tp-base/usr/share/a2
+	rsync -av $(SDK_BUILD_BASE)/a2/org.argeo.tp.apache $(SDK_BUILD_BASE)/build/deb/argeo-tp-base/usr/share/a2
+	rsync -av $(SDK_BUILD_BASE)/a2/org.argeo.tp.eclipse.equinox $(SDK_BUILD_BASE)/build/deb/argeo-tp-equinox/usr/share/a2
+	rsync -av $(SDK_BUILD_BASE)/a2/org.argeo.tp.jetty $(SDK_BUILD_BASE)/build/deb/argeo-tp-jetty/usr/share/a2
+	rsync -av $(SDK_BUILD_BASE)/a2/org.argeo.tp.eclipse.rap $(SDK_BUILD_BASE)/build/deb/argeo-tp-rap/usr/share/a2
+	rsync -av $(SDK_BUILD_BASE)/a2/org.argeo.tp.jcr $(SDK_BUILD_BASE)/build/deb/argeo-tp-jcr/usr/share/a2
+	rsync -av $(SDK_BUILD_BASE)/a2/org.argeo.tp.sdk $(SDK_BUILD_BASE)/build/deb/argeo-tp-sdk/usr/share/a2
+
--- /dev/null
+Package: argeo-tp-base
+Version: 2.3.4
+Architecture: all
+Maintainer: Mathieu Baudier <mbaudier@argeo.org>
+Description: Base third-party components which are GPL-compatible
--- /dev/null
+Package: argeo-tp-equinox
+Version: 2.3.4
+Architecture: all
+Maintainer: Mathieu Baudier <mbaudier@argeo.org>
+Description: Eclipse Equinox OSGi runtime
--- /dev/null
+Package: argeo-tp-jcr
+Version: 2.3.4
+Architecture: all
+Maintainer: Mathieu Baudier <mbaudier@argeo.org>
+Description: Java Content Repository API (NON FREE!!) and Apache Jackrabbit
--- /dev/null
+Package: argeo-tp-jetty
+Version: 2.3.4
+Architecture: all
+Maintainer: Mathieu Baudier <mbaudier@argeo.org>
+Description: Jetty HTTP server
--- /dev/null
+Package: argeo-tp-rap
+Version: 2.3.4
+Architecture: all
+Maintainer: Mathieu Baudier <mbaudier@argeo.org>
+Description: Eclipse RAP
--- /dev/null
+Package: argeo-tp-sdk
+Version: 2.3.4
+Architecture: all
+Maintainer: Mathieu Baudier <mbaudier@argeo.org>
+Description: Libraries required by the build and test tools
+include $(SDK_SRC_BASE)/sdk/deb.mk
--- /dev/null
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?pde version="3.8"?>
+<target name="(output) argeo-tp-rap">
+ <locations>
+ <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp" type="Directory"/>
+ <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.apache" type="Directory"/>
+ <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.eclipse.equinox" type="Directory"/>
+ <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.eclipse.rap" type="Directory"/>
+ <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.jetty" type="Directory"/>
+ <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.sdk" type="Directory"/>
+ <location path="${project_loc:argeo-slc-unstable}/output/a2/org.argeo.tp.jcr" type="Directory"/>
+ </locations>
+</target>
\ No newline at end of file
--- /dev/null
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+import org.argeo.slc.factory.A2Factory;
+
+/**
+ * Entry point generating the A2 third-party distribution: processes the
+ * Eclipse archives first, then the Maven-based categories, using the
+ * descriptors under ./tp. Run directly as a single-file source program.
+ */
+class Make {
+	public static void main(String[] args) {
+		Path originDir = Paths.get("./output/origin").toAbsolutePath().normalize();
+		Path a2OutputDir = Paths.get("./output/a2").toAbsolutePath().normalize();
+		A2Factory a2Factory = new A2Factory(originDir, a2OutputDir);
+
+		Path descriptorsDir = Paths.get("./tp").toAbsolutePath().normalize();
+
+		// single-unit processing, kept for debugging:
+//		a2Factory.processSingleM2ArtifactDistributionUnit(descriptorsDir.resolve("org.argeo.tp.apache").resolve("org.apache.xml.resolver.bnd"));
+//		a2Factory.processM2BasedDistributionUnit(descriptorsDir.resolve("org.argeo.tp/slf4j"));
+//		System.exit(0);
+
+		// Eclipse
+		a2Factory.processEclipseArchive(descriptorsDir.resolve("org.argeo.tp.eclipse.equinox").resolve("eclipse-equinox"));
+		a2Factory.processEclipseArchive(descriptorsDir.resolve("org.argeo.tp.eclipse.rap").resolve("eclipse-rap"));
+		a2Factory.processEclipseArchive(descriptorsDir.resolve("org.argeo.tp.eclipse.rcp").resolve("eclipse-rcp"));
+
+		// Maven
+		a2Factory.processCategory(descriptorsDir.resolve("org.argeo.tp.sdk"));
+		a2Factory.processCategory(descriptorsDir.resolve("org.argeo.tp"));
+		a2Factory.processCategory(descriptorsDir.resolve("org.argeo.tp.apache"));
+		a2Factory.processCategory(descriptorsDir.resolve("org.argeo.tp.jetty"));
+		a2Factory.processCategory(descriptorsDir.resolve("org.argeo.tp.jcr"));
+	}
+
+}
\ No newline at end of file