--- /dev/null
+package org.argeo.slc.repo;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.Enumeration;
+import java.util.Iterator;
+import java.util.StringTokenizer;
+import java.util.jar.Attributes;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+import java.util.jar.JarInputStream;
+import java.util.jar.JarOutputStream;
+import java.util.jar.Manifest;
+
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.argeo.slc.BasicNameVersion;
+import org.argeo.slc.NameVersion;
+import org.sonatype.aether.artifact.Artifact;
+
+/**
+ * Utilities around repositories: packaging of source jars as Eclipse PDE
+ * source bundles and reading of OSGi name/version information from artifacts.
+ */
+public class RepoUtils {
+ private final static Log log = LogFactory.getLog(RepoUtils.class);
+
+ /**
+ * Packages a regular sources jar as a PDE source bundle, rewriting its
+ * manifest with an Eclipse-SourceBundle header. If the jar is already a
+ * PDE source bundle it is streamed unchanged (so as not to break jar
+ * signing). The output stream is not closed by this method.
+ */
+ public static void packagesAsPdeSource(File sourceFile,
+ NameVersion nameVersion, OutputStream out) throws IOException {
+ if (isAlreadyPdeSource(sourceFile)) {
+ FileInputStream in = new FileInputStream(sourceFile);
+ IOUtils.copy(in, out);
+ IOUtils.closeQuietly(in);
+ } else {
+ // PDE convention: the source bundle symbolic name is <name>.source
+ String sourceSymbolicName = nameVersion.getName() + ".source";
+
+ Manifest sourceManifest = null;
+ sourceManifest = new Manifest();
+ sourceManifest.getMainAttributes().put(
+ Attributes.Name.MANIFEST_VERSION, "1.0");
+ sourceManifest.getMainAttributes().putValue("Bundle-SymbolicName",
+ sourceSymbolicName);
+ sourceManifest.getMainAttributes().putValue("Bundle-Version",
+ nameVersion.getVersion());
+ sourceManifest.getMainAttributes().putValue(
+ "Eclipse-SourceBundle",
+ nameVersion.getName() + ";version="
+ + nameVersion.getVersion());
+ copyJar(sourceFile, out, sourceManifest);
+ }
+ }
+
+ /**
+ * Checks whether the file has already been packaged as PDE source, in
+ * order not to mess with jar signing.
+ */
+ private static boolean isAlreadyPdeSource(File sourceFile) {
+ JarInputStream jarInputStream = null;
+
+ try {
+ jarInputStream = new JarInputStream(new FileInputStream(sourceFile));
+
+ // NOTE(review): getManifest() returns null for a jar without
+ // META-INF/MANIFEST.MF; the resulting NPE falls through to the
+ // catch block below and is treated as "not a PDE source".
+ Manifest manifest = jarInputStream.getManifest();
+ Iterator<?> it = manifest.getMainAttributes().keySet().iterator();
+ boolean res = false;
+ // containsKey() does not work (keys are Attributes.Name objects,
+ // not strings), iterating...
+ while (it.hasNext())
+ if (it.next().toString().equals("Eclipse-SourceBundle")) {
+ res = true;
+ break;
+ }
+ // boolean res = manifest.getMainAttributes().get(
+ // "Eclipse-SourceBundle") != null;
+ if (res)
+ log.info(sourceFile + " is already a PDE source");
+ return res;
+ } catch (Exception e) {
+ // probably not a jar, skipping
+ if (log.isDebugEnabled())
+ log.debug("Skipping " + sourceFile + " because of "
+ + e.getMessage());
+ return false;
+ } finally {
+ IOUtils.closeQuietly(jarInputStream);
+ }
+ }
+
+ /**
+ * Copies a jar, replacing its manifest with the provided one.
+ *
+ * @param manifest
+ * can be null, in which case all entries (including the
+ * original manifest) are copied as-is
+ */
+ private static void copyJar(File source, OutputStream out, Manifest manifest)
+ throws IOException {
+ JarFile sourceJar = null;
+ JarOutputStream output = null;
+ try {
+ output = manifest != null ? new JarOutputStream(out, manifest)
+ : new JarOutputStream(out);
+ sourceJar = new JarFile(source);
+
+ entries: for (Enumeration<?> entries = sourceJar.entries(); entries
+ .hasMoreElements();) {
+ JarEntry entry = (JarEntry) entries.nextElement();
+ // skip the original manifest: the replacement was already
+ // written by the JarOutputStream constructor above
+ if (manifest != null
+ && entry.getName().equals("META-INF/MANIFEST.MF"))
+ continue entries;
+
+ InputStream entryStream = sourceJar.getInputStream(entry);
+ JarEntry newEntry = new JarEntry(entry.getName());
+ // newEntry.setMethod(JarEntry.DEFLATED);
+ output.putNextEntry(newEntry);
+ IOUtils.copy(entryStream, output);
+ }
+ } finally {
+ IOUtils.closeQuietly(output);
+ try {
+ if (sourceJar != null)
+ sourceJar.close();
+ } catch (IOException e) {
+ // silent
+ }
+ }
+ }
+
+ /**
+ * Reads the OSGi {@link NameVersion} from the file underlying this
+ * artifact, working around artifacts wrongly published with a 'pom'
+ * extension.
+ */
+ public static NameVersion readNameVersion(Artifact artifact) {
+ File artifactFile = artifact.getFile();
+ if (artifact.getExtension().equals("pom")) {
+ // hack to process jars which weirdly appear as POMs
+ File jarFile = new File(artifactFile.getParentFile(),
+ FilenameUtils.getBaseName(artifactFile.getPath()) + ".jar");
+ if (jarFile.exists()) {
+ log.warn("Use " + jarFile + " instead of " + artifactFile
+ + " for " + artifact);
+ artifactFile = jarFile;
+ }
+ }
+ return readNameVersion(artifactFile);
+ }
+
+ /**
+ * Reads the OSGi {@link NameVersion} from a jar manifest
+ * (Bundle-SymbolicName and Bundle-Version main attributes).
+ *
+ * @return the name/version, or null if the file is not a jar or carries
+ * no OSGi metadata
+ */
+ public static NameVersion readNameVersion(File artifactFile) {
+ JarInputStream jarInputStream = null;
+
+ try {
+ BasicNameVersion nameVersion = new BasicNameVersion();
+ jarInputStream = new JarInputStream(new FileInputStream(
+ artifactFile));
+ nameVersion.setName(jarInputStream.getManifest()
+ .getMainAttributes().getValue("Bundle-SymbolicName"));
+
+ // Skip additional specs such as
+ // ; singleton:=true
+ if (nameVersion.getName().indexOf(';') > -1) {
+ nameVersion.setName(new StringTokenizer(nameVersion.getName(),
+ " ;").nextToken());
+ }
+
+ nameVersion.setVersion(jarInputStream.getManifest()
+ .getMainAttributes().getValue("Bundle-Version"));
+
+ return nameVersion;
+ } catch (Exception e) {
+ // probably not a jar, skipping
+ if (log.isDebugEnabled()) {
+ log.debug("Skipping " + artifactFile + " because of " + e);
+ // e.printStackTrace();
+ }
+ } finally {
+ IOUtils.closeQuietly(jarInputStream);
+ }
+ return null;
+ }
+
+ /** Prevents instantiation (static utilities only). */
+ private RepoUtils() {
+ }
+}
import java.io.ByteArrayOutputStream;
import java.io.File;
-import java.io.FileInputStream;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
-import javax.jcr.Binary;
import javax.jcr.Node;
import javax.jcr.NodeIterator;
-import javax.jcr.Property;
import javax.jcr.Repository;
-import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.nodetype.NodeType;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
+import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.argeo.jcr.JcrUtils;
+import org.argeo.slc.NameVersion;
import org.argeo.slc.SlcException;
import org.argeo.slc.aether.AetherTemplate;
-import org.argeo.slc.jcr.SlcNames;
-import org.argeo.slc.jcr.SlcTypes;
import org.argeo.slc.repo.ArtifactIndexer;
import org.argeo.slc.repo.JarFileIndexer;
import org.argeo.slc.repo.RepoConstants;
+import org.argeo.slc.repo.RepoUtils;
import org.sonatype.aether.artifact.Artifact;
import org.sonatype.aether.graph.DependencyNode;
import org.sonatype.aether.util.artifact.DefaultArtifact;
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.NodeList;
+/**
+ * Import all the dependencies listed in a POM and their dependency graphs to a
+ * workspace.
+ */
public class ImportMavenDependencies implements Runnable {
private final static Log log = LogFactory
.getLog(ImportMavenDependencies.class);
private String workspace;
private String artifactBasePath = RepoConstants.ARTIFACTS_BASE_PATH;
- private String distributionsBasePath = RepoConstants.DISTRIBUTIONS_BASE_PATH;
- private String distributionName;
private ArtifactIndexer artifactIndexer = new ArtifactIndexer();
private JarFileIndexer jarFileIndexer = new JarFileIndexer();
+ private Comparator<Artifact> artifactComparator = new Comparator<Artifact>() {
+ public int compare(Artifact o1, Artifact o2) {
+ return o1.getArtifactId().compareTo(o2.getArtifactId());
+ }
+ };
public void run() {
// resolve
}
}
- public Set<Artifact> resolveDistribution() {
+ private Set<Artifact> resolveDistribution() {
try {
Artifact pomArtifact = new DefaultArtifact(rootCoordinates);
- Comparator<Artifact> artifactComparator = new Comparator<Artifact>() {
- public int compare(Artifact o1, Artifact o2) {
- return o1.getArtifactId().compareTo(o2.getArtifactId());
- }
- };
Set<Artifact> registeredArtifacts = new TreeSet<Artifact>(
artifactComparator);
- parsePom(aetherTemplate, registeredArtifacts, pomArtifact);
+ MavenConventionsUtils.gatherPomDependencies(aetherTemplate,
+ registeredArtifacts, pomArtifact);
if (log.isDebugEnabled())
log.debug("Gathered " + registeredArtifacts.size()
+ " artifacts");
Set<Artifact> artifacts = new TreeSet<Artifact>(artifactComparator);
for (Artifact artifact : registeredArtifacts) {
try {
- addArtifact(artifacts, artifact);
- DependencyNode node = aetherTemplate
- .resolveDependencies(artifact);
- addDependencies(artifacts, node);
+ Boolean wasAdded = addArtifact(artifacts, artifact);
+ if (wasAdded) {
+ DependencyNode node = aetherTemplate
+ .resolveDependencies(artifact);
+ addDependencies(artifacts, node, null);
+ }
} catch (Exception e) {
log.error("Could not resolve dependencies of " + artifact
+ ": " + e.getCause().getMessage());
if (log.isDebugEnabled())
log.debug("Resolved " + artifacts.size() + " artifacts");
- Properties distributionDescriptor = new Properties();
- for (Artifact artifact : artifacts) {
- log.debug(artifact.getArtifactId() + " ["
- + artifact.getVersion() + "]\t(" + artifact + ")");
- distributionDescriptor.setProperty(artifact.getArtifactId()
- + ":" + artifact.getVersion(), artifact.toString());
- }
- ByteArrayOutputStream out = new ByteArrayOutputStream();
- distributionDescriptor.store(out, "");
- log.debug(new String(out.toByteArray()));
- out.close();
+ // distribution descriptor
+ // Properties distributionDescriptor =
+ // generateDistributionDescriptor(artifacts);
+ // ByteArrayOutputStream out = new ByteArrayOutputStream();
+ // distributionDescriptor.store(out, "");
+ // log.debug(new String(out.toByteArray()));
+ // out.close();
return artifacts;
} catch (Exception e) {
}
}
- protected void syncDistribution(Session jcrSession, Set<Artifact> artifacts) {
+ protected Properties generateDistributionDescriptor(Set<Artifact> artifacts) {
+ Properties distributionDescriptor = new Properties();
+ for (Artifact artifact : artifacts) {
+ log.debug(artifact.getArtifactId() + " [" + artifact.getVersion()
+ + "]\t(" + artifact + ")");
+ distributionDescriptor.setProperty(artifact.getArtifactId() + ":"
+ + artifact.getVersion(), artifact.toString());
+ }
+ return distributionDescriptor;
+ }
+
+ private void syncDistribution(Session jcrSession, Set<Artifact> artifacts) {
+ Set<Artifact> artifactsWithoutSources = new TreeSet<Artifact>(
+ artifactComparator);
Long begin = System.currentTimeMillis();
try {
JcrUtils.mkdirs(jcrSession, artifactBasePath);
- JcrUtils.mkdirs(jcrSession, distributionsBasePath + '/'
- + distributionName);
artifacts: for (Artifact artifact : artifacts) {
- File file = artifact.getFile();
- if (file == null) {
- // log.warn("File not found for " + artifact);
-
- file = artifactToFile(artifact);
-
- if (!file.exists()) {
- log.warn("Generated file " + file + " for " + artifact
- + " does not exist");
- continue artifacts;
- }
+ File jarFile = MavenConventionsUtils.artifactToFile(artifact);
+ if (!jarFile.exists()) {
+ log.warn("Generated file " + jarFile + " for " + artifact
+ + " does not exist");
+ continue artifacts;
}
+ artifact.setFile(jarFile);
try {
- String parentPath = artifactBasePath
- + (artifactBasePath.endsWith("/") ? "" : "/")
- + artifactParentPath(artifact);
+ String parentPath = MavenConventionsUtils
+ .artifactParentPath(artifactBasePath, artifact);
Node parentNode;
- if (!jcrSession.itemExists(parentPath)) {
+ if (!jcrSession.itemExists(parentPath))
parentNode = JcrUtils.mkdirs(jcrSession, parentPath,
NodeType.NT_FOLDER);
- } else {
+ else
parentNode = jcrSession.getNode(parentPath);
- }
Node fileNode;
- if (!parentNode.hasNode(file.getName())) {
- fileNode = createFileNode(parentNode, file);
+ if (!parentNode.hasNode(jarFile.getName())) {
+ fileNode = createFileNode(parentNode, jarFile);
} else {
- fileNode = parentNode.getNode(file.getName());
+ fileNode = parentNode.getNode(jarFile.getName());
}
if (artifactIndexer.support(fileNode.getPath()))
jarFileIndexer.index(fileNode);
jcrSession.save();
- if (fileNode.hasProperty(SlcNames.SLC_SYMBOLIC_NAME)) {
- String distPath = bundleDistributionPath(fileNode);
- if (!jcrSession.itemExists(distPath)
- && fileNode
- .isNodeType(SlcTypes.SLC_BUNDLE_ARTIFACT))
- jcrSession.getWorkspace().clone(
- jcrSession.getWorkspace().getName(),
- fileNode.getPath(), distPath, false);
- if (log.isDebugEnabled())
- log.debug("Synchronized " + fileNode);
- }
+ addPdeSource(jcrSession, artifact, jarFile, artifacts);
+ jcrSession.save();
+
+ if (log.isDebugEnabled())
+ log.debug("Synchronized " + fileNode);
} catch (Exception e) {
log.error("Could not synchronize " + artifact, e);
jcrSession.refresh(false);
}
Long duration = (System.currentTimeMillis() - begin) / 1000;
- if (log.isDebugEnabled())
+ if (log.isDebugEnabled()) {
log.debug("Synchronized distribution in " + duration + "s");
+ log.debug("The following artifacts have no sources:");
+ for (Artifact artifact : artifactsWithoutSources) {
+ log.debug(artifact);
+ }
+ }
} catch (Exception e) {
throw new SlcException("Cannot synchronize distribution", e);
}
}
- protected String artifactParentPath(Artifact artifact) {
- return artifact.getGroupId().replace('.', '/') + '/'
- + artifact.getArtifactId() + '/' + artifact.getVersion();
- }
-
- protected String bundleDistributionPath(Node fileNode) {
+ /** Tries to resolve the PDE source bundle for this artifact and add it to the repository. */
+ private void addPdeSource(Session session, Artifact artifact,
+ File artifactFile, Set<Artifact> artifactsWithoutSources) {
+ ByteArrayOutputStream out = new ByteArrayOutputStream();
try {
- return distributionsBasePath
- + '/'
- + distributionName
- + '/'
- + fileNode.getProperty(SlcNames.SLC_SYMBOLIC_NAME)
- .getString()
- + '_'
- + fileNode.getProperty(SlcNames.SLC_BUNDLE_VERSION)
- .getString();
- } catch (RepositoryException e) {
- throw new SlcException("Cannot create distribution path for "
- + fileNode, e);
- }
- }
-
- protected File artifactToFile(Artifact artifact) {
- return new File(System.getProperty("user.home")
- + File.separator
- + ".m2"
- + File.separator
- + "repository"
- + File.separator
- + artifact.getGroupId().replace('.', File.separatorChar)
- + File.separator
- + artifact.getArtifactId()
- + File.separator
- + artifact.getVersion()
- + File.separator
- + artifact.getArtifactId()
- + '-'
- + artifact.getVersion()
- + (artifact.getClassifier().equals("") ? ""
- : '-' + artifact.getClassifier()) + '.'
- + artifact.getExtension());
- }
+ File origSourceFile = null;
+ Artifact origSourceArtifact = new DefaultArtifact(
+ artifact.getGroupId(), artifact.getArtifactId(), "sources",
+ artifact.getExtension(), artifact.getVersion());
+ Artifact targetSourceArtifact = new DefaultArtifact(
+ artifact.getGroupId(),
+ artifact.getArtifactId() + ".source",
+ artifact.getExtension(), artifact.getVersion());
+ try {
+ origSourceFile = aetherTemplate
+ .getResolvedFile(origSourceArtifact);
+ } catch (Exception e) {
+ // also try artifact following the conventions
+ origSourceArtifact = targetSourceArtifact;
+ origSourceFile = aetherTemplate
+ .getResolvedFile(origSourceArtifact);
+ }
- private Node createFileNode(Node parentNode, File file) {
- Binary binary = null;
- try {
- Node fileNode = parentNode
- .addNode(file.getName(), NodeType.NT_FILE);
- Node contentNode = fileNode.addNode(Node.JCR_CONTENT,
- NodeType.NT_RESOURCE);
- binary = contentNode.getSession().getValueFactory()
- .createBinary(new FileInputStream(file));
- contentNode.setProperty(Property.JCR_DATA, binary);
- return fileNode;
+ String parentPath = MavenConventionsUtils.artifactParentPath(
+ artifactBasePath, artifact);
+ Node parentNode = JcrUtils.mkdirs(session, parentPath,
+ NodeType.NT_FOLDER);
+ NameVersion bundleNameVersion = RepoUtils
+ .readNameVersion(artifactFile);
+ RepoUtils.packagesAsPdeSource(origSourceFile, bundleNameVersion,
+ out);
+ String targetSourceFileName = MavenConventionsUtils
+ .artifactFileName(targetSourceArtifact);
+ JcrUtils.copyBytesAsFile(parentNode, targetSourceFileName,
+ out.toByteArray());
} catch (Exception e) {
- throw new SlcException("Cannot create file node based on " + file
- + " under " + parentNode, e);
+ log.error("Cannot add PDE source for " + artifact + ": " + e);
+ artifactsWithoutSources.add(artifact);
} finally {
- if (binary != null)
- binary.dispose();
+ IOUtils.closeQuietly(out);
}
}
+ private Node createFileNode(Node parentNode, File file) {
+ return JcrUtils.copyFile(parentNode, file);
+ }
+
/** Recursively adds non-optional dependencies */
- private void addDependencies(Set<Artifact> artifacts, DependencyNode node) {
+ private void addDependencies(Set<Artifact> artifacts, DependencyNode node,
+ String ancestors) {
+ if (artifacts.contains(node.getDependency().getArtifact()))
+ return;
+ String currentArtifactId = node.getDependency().getArtifact()
+ .getArtifactId();
+ if (log.isDebugEnabled()) {
+ log.debug("# Add dependency for " + currentArtifactId);
+ if (ancestors != null)
+ log.debug(ancestors);
+ }
for (DependencyNode child : node.getChildren()) {
if (!child.getDependency().isOptional()) {
- addArtifact(artifacts, child.getDependency().getArtifact());
- addDependencies(artifacts, child);
+ if (willAdd(child.getDependency().getArtifact())) {
+ addArtifact(artifacts, child.getDependency().getArtifact());
+ addDependencies(artifacts, child, currentArtifactId + "\n"
+ + (ancestors != null ? ancestors : ""));
+ }
}
}
}
- private void addArtifact(Set<Artifact> artifacts, Artifact artifact) {
- if (!excludedArtifacts.contains(artifact.getGroupId() + ":"
- + artifact.getArtifactId()))
+ /** @return whether it was added */
+ private Boolean addArtifact(Set<Artifact> artifacts, Artifact artifact) {
+ Boolean willAdd = willAdd(artifact);
+ if (willAdd)
artifacts.add(artifact);
+ else
+ log.info("Skip " + artifact);
+ return willAdd;
}
- /**
- * Directly parses Maven POM XML format in order to find all artifacts
- * references under the dependency and dependencyManagement tags. This is
- * meant to migrate existing pom registering a lot of artifacts, not to
- * replace Maven resolving.
- */
- protected void parsePom(AetherTemplate aetherTemplate,
- Set<Artifact> artifacts, Artifact pomArtifact) {
- if (log.isDebugEnabled())
- log.debug("Gather dependencies for " + pomArtifact);
-
- try {
- File file = aetherTemplate.getResolvedFile(pomArtifact);
- DocumentBuilder documentBuilder = DocumentBuilderFactory
- .newInstance().newDocumentBuilder();
- Document doc = documentBuilder.parse(file);
-
- // properties
- Properties props = new Properties();
- props.setProperty("project.version", pomArtifact.getBaseVersion());
- NodeList properties = doc.getElementsByTagName("properties");
- if (properties.getLength() > 0) {
- NodeList propertiesElems = properties.item(0).getChildNodes();
- for (int i = 0; i < propertiesElems.getLength(); i++) {
- if (propertiesElems.item(i) instanceof Element) {
- Element property = (Element) propertiesElems.item(i);
- props.put(property.getNodeName(),
- property.getTextContent());
- }
- }
- }
-
- // dependencies (direct and dependencyManagement)
- NodeList dependencies = doc.getElementsByTagName("dependency");
- for (int i = 0; i < dependencies.getLength(); i++) {
- Element dependency = (Element) dependencies.item(i);
- String groupId = dependency.getElementsByTagName("groupId")
- .item(0).getTextContent().trim();
- String artifactId = dependency
- .getElementsByTagName("artifactId").item(0)
- .getTextContent().trim();
- String version = dependency.getElementsByTagName("version")
- .item(0).getTextContent().trim();
- if (version.startsWith("${")) {
- String versionKey = version.substring(0,
- version.length() - 1).substring(2);
- if (!props.containsKey(versionKey))
- throw new SlcException("Cannot interpret version "
- + version);
- version = props.getProperty(versionKey);
- }
- NodeList scopes = dependency.getElementsByTagName("scope");
- if (scopes.getLength() > 0
- && scopes.item(0).getTextContent().equals("import")) {
- // recurse
- parsePom(aetherTemplate, artifacts, new DefaultArtifact(
- groupId, artifactId, "pom", version));
- } else {
- // TODO: deal with scope?
- // TODO: deal with type
- String type = "jar";
- Artifact artifact = new DefaultArtifact(groupId,
- artifactId, type, version);
- artifacts.add(artifact);
- }
- }
- } catch (Exception e) {
- throw new SlcException("Cannot process " + pomArtifact, e);
- }
+ private Boolean willAdd(Artifact artifact) {
+ Boolean willAdd = true;
+ if (excludedArtifacts.contains(artifact.getGroupId() + ":"
+ + artifact.getArtifactId()))
+ willAdd = false;
+ else if (excludedArtifacts.contains(artifact.getGroupId() + ":*"))
+ willAdd = false;
+ return willAdd;
}
public void setAetherTemplate(AetherTemplate aetherTemplate) {
this.rootCoordinates = rootCoordinates;
}
- public void setDistributionName(String distributionName) {
- this.distributionName = distributionName;
- }
-
public void setRepository(Repository repository) {
this.repository = repository;
}
--- /dev/null
+/*
+ * Copyright (C) 2007-2012 Mathieu Baudier
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.argeo.slc.repo.maven;
+
+import java.io.File;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.jcr.Node;
+import javax.jcr.Repository;
+import javax.jcr.RepositoryException;
+import javax.jcr.Session;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.argeo.jcr.JcrUtils;
+import org.argeo.slc.SlcException;
+import org.argeo.slc.jcr.SlcNames;
+import org.argeo.slc.jcr.SlcTypes;
+import org.argeo.slc.repo.RepoConstants;
+import org.sonatype.aether.artifact.Artifact;
+
+/**
+ * Creates a distribution node from a set of artifacts, by cloning already
+ * indexed bundle artifact nodes under a named distribution path.
+ */
+public class IndexDistribution implements Runnable {
+ private final static Log log = LogFactory.getLog(IndexDistribution.class);
+ private Repository repository;
+ private String workspace;
+
+ private String artifactBasePath = RepoConstants.ARTIFACTS_BASE_PATH;
+ private String distributionsBasePath = RepoConstants.DISTRIBUTIONS_BASE_PATH;
+ private String distributionName;
+
+ public void run() {
+ // TODO populate (set is currently always empty, so run() is a no-op)
+ Set<Artifact> artifacts = new HashSet<Artifact>();
+
+ // sync
+ Session session = null;
+ try {
+ session = repository.login(workspace);
+ syncDistribution(session, artifacts);
+ } catch (Exception e) {
+ throw new SlcException("Cannot import distribution", e);
+ } finally {
+ JcrUtils.logoutQuietly(session);
+ }
+ }
+
+ /**
+ * Clones each bundle artifact node under
+ * distributionsBasePath/distributionName. The artifact file nodes are
+ * expected to already exist under artifactBasePath.
+ */
+ protected void syncDistribution(Session jcrSession, Set<Artifact> artifacts) {
+ Long begin = System.currentTimeMillis();
+ try {
+ JcrUtils.mkdirs(jcrSession, distributionsBasePath + '/'
+ + distributionName);
+ artifacts: for (Artifact artifact : artifacts) {
+ File file = artifact.getFile();
+ if (file == null) {
+ // fall back on the conventional local repository location
+ file = MavenConventionsUtils.artifactToFile(artifact);
+ if (!file.exists()) {
+ log.warn("Generated file " + file + " for " + artifact
+ + " does not exist");
+ continue artifacts;
+ }
+ }
+
+ try {
+ String parentPath = artifactBasePath
+ + (artifactBasePath.endsWith("/") ? "" : "/")
+ + artifactParentPath(artifact);
+ Node parentNode = jcrSession.getNode(parentPath);
+ Node fileNode = parentNode.getNode(file.getName());
+
+ // only OSGi bundles (with a symbolic name) are indexed
+ if (fileNode.hasProperty(SlcNames.SLC_SYMBOLIC_NAME)) {
+ String distPath = bundleDistributionPath(fileNode);
+ if (!jcrSession.itemExists(distPath)
+ && fileNode
+ .isNodeType(SlcTypes.SLC_BUNDLE_ARTIFACT))
+ jcrSession.getWorkspace().clone(
+ jcrSession.getWorkspace().getName(),
+ fileNode.getPath(), distPath, false);
+ if (log.isDebugEnabled())
+ log.debug("Indexed " + fileNode);
+ }
+ } catch (Exception e) {
+ // NOTE(review): rethrowing here aborts the whole sync —
+ // the remaining artifacts are not processed, unlike the
+ // continue-on-error style used elsewhere; confirm intended
+ log.error("Could not index " + artifact, e);
+ jcrSession.refresh(false);
+ throw e;
+ }
+ }
+
+ Long duration = (System.currentTimeMillis() - begin) / 1000;
+ if (log.isDebugEnabled())
+ log.debug("Indexed distribution in " + duration + "s");
+ } catch (Exception e) {
+ throw new SlcException("Cannot synchronize distribution", e);
+ }
+ }
+
+ /** Relative path of the directories where an artifact file is stored. */
+ private String artifactParentPath(Artifact artifact) {
+ return artifact.getGroupId().replace('.', '/') + '/'
+ + artifact.getArtifactId() + '/' + artifact.getVersion();
+ }
+
+ /**
+ * Absolute path of the distribution entry for this bundle, based on its
+ * symbolic name and bundle version.
+ */
+ private String bundleDistributionPath(Node fileNode) {
+ try {
+ return distributionsBasePath
+ + '/'
+ + distributionName
+ + '/'
+ + fileNode.getProperty(SlcNames.SLC_SYMBOLIC_NAME)
+ .getString()
+ + '_'
+ + fileNode.getProperty(SlcNames.SLC_BUNDLE_VERSION)
+ .getString();
+ } catch (RepositoryException e) {
+ throw new SlcException("Cannot create distribution path for "
+ + fileNode, e);
+ }
+ }
+
+ public void setDistributionName(String distributionName) {
+ this.distributionName = distributionName;
+ }
+
+ public void setRepository(Repository repository) {
+ this.repository = repository;
+ }
+
+ public void setWorkspace(String workspace) {
+ this.workspace = workspace;
+ }
+
+}
--- /dev/null
+package org.argeo.slc.repo.maven;
+
+import java.io.File;
+import java.util.Properties;
+import java.util.Set;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.argeo.slc.SlcException;
+import org.argeo.slc.aether.AetherTemplate;
+import org.sonatype.aether.artifact.Artifact;
+import org.sonatype.aether.util.artifact.DefaultArtifact;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+
+/**
+ * Static utilities around Maven which are NOT using the Maven APIs (conventions
+ * based).
+ */
+public class MavenConventionsUtils {
+	private final static Log log = LogFactory
+			.getLog(MavenConventionsUtils.class);
+
+	/**
+	 * Path to the file identified by this artifact <b>without</b> using Maven
+	 * APIs (convention based). Default location of the local repository
+	 * (~/.m2/repository) is used here.
+	 *
+	 * @see MavenConventionsUtils#artifactToFile(String, Artifact)
+	 */
+	public static File artifactToFile(Artifact artifact) {
+		return artifactToFile(System.getProperty("user.home") + File.separator
+				+ ".m2" + File.separator + "repository", artifact);
+	}
+
+	/**
+	 * Path to the file identified by this artifact <b>without</b> using Maven
+	 * APIs (convention based).
+	 *
+	 * @param repositoryPath
+	 *            path to the related local repository location
+	 * @param artifact
+	 *            the artifact
+	 */
+	public static File artifactToFile(String repositoryPath, Artifact artifact) {
+		return new File(repositoryPath + File.separator
+				+ artifact.getGroupId().replace('.', File.separatorChar)
+				+ File.separator + artifact.getArtifactId() + File.separator
+				+ artifact.getVersion() + File.separator
+				+ artifactFileName(artifact)).getAbsoluteFile();
+	}
+
+	/** The file name of this artifact when stored: artifactId-version[-classifier].extension */
+	public static String artifactFileName(Artifact artifact) {
+		return artifact.getArtifactId()
+				+ '-'
+				+ artifact.getVersion()
+				+ (artifact.getClassifier().equals("") ? "" : '-' + artifact
+						.getClassifier()) + '.' + artifact.getExtension();
+	}
+
+	/** Absolute path to the directories where the files will be stored */
+	public static String artifactParentPath(String artifactBasePath,
+			Artifact artifact) {
+		return artifactBasePath + (artifactBasePath.endsWith("/") ? "" : "/")
+				+ artifactParentPath(artifact);
+	}
+
+	/** Relative path to the directories where the files will be stored */
+	public static String artifactParentPath(Artifact artifact) {
+		return artifact.getGroupId().replace('.', '/') + '/'
+				+ artifact.getArtifactId() + '/' + artifact.getVersion();
+	}
+
+	/**
+	 * Directly parses Maven POM XML format in order to find all artifacts
+	 * references under the dependency and dependencyManagement tags. This is
+	 * meant to migrate existing pom registering a lot of artifacts, not to
+	 * replace Maven resolving. POMs with an 'import' scope are recursed into.
+	 */
+	public static void gatherPomDependencies(AetherTemplate aetherTemplate,
+			Set<Artifact> artifacts, Artifact pomArtifact) {
+		if (log.isDebugEnabled())
+			log.debug("Gather dependencies for " + pomArtifact);
+
+		try {
+			File file = aetherTemplate.getResolvedFile(pomArtifact);
+			DocumentBuilder documentBuilder = DocumentBuilderFactory
+					.newInstance().newDocumentBuilder();
+			Document doc = documentBuilder.parse(file);
+
+			// properties (used to interpolate ${...} versions below)
+			Properties props = new Properties();
+			props.setProperty("project.version", pomArtifact.getBaseVersion());
+			NodeList properties = doc.getElementsByTagName("properties");
+			if (properties.getLength() > 0) {
+				NodeList propertiesElems = properties.item(0).getChildNodes();
+				for (int i = 0; i < propertiesElems.getLength(); i++) {
+					if (propertiesElems.item(i) instanceof Element) {
+						Element property = (Element) propertiesElems.item(i);
+						props.put(property.getNodeName(),
+								property.getTextContent());
+					}
+				}
+			}
+
+			// dependencies (direct and dependencyManagement)
+			NodeList dependencies = doc.getElementsByTagName("dependency");
+			for (int i = 0; i < dependencies.getLength(); i++) {
+				Element dependency = (Element) dependencies.item(i);
+				// NOTE(review): assumes groupId/artifactId/version elements
+				// are always present; a dependency relying on an inherited or
+				// managed version would cause an NPE here — confirm the POMs
+				// this is used on always declare explicit versions
+				String groupId = dependency.getElementsByTagName("groupId")
+						.item(0).getTextContent().trim();
+				String artifactId = dependency
+						.getElementsByTagName("artifactId").item(0)
+						.getTextContent().trim();
+				String version = dependency.getElementsByTagName("version")
+						.item(0).getTextContent().trim();
+				if (version.startsWith("${")) {
+					// strip the ${...} wrapper and look the property up
+					String versionKey = version.substring(0,
+							version.length() - 1).substring(2);
+					if (!props.containsKey(versionKey))
+						throw new SlcException("Cannot interpret version "
+								+ version);
+					version = props.getProperty(versionKey);
+				}
+				NodeList scopes = dependency.getElementsByTagName("scope");
+				if (scopes.getLength() > 0
+						&& scopes.item(0).getTextContent().equals("import")) {
+					// recurse
+					gatherPomDependencies(aetherTemplate, artifacts,
+							new DefaultArtifact(groupId, artifactId, "pom",
+									version));
+				} else {
+					// TODO: deal with scope?
+					// TODO: deal with type
+					String type = "jar";
+					Artifact artifact = new DefaultArtifact(groupId,
+							artifactId, type, version);
+					artifacts.add(artifact);
+				}
+			}
+		} catch (Exception e) {
+			throw new SlcException("Cannot process " + pomArtifact, e);
+		}
+	}
+
+	/** Prevent instantiation */
+	private MavenConventionsUtils() {
+	}
+}