*/
package org.argeo.slc.client.ui.dist.commands;
+import javax.jcr.Binary;
import javax.jcr.Node;
+import javax.jcr.NodeIterator;
import javax.jcr.Property;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.argeo.ArgeoMonitor;
+import org.argeo.eclipse.ui.EclipseArgeoMonitor;
+import org.argeo.eclipse.ui.dialogs.SingleValue;
import org.argeo.jcr.JcrUtils;
+import org.argeo.slc.NameVersion;
import org.argeo.slc.SlcException;
+import org.argeo.slc.aether.AetherUtils;
import org.argeo.slc.client.ui.dist.DistPlugin;
import org.argeo.slc.jcr.SlcNames;
+import org.argeo.slc.jcr.SlcTypes;
import org.argeo.slc.repo.ArtifactIndexer;
import org.argeo.slc.repo.JarFileIndexer;
+import org.argeo.slc.repo.RepoUtils;
+import org.argeo.slc.repo.maven.MavenConventionsUtils;
+import org.argeo.slc.repo.osgi.NormalizeGroup;
import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.runtime.jobs.Job;
+import org.sonatype.aether.artifact.Artifact;
+import org.sonatype.aether.util.artifact.DefaultArtifact;
/** Make sure that Maven and OSGi metadata are consistent */
public class NormalizeDistribution extends AbstractHandler implements SlcNames {
public Object execute(ExecutionEvent event) throws ExecutionException {
String workspace = event.getParameter(PARAM_WORKSPACE);
+ String version = SingleValue.ask("Version",
+ "Enter Distribution Version");
+ if (version == null)
+ return null;
+
NormalizeJob job;
try {
- job = new NormalizeJob(repository.login(workspace));
+ job = new NormalizeJob(repository.login(workspace), version);
} catch (RepositoryException e) {
throw new SlcException("Cannot normalize " + workspace, e);
}
return null;
}
	/**
	 * Repackages a plain Maven "-sources" jar as an Eclipse PDE source bundle
	 * and stores it as a sibling artifact whose artifact id is suffixed with
	 * ".source". The related binary (OSGi) artifact is read first, in order to
	 * retrieve the bundle symbolic name and version from its manifest. The new
	 * jar is then indexed with the artifact and jar file indexers.
	 *
	 * @param sourcesNode the JCR file node of the "-sources" jar
	 * @throws SlcException wrapping any {@link RepositoryException}
	 */
	protected void packageSourcesAsPdeSource(Node sourcesNode) {
		Binary origBinary = null;
		Binary osgiBinary = null;
		try {
			Session session = sourcesNode.getSession();
			// derive Maven coordinates from the JCR path of the sources jar
			Artifact sourcesArtifact = AetherUtils.convertPathToArtifact(
					sourcesNode.getPath(), null);

			// read name version from the manifest of the binary artifact
			// (same coordinates, without the "sources" classifier)
			Artifact osgiArtifact = new DefaultArtifact(
					sourcesArtifact.getGroupId(),
					sourcesArtifact.getArtifactId(),
					sourcesArtifact.getExtension(),
					sourcesArtifact.getVersion());
			String osgiPath = MavenConventionsUtils.artifactPath(
					artifactBasePath, osgiArtifact);
			osgiBinary = session.getNode(osgiPath).getNode(Node.JCR_CONTENT)
					.getProperty(Property.JCR_DATA).getBinary();

			NameVersion nameVersion = RepoUtils.readNameVersion(osgiBinary
					.getStream());

			// create PDE sources artifact ("<artifactId>.source")
			Artifact pdeSourceArtifact = new DefaultArtifact(
					sourcesArtifact.getGroupId(),
					sourcesArtifact.getArtifactId() + ".source",
					sourcesArtifact.getExtension(),
					sourcesArtifact.getVersion());
			String targetSourceParentPath = MavenConventionsUtils
					.artifactParentPath(artifactBasePath, pdeSourceArtifact);
			String targetSourceFileName = MavenConventionsUtils
					.artifactFileName(pdeSourceArtifact);
			String targetSourceJarPath = targetSourceParentPath + '/'
					+ targetSourceFileName;

			// repackage the sources stream and write it as a new file node
			Node targetSourceParentNode = JcrUtils.mkfolders(session,
					targetSourceParentPath);
			origBinary = sourcesNode.getNode(Node.JCR_CONTENT)
					.getProperty(Property.JCR_DATA).getBinary();
			byte[] targetJarBytes = RepoUtils.packageAsPdeSource(
					origBinary.getStream(), nameVersion);
			JcrUtils.copyBytesAsFile(targetSourceParentNode,
					targetSourceFileName, targetJarBytes);

			// reindex the freshly created PDE source jar
			Node targetSourceJarNode = session.getNode(targetSourceJarPath);
			artifactIndexer.index(targetSourceJarNode);
			jarFileIndexer.index(targetSourceJarNode);
		} catch (RepositoryException e) {
			throw new SlcException("Cannot add PDE sources for " + sourcesNode,
					e);
		} finally {
			// release JCR binaries in all cases
			JcrUtils.closeQuietly(origBinary);
			JcrUtils.closeQuietly(osgiBinary);
		}

	}
+
public void setRepository(Repository repository) {
this.repository = repository;
}
private class NormalizeJob extends Job {
private Session session;
+ private String version;
- public NormalizeJob(Session session) {
+ public NormalizeJob(Session session, String version) {
super("Normalize Distribution");
this.session = session;
+ this.version = version;
}
@Override
- protected IStatus run(IProgressMonitor monitor) {
- // Session session = null;
+ protected IStatus run(IProgressMonitor progressMonitor) {
+
try {
- // session = repository.login(workspace);
- // QueryManager qm = session.getWorkspace().getQueryManager();
- // Query query = qm
- // .createQuery(
- // "select * from [nt:file] where NAME([nt:file]) like '%.jar'",
- // Query.JCR_SQL2);
- // // Query query = qm.createQuery("//*jar", Query.XPATH);
- // long count = query.execute().getRows().getSize();
- // if (log.isDebugEnabled())
- // log.debug("Count: " + count);
- // long count = query.execute().getRows().nextRow()
- // .getValue("count").getLong();
+ ArgeoMonitor monitor = new EclipseArgeoMonitor(progressMonitor);
+ // normalize artifacts
Query countQuery = session
.getWorkspace()
.getQueryManager()
Query.JCR_SQL2);
QueryResult result = countQuery.execute();
Long expectedCount = result.getNodes().getSize();
-
- monitor.beginTask("Normalize "
+ monitor.beginTask("Normalize artifacts of "
+ session.getWorkspace().getName(),
expectedCount.intValue());
NormalizingTraverser tiv = new NormalizingTraverser(monitor);
session.getNode(artifactBasePath).accept(tiv);
+
+ // normalize groups
+ Query groupQuery = session
+ .getWorkspace()
+ .getQueryManager()
+ .createQuery(
+ "select group from [" + SlcTypes.SLC_GROUP_BASE
+ + "] as group", Query.JCR_SQL2);
+ NodeIterator groups = groupQuery.execute().getNodes();
+ monitor.beginTask("Normalize groups of "
+ + session.getWorkspace().getName(),
+ (int) groups.getSize());
+ while (groups.hasNext()) {
+ NormalizeGroup normalizeGroup = new NormalizeGroup();
+ normalizeGroup.setArtifactBasePath(artifactBasePath);
+ normalizeGroup.processGroupNode(groups.nextNode(), version,
+ monitor);
+ }
} catch (Exception e) {
return new Status(IStatus.ERROR, DistPlugin.ID,
"Cannot normalize distribution "
}
private class NormalizingTraverser extends TraversingItemVisitor {
- IProgressMonitor monitor;
+ ArgeoMonitor monitor;
- public NormalizingTraverser(IProgressMonitor monitor) {
+ public NormalizingTraverser(ArgeoMonitor monitor) {
super();
this.monitor = monitor;
}
protected void entering(Node node, int level)
throws RepositoryException {
if (node.isNodeType(NodeType.NT_FILE)) {
- if (jarFileIndexer.support(node.getPath()))
- if (artifactIndexer.support(node.getPath())) {
- monitor.subTask(node.getName());
- artifactIndexer.index(node);
- jarFileIndexer.index(node);
- node.getSession().save();
- monitor.worked(1);
- if (log.isDebugEnabled())
- log.debug("Processed " + node);
- }
+ if (node.getName().endsWith("-sources.jar")) {
+ monitor.subTask(node.getName());
+ packageSourcesAsPdeSource(node);
+ node.getSession().save();
+ monitor.worked(1);
+ if (log.isDebugEnabled())
+ log.debug("Processed source artifact " + node.getPath());
+ } else if (node.getName().endsWith(".jar")) {
+ if (jarFileIndexer.support(node.getPath()))
+ if (artifactIndexer.support(node.getPath())) {
+ monitor.subTask(node.getName());
+ artifactIndexer.index(node);
+ jarFileIndexer.index(node);
+ node.getSession().save();
+ monitor.worked(1);
+ if (log.isDebugEnabled())
+ log.debug("Processed artifact "
+ + node.getPath());
+ }
+ } else {
+ monitor.worked(1);
+ }
}
}
<parent>
<groupId>org.argeo.commons</groupId>
<artifactId>argeo-commons</artifactId>
- <version>1.1.8-SNAPSHOT</version>
+ <version>1.1.8</version>
</parent>
<groupId>org.argeo.slc</groupId>
<artifactId>argeo-slc</artifactId>
<properties>
<developmentCycle.slc>1.1</developmentCycle.slc>
<developmentCycle.startDate>2012-06-27</developmentCycle.startDate>
- <site.repoBase>file:///srv/projects/www/slc/site</site.repoBase>
- <site.urlBase>http://projects.argeo.org/slc/site</site.urlBase>
- <version.slc>1.1.6</version.slc>
+ <version.slc>1.1.7-SNAPSHOT</version.slc>
</properties>
<modules>
<module>runtime</module>
<module>dist</module>
<module>demo</module>
</modules>
- <url>${site.urlBase}/${developmentCycle.slc}</url>
+ <url>http://projects.argeo.org/slc/</url>
<scm>
<connection>scm:svn:https://svn.argeo.org/slc/trunk</connection>
<developerConnection>scm:svn:https://svn.argeo.org/slc/trunk</developerConnection>
<distributionManagement>
<repository>
<id>staging</id>
- <url>dav:https://repo.argeo.org/data/files/java/org.argeo.slc-1.1.x</url>
+ <url>dav:https://repo.argeo.org/data/files/java/org.argeo.slc-${developmentCycle.slc}.x</url>
<uniqueVersion>false</uniqueVersion>
</repository>
<site>
<id>site</id>
<name>SLC Site</name>
- <url>file:///srv/projects/www/slc/site/${project.version}</url>
+ <url>dav:https://repo.argeo.org/data/files/docs/org.argeo.slc-${developmentCycle.slc}.x</url>
</site>
</distributionManagement>
</project>
artifact.getClassifier());
JcrUtils.updateLastModified(fileNode);
- // make sure there is a checksum
+ // make sure there are checksums
String shaNodeName = fileNode.getName() + ".sha1";
if (!fileNode.getParent().hasNode(shaNodeName)) {
String sha = JcrUtils.checksumFile(fileNode, "SHA-1");
JcrUtils.copyBytesAsFile(fileNode.getParent(), shaNodeName,
sha.getBytes());
}
+ String md5NodeName = fileNode.getName() + ".md5";
+ if (!fileNode.getParent().hasNode(md5NodeName)) {
+ String md5 = JcrUtils.checksumFile(fileNode, "MD5");
+ JcrUtils.copyBytesAsFile(fileNode.getParent(), md5NodeName,
+ md5.getBytes());
+ }
// set higher levels
Node artifactVersionBase = fileNode.getParent();
/** Read the OSGi {@link NameVersion} */
public static NameVersion readNameVersion(File artifactFile) {
+ try {
+ return readNameVersion(new FileInputStream(artifactFile));
+ } catch (Exception e) {
+ // probably not a jar, skipping
+ if (log.isDebugEnabled()) {
+ log.debug("Skipping " + artifactFile + " because of " + e);
+ // e.printStackTrace();
+ }
+ }
+ return null;
+ }
+
+ /** Read the OSGi {@link NameVersion} */
+ public static NameVersion readNameVersion(InputStream in) {
JarInputStream jarInputStream = null;
try {
- jarInputStream = new JarInputStream(new FileInputStream(
- artifactFile));
+ jarInputStream = new JarInputStream(in);
return readNameVersion(jarInputStream.getManifest());
} catch (Exception e) {
// probably not a jar, skipping
if (log.isDebugEnabled()) {
- log.debug("Skipping " + artifactFile + " because of " + e);
+ log.debug("Skipping because of " + e);
// e.printStackTrace();
}
} finally {
/** Relative path to the directories where the files will be stored */
public static String artifactParentPath(Artifact artifact) {
return artifact.getGroupId().replace('.', '/') + '/'
- + artifact.getArtifactId() + '/' + artifact.getVersion();
+ + artifact.getArtifactId() + '/' + artifact.getBaseVersion();
}
public static String artifactsAsDependencyPom(Artifact pomArtifact,
p.append("</dependencyManagement>\n");
// Repositories
-// p.append("<repositories>\n");
-// p.append("<repository><id>argeo</id><url>http://maven.argeo.org/argeo</url></repository>\n");
-// p.append("</repositories>\n");
+ // p.append("<repositories>\n");
+ // p.append("<repository><id>argeo</id><url>http://maven.argeo.org/argeo</url></repository>\n");
+ // p.append("</repositories>\n");
p.append("</project>\n");
return p.toString();
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.argeo.ArgeoMonitor;
import org.argeo.jcr.JcrUtils;
import org.argeo.slc.SlcException;
import org.argeo.slc.aether.ArtifactIdComparator;
Session session = null;
try {
session = repository.login(workspace);
-
Node groupNode = session.getNode(MavenConventionsUtils.groupPath(
artifactBasePath, groupId));
- // TODO factorize with a traverser pattern?
- for (NodeIterator artifactBases = groupNode.getNodes(); artifactBases
- .hasNext();) {
- Node artifactBase = artifactBases.nextNode();
- if (artifactBase.isNodeType(SlcTypes.SLC_ARTIFACT_BASE)) {
- for (NodeIterator artifactVersions = artifactBase
- .getNodes(); artifactVersions.hasNext();) {
- Node artifactVersion = artifactVersions.nextNode();
- if (artifactVersion
- .isNodeType(SlcTypes.SLC_ARTIFACT_VERSION_BASE))
- for (NodeIterator files = artifactVersion
- .getNodes(); files.hasNext();) {
- Node file = files.nextNode();
- if (file.isNodeType(SlcTypes.SLC_BUNDLE_ARTIFACT)) {
- preProcessBundleArtifact(file);
- file.getSession().save();
- if (log.isDebugEnabled())
- log.debug("Pre-processed "
- + file.getName());
- }
-
- }
- }
- }
- }
- // NodeIterator bundlesIt = listBundleArtifacts(session);
- //
- // while (bundlesIt.hasNext()) {
- // Node bundleNode = bundlesIt.nextNode();
- // preProcessBundleArtifact(bundleNode);
- // bundleNode.getSession().save();
- // if (log.isDebugEnabled())
- // log.debug("Pre-processed " + bundleNode.getName());
- // }
-
- int bundleCount = symbolicNamesToNodes.size();
- if (log.isDebugEnabled())
- log.debug("Indexed " + bundleCount + " bundles");
-
- int count = 1;
- for (Node bundleNode : symbolicNamesToNodes.values()) {
- processBundleArtifact(bundleNode);
- bundleNode.getSession().save();
- if (log.isDebugEnabled())
- log.debug(count + "/" + bundleCount + " Processed "
- + bundleNode.getName());
- count++;
- }
-
- // indexes
- Set<Artifact> indexes = new TreeSet<Artifact>(
- new ArtifactIdComparator());
- Artifact indexArtifact = writeIndex(session, BINARIES_ARTIFACT_ID,
- binaries);
- indexes.add(indexArtifact);
- indexArtifact = writeIndex(session, SOURCES_ARTIFACT_ID, sources);
- indexes.add(indexArtifact);
- // sdk
- writeIndex(session, SDK_ARTIFACT_ID, indexes);
+ processGroupNode(groupNode, null);
} catch (Exception e) {
throw new SlcException("Cannot normalize group " + groupId + " in "
+ workspace, e);
}
}
	/**
	 * Normalizes the given group node for the given distribution version.
	 * Synchronized because it stores the group id and version into instance
	 * fields that are shared with the single-argument overload.
	 *
	 * @param groupNode the slc group base node
	 * @param version the distribution version (may drive index artifacts)
	 * @param monitor optional progress monitor, may be <code>null</code>
	 */
	public synchronized void processGroupNode(Node groupNode, String version,
			ArgeoMonitor monitor) throws RepositoryException {
		// FIXME better encapsulate
		groupId = groupNode.getProperty(SlcNames.SLC_GROUP_BASE_ID).getString();
		this.version = version;
		processGroupNode(groupNode, monitor);
	}
+
	/**
	 * Processes one group: first pre-processes every
	 * {@code slc:bundleArtifact} file found two levels below the group node
	 * (artifact base / artifact version), then processes the bundles collected
	 * during pre-processing and finally writes the binaries, sources and SDK
	 * index artifacts. Relies on instance state (e.g. the pre-processing step
	 * presumably fills {@code symbolicNamesToNodes}, {@code binaries} and
	 * {@code sources} — declared elsewhere in this class).
	 *
	 * @param groupNode the slc group base node
	 * @param monitor optional progress monitor, may be <code>null</code>;
	 *            one unit of work is reported for the whole group
	 */
	protected void processGroupNode(Node groupNode, ArgeoMonitor monitor)
			throws RepositoryException {
		if (monitor != null)
			monitor.subTask("Group " + groupId);
		Session session = groupNode.getSession();
		// iterate artifact bases > artifact versions > files
		for (NodeIterator artifactBases = groupNode.getNodes(); artifactBases
				.hasNext();) {
			Node artifactBase = artifactBases.nextNode();
			if (artifactBase.isNodeType(SlcTypes.SLC_ARTIFACT_BASE)) {
				for (NodeIterator artifactVersions = artifactBase.getNodes(); artifactVersions
						.hasNext();) {
					Node artifactVersion = artifactVersions.nextNode();
					if (artifactVersion
							.isNodeType(SlcTypes.SLC_ARTIFACT_VERSION_BASE))
						for (NodeIterator files = artifactVersion.getNodes(); files
								.hasNext();) {
							Node file = files.nextNode();
							if (file.isNodeType(SlcTypes.SLC_BUNDLE_ARTIFACT)) {
								// save after each file so partial progress
								// is persisted
								preProcessBundleArtifact(file);
								file.getSession().save();
								if (log.isDebugEnabled())
									log.debug("Pre-processed " + file.getName());
							}

						}
				}
			}
		}
		// NodeIterator bundlesIt = listBundleArtifacts(session);
		//
		// while (bundlesIt.hasNext()) {
		// Node bundleNode = bundlesIt.nextNode();
		// preProcessBundleArtifact(bundleNode);
		// bundleNode.getSession().save();
		// if (log.isDebugEnabled())
		// log.debug("Pre-processed " + bundleNode.getName());
		// }

		int bundleCount = symbolicNamesToNodes.size();
		if (log.isDebugEnabled())
			log.debug("Indexed " + bundleCount + " bundles");

		// process each collected bundle, saving after each one
		int count = 1;
		for (Node bundleNode : symbolicNamesToNodes.values()) {
			processBundleArtifact(bundleNode);
			bundleNode.getSession().save();
			if (log.isDebugEnabled())
				log.debug(count + "/" + bundleCount + " Processed "
						+ bundleNode.getName());
			count++;
		}

		// indexes: binaries and sources, then an SDK index referencing both
		Set<Artifact> indexes = new TreeSet<Artifact>(
				new ArtifactIdComparator());
		Artifact indexArtifact = writeIndex(session, BINARIES_ARTIFACT_ID,
				binaries);
		indexes.add(indexArtifact);
		indexArtifact = writeIndex(session, SOURCES_ARTIFACT_ID, sources);
		indexes.add(indexArtifact);
		// sdk
		writeIndex(session, SDK_ARTIFACT_ID, indexes);
		if (monitor != null)
			monitor.worked(1);
	}
+
private Artifact writeIndex(Session session, String artifactId,
Set<Artifact> artifacts) throws RepositoryException {
Artifact artifact = new DefaultArtifact(groupId, artifactId, "pom",