Switch to manifesting dependency graphs instead of trees #302

Merged
PurgingDependencyTreeVisitor.java
@@ -15,7 +15,10 @@
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;
import org.eclipse.aether.repository.RemoteRepository;
import org.jboss.logging.Logger;
@@ -28,7 +31,7 @@ public class PurgingDependencyTreeVisitor implements DependencyTreeVisitor {
private final AtomicLong uniqueNodesTotal = new AtomicLong();
private List<VisitedComponentImpl> roots;
private ArrayDeque<VisitedComponentImpl> branch;
private Map<ArtifactCoords, List<VisitedComponentImpl>> nodeVariations;
private Map<ArtifactCoords, ComponentVariations> nodeVariations;
private Map<ArtifactCoords, VisitedComponentImpl> treeComponents;

@Override
@@ -37,7 +40,7 @@ public void beforeAllRoots() {
uniqueNodesTotal.set(0);
roots = new ArrayList<>();
branch = new ArrayDeque<>();
nodeVariations = new HashMap<>();
nodeVariations = new ConcurrentHashMap<>(); // concurrent map: looked up from the tree-processing tasks created in createFunction()
}

@Override
@@ -53,9 +56,9 @@ private void purge() {
//log.infof("Roots total: %s", roots.size());
//log.infof("Nodes total: %s", nodesTotal);

var treeProcessor = newTreeProcessor();
for (VisitedComponentImpl root : roots) {
// we want to process each tree separately due to possible variations across different trees
var treeProcessor = newTreeProcessor();
treeProcessor.addRoot(root);
var results = treeProcessor.schedule().join();
boolean failures = false;
@@ -78,7 +81,7 @@ private void purge() {
var i = roots.iterator();
while (i.hasNext()) {
var current = i.next();
VisitedComponent previous = ids.put(current.getBomRef(), current);
var previous = ids.put(current.getBomRef(), current);
if (previous != null) {
i.remove();
}
@@ -103,31 +106,8 @@ public Iterable<VisitedComponentImpl> getChildren(VisitedComponentImpl node) {
@Override
public Function<ExecutionContext<Long, VisitedComponentImpl, VisitedComponentImpl>, TaskResult<Long, VisitedComponentImpl, VisitedComponentImpl>> createFunction() {
return ctx -> {
VisitedComponentImpl currentNode = ctx.getNode();
final List<VisitedComponentImpl> variations = nodeVariations.get(currentNode.getArtifactCoords());
long processedVariations = 0;
if (variations.size() > 1) {
for (VisitedComponentImpl variation : variations) {
if (!variation.hasBomRefsSet()) {
continue;
}
processedVariations++;
if (variation.hasMatchingDirectDeps(currentNode)) {
if (currentNode.isRoot()) {
currentNode.setBomRef(variation.getBomRef());
} else {
currentNode.swap(variation);
currentNode = variation;
}
break;
}
}
}
if (currentNode.getBomRef() == null) {
uniqueNodesTotal.incrementAndGet();
currentNode.initializeBomRef(processedVariations);
}
return ctx.success(currentNode);
final VisitedComponentImpl currentNode = ctx.getNode();
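// de-duplication of equivalent nodes and bom-ref assignment is delegated to ComponentVariations.setBomRef()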
return ctx.success(nodeVariations.get(currentNode.getArtifactCoords()).setBomRef(currentNode));
};
}
});
@@ -185,7 +165,7 @@ public void leaveBomImport(DependencyVisit visit) {
private VisitedComponentImpl enterNode(DependencyVisit visit) {
var parent = branch.peek();
var current = new VisitedComponentImpl(nodesTotal.getAndIncrement(), parent, visit);
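// register this node as a variation of its coordinates; equivalent variations are merged later during purge()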
nodeVariations.computeIfAbsent(visit.getCoords(), k -> new ArrayList<>()).add(current);
nodeVariations.computeIfAbsent(visit.getCoords(), k -> new ComponentVariations()).add(current);
if (parent != null) {
parent.addChild(current);
}
@@ -204,11 +184,12 @@ private class VisitedComponentImpl implements VisitedComponent {
private final ScmRevision revision;
private final ArtifactCoords coords;
private final List<RemoteRepository> repos;
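// children may be re-linked by swap() while trees are being processed, hence the concurrent map below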
private final Map<ArtifactCoords, VisitedComponentImpl> children = new HashMap<>();
private final Map<ArtifactCoords, VisitedComponentImpl> children = new ConcurrentHashMap<>();
private List<ArtifactCoords> linkedDeps;
private List<VisitedComponentImpl> linkedParents;
private String bomRef;
private PackageURL purl;
private boolean purged;

private VisitedComponentImpl(long index, VisitedComponentImpl parent, DependencyVisit visit) {
this.index = index;
@@ -284,6 +265,7 @@ private void swap(VisitedComponentImpl other) {
p.addChild(other);
}
}
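// this node has been replaced by an equivalent variation; mark it purged so ComponentVariations skips it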
purged = true;
}

private boolean hasMatchingDirectDeps(VisitedComponentImpl other) {
@@ -348,18 +330,6 @@ private void setBomRef(String bomRef) {
this.bomRef = bomRef;
}

private boolean hasBomRefsSet() {
if (bomRef == null) {
return false;
}
for (var c : children.values()) {
if (c.bomRef == null) {
return false;
}
}
return true;
}

private void initializeBomRef(long processedVariations) {
if (processedVariations == 0) {
bomRef = getPurl().toString();
@@ -388,6 +358,49 @@ public String toString() {
}
}

private class ComponentVariations {
private final List<VisitedComponentImpl> variations = new ArrayList<>();
private final ReadWriteLock lock = new ReentrantReadWriteLock();

private void add(VisitedComponentImpl variation) {
variations.add(variation);
}

private VisitedComponentImpl setBomRef(VisitedComponentImpl currentNode) {
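// re-uses the bom-ref of an already processed variation with matching direct dependencies where possible,
// otherwise initializes a new unique bom-ref for this node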
if (currentNode.bomRef != null) {
return currentNode;
}
lock.readLock().lock();
try {
int processedVariations = 0;
if (variations.size() > 1) {
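// scan previously processed, non-purged variations for one whose direct dependencies match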
for (var variation : variations) {
if (variation.purged || variation.bomRef == null) {
continue;
}
processedVariations++;
if (variation.hasMatchingDirectDeps(currentNode)) {
if (currentNode.isRoot()) {
currentNode.setBomRef(variation.getBomRef());
} else {
currentNode.swap(variation);
currentNode = variation;
}
break;
}
}
}
if (currentNode.bomRef == null) {
uniqueNodesTotal.incrementAndGet();
currentNode.initializeBomRef(processedVariations);
}
} finally {
lock.readLock().unlock();
}
return currentNode;
}
}

private static boolean isSameGav(ArtifactCoords c1, ArtifactCoords c2) {
return c1.getArtifactId().equals(c2.getArtifactId())
&& c1.getVersion().equals(c2.getVersion())
CyclicDependencyGraphTest.java
@@ -1,40 +1,16 @@
package io.quarkus.domino.manifest;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;

import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver;
import io.quarkus.domino.ProjectDependencyConfig;
import io.quarkus.domino.ProjectDependencyResolver;
import io.quarkus.domino.test.repo.TestArtifactRepo;
import io.quarkus.domino.test.repo.TestProject;
import io.quarkus.maven.dependency.ArtifactCoords;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashSet;
import java.util.List;
import org.cyclonedx.exception.ParseException;
import org.cyclonedx.model.Bom;
import org.cyclonedx.model.Component;
import org.cyclonedx.model.Dependency;
import org.cyclonedx.parsers.JsonParser;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

public class CyclicDependencyGraphTest {

@TempDir
static Path testRepoDir;
static MavenArtifactResolver artifactResolver;

@BeforeAll
static void prepareRepo() {
var testRepo = TestArtifactRepo.of(testRepoDir);
artifactResolver = testRepo.getArtifactResolver();
public class CyclicDependencyGraphTest extends ManifestingTestBase {
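// resolver setup, SBOM generation and parsing are now handled by ManifestingTestBase; this test only provides the repo, config and assertions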

@Override
protected void initRepo(TestArtifactRepo repo) {
var tcnativeProject = TestProject.of("io.netty", "1.0")
.setRepoUrl("https://netty.io/tcnative")
.setTag("1.0");
@@ -49,57 +25,23 @@ static void prepareRepo() {
.addDependency(ArtifactCoords.jar("io.netty", "netty-tcnative-boringssl-static", "osx-x86_64", " 1.0"))
.addClassifier("windows-x86_64")
.addDependency(ArtifactCoords.jar("io.netty", "netty-tcnative-boringssl-static", "windows-x86_64", " 1.0"));
testRepo.install(tcnativeProject);
repo.install(tcnativeProject);

var appProject = TestProject.of("org.acme", "1.0")
.setRepoUrl("https://acme.org/app")
.setTag("1.0");
appProject.createMainModule("acme-app")
.addDependency(ArtifactCoords.jar("io.netty", "netty-tcnative-boringssl-static", "linux-x86_64", "1.0"));
testRepo.install(appProject);
repo.install(appProject);
}

private static ProjectDependencyConfig.Mutable newDependencyConfig() {
return ProjectDependencyConfig.builder()
.setWarnOnMissingScm(true)
.setLegacyScmLocator(true);
@Override
protected ProjectDependencyConfig initConfig(ProjectDependencyConfig.Mutable config) {
return config.setProjectArtifacts(List.of(ArtifactCoords.jar("org.acme", "acme-app", "1.0")));
}

@Test
public void dependencyGraph() {
var config = newDependencyConfig()
.setProjectArtifacts(List.of(ArtifactCoords.jar("org.acme", "acme-app", "1.0")));

final Bom bom;
Path output = null;
try {
output = Files.createTempFile("domino-test", "sbom");

ProjectDependencyResolver.builder()
.setArtifactResolver(artifactResolver)
.setDependencyConfig(config)
.addDependencyTreeVisitor(new SbomGeneratingDependencyVisitor(
SbomGenerator.builder()
.setArtifactResolver(artifactResolver)
.setOutputFile(output)
.setEnableTransformers(false),
config))
.build()
.resolveDependencies();

try (BufferedReader reader = Files.newBufferedReader(output)) {
bom = new JsonParser().parse(reader);
} catch (ParseException e) {
throw new RuntimeException(e);
}
} catch (IOException e) {
throw new RuntimeException(e);
} finally {
if (output != null) {
output.toFile().deleteOnExit();
}
}

@Override
protected void assertBom(Bom bom) {
assertDependencies(bom, ArtifactCoords.jar("org.acme", "acme-app", "1.0"),
ArtifactCoords.jar("io.netty", "netty-tcnative-boringssl-static", "linux-x86_64", "1.0"));

@@ -114,40 +56,4 @@ public void dependencyGraph() {
assertDependencies(bom, ArtifactCoords.jar("io.netty", "netty-tcnative-boringssl-static", "osx-x86_64", "1.0"));
assertDependencies(bom, ArtifactCoords.jar("io.netty", "netty-tcnative-boringssl-static", "windows-x86_64", "1.0"));
}

private static void assertDependencies(Bom bom, ArtifactCoords compCoords, ArtifactCoords... expectedDeps) {
var purl = PurgingDependencyTreeVisitor.getPurl(compCoords).toString();
Component comp = null;
for (var c : bom.getComponents()) {
if (c.getPurl().equals(purl)) {
comp = c;
break;
}
}
if (comp == null) {
fail("Failed to locate " + purl);
return;
}
Dependency dep = null;
for (var d : bom.getDependencies()) {
if (d.getRef().equals(comp.getBomRef())) {
dep = d;
break;
}
}
if (dep == null && expectedDeps.length > 0) {
fail(comp.getBomRef() + " has no dependencies manifested while expected " + expectedDeps.length);
return;
}
final List<Dependency> recordedDeps = dep == null ? List.of() : dep.getDependencies();
var recordedDepPurls = new HashSet<String>(recordedDeps.size());
for (var d : recordedDeps) {
recordedDepPurls.add(d.getRef());
}
var expectedDepPurls = new HashSet<String>(expectedDeps.length);
for (var c : expectedDeps) {
expectedDepPurls.add(PurgingDependencyTreeVisitor.getPurl(c).toString());
}
assertThat(recordedDepPurls).isEqualTo(expectedDepPurls);
}
}