New pkglist list manipulation command (#16022)
* wip

* wip

* wip

* review

* added from_graph

* names

* review
memsharded committed Apr 11, 2024
1 parent a2a8ebe commit 52f2435
Showing 3 changed files with 207 additions and 1 deletion.
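The commit adds a new "conan pkglist" command with find-remote and merge subcommands. A minimal end-to-end sketch using the same TestClient harness as the tests in this commit (the package, remote and file names are illustrative, not part of the commit):

from collections import OrderedDict

from conans.test.assets.genconanfile import GenConanfile
from conans.test.utils.tools import TestClient, TestServer

servers = OrderedDict([("default", TestServer())])
c = TestClient(servers=servers, inputs=["admin", "password"])
c.save({"zlib/conanfile.py": GenConanfile("zlib", "1.0")})
c.run("create zlib")
c.run("upload zlib* -c -r=default")
# Capture a package list of the local cache, then exercise the new subcommands
c.run("list zlib:* --format=json", redirect_stdout="list1.json")
c.run("pkglist merge --list=list1.json --format=json", redirect_stdout="merged.json")
c.run("pkglist find-remote merged.json --format=json")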
26 changes: 26 additions & 0 deletions conan/api/model.py
@@ -47,6 +47,9 @@ class MultiPackagesList:
def __init__(self):
self.lists = {}

def setdefault(self, key, default):
return self.lists.setdefault(key, default)

def __getitem__(self, name):
try:
return self.lists[name]
@@ -63,6 +66,10 @@ def serialize(self):
return {k: v.serialize() if isinstance(v, PackagesList) else v
for k, v in self.lists.items()}

def merge(self, other):
for k, v in other.lists.items():
self.lists.setdefault(k, PackagesList()).merge(v)

@staticmethod
def load(file):
content = json.loads(load(file))
@@ -76,9 +83,18 @@ def load(file):
pkglist.lists = result
return pkglist

@staticmethod
def from_graph(graph, graph_recipes=None, graph_binaries=None):
graph = {"graph": graph.serialize()}
return MultiPackagesList._define_graph(graph, graph_recipes, graph_binaries)

@staticmethod
def load_graph(graphfile, graph_recipes=None, graph_binaries=None):
graph = json.loads(load(graphfile))
return MultiPackagesList._define_graph(graph, graph_recipes, graph_binaries)

@staticmethod
def _define_graph(graph, graph_recipes=None, graph_binaries=None):
pkglist = MultiPackagesList()
cache_list = PackagesList()
if graph_recipes is None and graph_binaries is None:
@@ -143,6 +159,16 @@ class PackagesList:
def __init__(self):
self.recipes = {}

def merge(self, other):
def recursive_dict_update(d, u): # TODO: repeated from conandata.py
for k, v in u.items():
if isinstance(v, dict):
d[k] = recursive_dict_update(d.get(k, {}), v)
else:
d[k] = v
return d
recursive_dict_update(self.recipes, other.recipes)

def split(self):
"""
Returns a list of PackagesList, split one per reference.
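For reference, the deep-merge semantics implemented by PackagesList.merge() above can be illustrated with plain dictionaries; a small standalone sketch (the data is hypothetical):

def recursive_dict_update(d, u):  # same helper as in PackagesList.merge above
    for k, v in u.items():
        if isinstance(v, dict):
            d[k] = recursive_dict_update(d.get(k, {}), v)
        else:
            d[k] = v
    return d

release = {"zlib/1.0": {"revisions": {"rrev1": {"packages": {"pid_release": {}}}}}}
debug = {"zlib/1.0": {"revisions": {"rrev1": {"packages": {"pid_debug": {}}}}}}
recursive_dict_update(release, debug)
# Both package ids now live under the same recipe revision
assert set(release["zlib/1.0"]["revisions"]["rrev1"]["packages"]) == {"pid_release", "pid_debug"}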
93 changes: 93 additions & 0 deletions conan/cli/commands/pkglist.py
@@ -0,0 +1,93 @@
import copy

from conan.api.conan_api import ConanAPI
from conan.api.model import MultiPackagesList, PackagesList
from conan.cli import make_abs_path
from conan.cli.command import conan_command, conan_subcommand
from conan.cli.commands.list import print_list_text, print_list_json
from conan.cli.formatters.list import list_packages_html
from conans.errors import NotFoundException


@conan_command(group="Consumer")
def pkglist(conan_api: ConanAPI, parser, *args): # noqa
"""
Several operations over package lists
"""


@conan_subcommand(formatters={"text": print_list_text,
"json": print_list_json,
"html": list_packages_html})
def pkglist_find_remote(conan_api, parser, subparser, *args):
"""
(Experimental) Find the remotes of a list of packages in the cache
"""
subparser.add_argument('list', help="Input package list")
subparser.add_argument("-r", "--remote", default=None, action="append",
help="Remote names. Accepts wildcards "
"('*' means all the remotes available)")
args = parser.parse_args(*args)

listfile = make_abs_path(args.list)
multi_pkglist = MultiPackagesList.load(listfile)
package_list = multi_pkglist["Local Cache"]
selected_remotes = conan_api.remotes.list(args.remote)

result = MultiPackagesList()
for r in selected_remotes:
result_pkg_list = PackagesList()
for ref, recipe_bundle in package_list.refs().items():
ref_no_rev = copy.copy(ref) # TODO: Improve ugly API
ref_no_rev.revision = None
try:
revs = conan_api.list.recipe_revisions(ref_no_rev, remote=r)
except NotFoundException:
continue
if ref not in revs: # not found
continue
result_pkg_list.add_refs([ref])
for pref, pref_bundle in package_list.prefs(ref, recipe_bundle).items():
pref_no_rev = copy.copy(pref) # TODO: Improve ugly API
pref_no_rev.revision = None
try:
prevs = conan_api.list.package_revisions(pref_no_rev, remote=r)
except NotFoundException:
continue
if pref in prevs:
result_pkg_list.add_prefs(ref, [pref])
info = recipe_bundle["packages"][pref.package_id]["info"]
result_pkg_list.add_configurations({pref: info})
if result_pkg_list.recipes:
result.add(r.name, result_pkg_list)

return {
"results": result.serialize(),
"conan_api": conan_api,
"cli_args": " ".join([f"{arg}={getattr(args, arg)}"
for arg in vars(args) if getattr(args, arg)])
}


@conan_subcommand(formatters={"text": print_list_text,
"json": print_list_json,
"html": list_packages_html})
def pkglist_merge(conan_api, parser, subparser, *args):
"""
(Experimental) Merge several package lists into a single one
"""
subparser.add_argument("-l", "--list", help="Package list file", action="append")
args = parser.parse_args(*args)

result = MultiPackagesList()
for pkglist in args.list:
listfile = make_abs_path(pkglist)
multi_pkglist = MultiPackagesList.load(listfile)
result.merge(multi_pkglist)

return {
"results": result.serialize(),
"conan_api": conan_api,
"cli_args": " ".join([f"{arg}={getattr(args, arg)}"
for arg in vars(args) if getattr(args, arg)])
}
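At the API level, the merge subcommand above reduces to the new MultiPackagesList.load/merge/serialize calls from conan/api/model.py; a minimal sketch (the input file names are hypothetical):

import json

from conan.api.model import MultiPackagesList
from conan.cli import make_abs_path

result = MultiPackagesList()
for name in ["list1.json", "list2.json"]:  # hypothetical package list files
    result.merge(MultiPackagesList.load(make_abs_path(name)))
print(json.dumps(result.serialize(), indent=2))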
@@ -1,9 +1,10 @@
import json
from collections import OrderedDict

import pytest

from conans.test.assets.genconanfile import GenConanfile
from conans.test.utils.tools import TestClient
from conans.test.utils.tools import TestClient, TestServer


class TestListUpload:
@@ -168,6 +169,92 @@ def test_graph_pkg_list_only_built(self):
assert len(pkglist["default"]) == 2


class TestPkgListFindRemote:
""" we can recover a list of remotes for an already installed graph, for metadata download
"""
def test_graph_2_pkg_list_remotes(self):
servers = OrderedDict([("default", TestServer()), ("remote2", TestServer())])
c = TestClient(servers=servers, inputs=2 * ["admin", "password"], light=True)
c.save({"zlib/conanfile.py": GenConanfile("zlib", "1.0"),
"app/conanfile.py": GenConanfile("app", "1.0").with_requires("zlib/1.0")})
c.run("create zlib")
c.run("create app ")
c.run("upload zlib* -c -r=default")
c.run("upload zlib* -c -r=remote2")
c.run("upload app* -c -r=remote2")

# After this install, the packages will be in the cache
c.run("install --requires=app/1.0 --format=json", redirect_stdout="graph.json")
# So the resulting list will not have any remote at all
c.run("list --graph=graph.json --format=json", redirect_stdout="pkglist.json")

pkglist = json.loads(c.load("pkglist.json"))
assert len(pkglist["Local Cache"]) == 2
assert "default" not in pkglist # The remote doesn't even exist

# Let's now compute a list by finding the packages in the remotes
c.run("pkglist find-remote pkglist.json --format=json", redirect_stdout="remotepkg.json")
pkglist = json.loads(c.stdout)
assert "Local Cache" not in pkglist
assert len(pkglist["default"]) == 1
assert "zlib/1.0" in pkglist["default"]
assert len(pkglist["remote2"]) == 2
assert "app/1.0" in pkglist["remote2"]
assert "zlib/1.0" in pkglist["remote2"]

c.run("download --list=remotepkg.json -r=default --metadata=*")
assert "zlib/1.0: Retrieving recipe metadata from remote 'default'" in c.out
assert "zlib/1.0: Retrieving package metadata" in c.out
c.run("download --list=remotepkg.json -r=remote2 --metadata=*")
assert "app/1.0: Retrieving recipe metadata from remote 'remote2'" in c.out
assert "app/1.0: Retrieving package metadata" in c.out


class TestPkgListMerge:
""" deep merge lists
"""
def test_graph_2_pkg_list_remotes(self):
servers = OrderedDict([("default", TestServer()), ("remote2", TestServer())])
c = TestClient(servers=servers, inputs=2 * ["admin", "password"])
c.save({"zlib/conanfile.py": GenConanfile("zlib", "1.0").with_settings("build_type"),
"bzip2/conanfile.py": GenConanfile("bzip2", "1.0").with_settings("build_type"),
"app/conanfile.py": GenConanfile("app", "1.0").with_requires("zlib/1.0", "bzip2/1.0")
.with_settings("build_type")})
c.run("create zlib")
c.run("create bzip2")
c.run("create app ")

c.run("list zlib:* --format=json", redirect_stdout="list1.json")
c.run("list bzip2:* --format=json", redirect_stdout="list2.json")
c.run("list app:* --format=json", redirect_stdout="list3.json")
c.run("pkglist merge --list=list1.json --list=list2.json --list=list3.json --format=json",
redirect_stdout="release.json")
final = json.loads(c.stdout)
assert "app/1.0" in final["Local Cache"]
assert "zlib/1.0" in final["Local Cache"]
assert "bzip2/1.0" in final["Local Cache"]

c.run("create zlib -s build_type=Debug")
c.run("create bzip2 -s build_type=Debug")
c.run("create app -s build_type=Debug")
c.run("list *:* -fs build_type=Debug --format=json", redirect_stdout="debug.json")
c.run("pkglist merge --list=release.json --list=debug.json --format=json",
redirect_stdout="release.json")
final = json.loads(c.stdout)
rev = final["Local Cache"]["zlib/1.0"]["revisions"]["11f74ff5f006943c6945117511ac8b64"]
assert len(rev["packages"]) == 2 # Debug and Release
settings = rev["packages"]["efa83b160a55b033c4ea706ddb980cd708e3ba1b"]["info"]["settings"]
assert settings == {"build_type": "Release"}
settings = rev["packages"]["9e186f6d94c008b544af1569d1a6368d8339efc5"]["info"]["settings"]
assert settings == {"build_type": "Debug"}
rev = final["Local Cache"]["bzip2/1.0"]["revisions"]["9e0352b3eb99ba4ac79bc7eeae2102c5"]
assert len(rev["packages"]) == 2 # Debug and Release
settings = rev["packages"]["efa83b160a55b033c4ea706ddb980cd708e3ba1b"]["info"]["settings"]
assert settings == {"build_type": "Release"}
settings = rev["packages"]["9e186f6d94c008b544af1569d1a6368d8339efc5"]["info"]["settings"]
assert settings == {"build_type": "Debug"}


class TestDownloadUpload:
@pytest.fixture()
def client(self):
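Per the assertions in TestPkgListMerge above, the merged output keeps both configurations under a single recipe revision; the relevant slice of the zlib/1.0 entry looks roughly like this (hashes taken from the test, other fields omitted):

zlib_entry = {
    "revisions": {
        "11f74ff5f006943c6945117511ac8b64": {
            "packages": {
                "efa83b160a55b033c4ea706ddb980cd708e3ba1b": {
                    "info": {"settings": {"build_type": "Release"}}},
                "9e186f6d94c008b544af1569d1a6368d8339efc5": {
                    "info": {"settings": {"build_type": "Debug"}}},
            }
        }
    }
}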
