resolve #5610 add PrefixData, SubdirData, and PackageCacheData to conda/api.py #6922

Merged
merged 11 commits on Feb 23, 2018
109 changes: 107 additions & 2 deletions conda/api.py
@@ -1,5 +1,110 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals

from .core.solve import Solver # NOQA
Solver = Solver # lgtm [py/redundant-assignment]
from .common.constants import NULL
from .core.package_cache_data import PackageCacheData as _PackageCacheData
from .core.prefix_data import PrefixData as _PrefixData
from .core.solve import Solver as _Solver, DepsModifier as _DepsModifier
from .core.subdir_data import SubdirData as _SubdirData
from .models.channel import Channel


DepsModifier = _DepsModifier


class Solver(object):
Collaborator:

We need a docstring here that also explains why this Solver wrapper is needed and what it is intended for.

Contributor Author:

Any recommendation on how to do this without duplicating everything that's already in the underlying Solver class? There are plenty of nice docstrings here: https://github.com/conda/conda/blob/master/conda/core/solve.py#L52


    def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remove=()):
        self._internal = _Solver(prefix, channels, subdirs, specs_to_add, specs_to_remove)
Collaborator (@goanpeca, Feb 23, 2018):

This will only work for interactive scripting, but we could at least do

    self.solve_final_state.__doc__ = self._internal.solve_final_state.__doc__

@kalefranz ?
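
A more static variant of that suggestion (a sketch only, not part of this PR; the `_copy_doc` helper name is hypothetical) would copy the docstrings from the underlying `conda.core.solve.Solver` methods at class-definition time, so they also show up in `help()` and generated docs:

    def _copy_doc(source_method):
        # Copy the docstring from the wrapped conda.core.solve.Solver method
        # onto the thin conda.api wrapper function.
        def decorator(func):
            func.__doc__ = source_method.__doc__
            return func
        return decorator

    # e.g. applied to the wrapper method below:
    #     @_copy_doc(_Solver.solve_final_state)
    #     def solve_final_state(self, ...):
    #         return self._internal.solve_final_state(...)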


    def solve_final_state(self, deps_modifier=NULL, prune=NULL, ignore_pinned=NULL,
                          force_remove=NULL):
        return self._internal.solve_final_state(deps_modifier, prune, ignore_pinned,
                                                force_remove)

    def solve_for_diff(self, deps_modifier=NULL, prune=NULL, ignore_pinned=NULL,
                       force_remove=NULL, force_reinstall=False):
        return self._internal.solve_for_diff(deps_modifier, prune, ignore_pinned,
                                             force_remove, force_reinstall)

    def solve_for_transaction(self, deps_modifier=NULL, prune=NULL, ignore_pinned=NULL,
                              force_remove=NULL, force_reinstall=False):
        return self._internal.solve_for_transaction(deps_modifier, prune, ignore_pinned,
                                                    force_remove, force_reinstall)


class SubdirData(object):

Collaborator:

We need a docstring here too, one that explains why this wrapper is needed and what it is intended for.

    def __init__(self, channel):
        assert isinstance(channel, Channel)
        assert channel.subdir
        assert not channel.package_filename
        self._internal = _SubdirData(channel)

    def query(self, package_ref_or_match_spec):
        return tuple(self._internal.query(package_ref_or_match_spec))

    @staticmethod
    def query_all(channels, subdirs, package_ref_or_match_spec):
        return tuple(_SubdirData.query_all(channels, subdirs, package_ref_or_match_spec))

    def iter_records(self):
        return self._internal.iter_records()

    def reload(self):
        self._internal = self._internal.reload()
        return self


class PackageCacheData(object):

Collaborator:

Same here.

    def __init__(self, pkgs_dir):
        self._internal = _PackageCacheData(pkgs_dir)

    def get(self, package_ref, default=NULL):
        return self._internal.get(package_ref, default)

    def query(self, package_ref_or_match_spec):
        return tuple(self._internal.query(package_ref_or_match_spec))

    @staticmethod
    def query_all(package_ref_or_match_spec, pkgs_dirs=None):
        return tuple(_PackageCacheData.query_all(package_ref_or_match_spec, pkgs_dirs))

    def iter_records(self):
        return self._internal.iter_records()

    @property
    def is_writable(self):
        return self._internal.is_writable

    @staticmethod
    def first_writable(pkgs_dirs=None):
        return PackageCacheData(_PackageCacheData.first_writable(pkgs_dirs).pkgs_dir)

    def reload(self):
        self._internal = self._internal.reload()
        return self


class PrefixData(object):

    def __init__(self, prefix_path):
        self._internal = _PrefixData(prefix_path)

    def get(self, package_ref, default=NULL):
        return self._internal.get(package_ref.name, default)

    def query(self, package_ref_or_match_spec):
        return tuple(self._internal.query(package_ref_or_match_spec))

    def iter_records(self):
        return self._internal.iter_records()

    @property
    def is_writable(self):
        return self._internal.is_writable

    def reload(self):
        self._internal = self._internal.reload()
        return self
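
For context, a quick usage sketch of the new wrappers (not part of the diff; the channel URL and prefix path are placeholders, and querying a SubdirData may hit the network to fetch repodata):

    from conda.api import PackageCacheData, PrefixData, SubdirData
    from conda.models.channel import Channel

    # Query one channel subdir for matching repodata records (placeholder URL).
    channel = Channel('https://repo.anaconda.com/pkgs/main/linux-64')
    records = SubdirData(channel).query('numpy >=1.14')

    # Query every configured package cache for a package.
    cached = PackageCacheData.query_all('numpy')

    # Inspect an existing environment prefix (placeholder path).
    prefix_data = PrefixData('/opt/conda/envs/myenv')
    installed = tuple(prefix_data.iter_records())
    print(prefix_data.is_writable)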
7 changes: 7 additions & 0 deletions conda/core/package_cache_data.py
@@ -91,6 +91,10 @@ def load(self):
                if package_cache_record:
                    _package_cache_records[package_cache_record] = package_cache_record

    def reload(self):
        self.load()
        return self

    def get(self, package_ref, default=NULL):
        assert isinstance(package_ref, PackageRef)
        try:
@@ -119,6 +123,9 @@ def query(self, package_ref_or_match_spec):
            assert isinstance(param, PackageRef)
            return (pcrec for pcrec in itervalues(self._package_cache_records) if pcrec == param)

    def iter_records(self):
        return iter(self._package_cache_records)

    @classmethod
    def query_all(cls, package_ref_or_match_spec, pkgs_dirs=None):
        if pkgs_dirs is None:
14 changes: 12 additions & 2 deletions conda/core/prefix_data.py
@@ -3,9 +3,9 @@

from glob import glob
from logging import getLogger
from os.path import join, lexists
from os.path import isfile, join, lexists

from ..base.constants import CONDA_TARBALL_EXTENSION
from ..base.constants import CONDA_TARBALL_EXTENSION, PREFIX_MAGIC_FILE
from ..base.context import context
from ..common.compat import JSONDecodeError, itervalues, string_types, with_metaclass
from ..common.constants import NULL
@@ -14,6 +14,7 @@
                             maybe_raise)
from ..gateways.disk.create import write_as_json_to_file
from ..gateways.disk.delete import rm_rf
from ..gateways.disk.test import file_path_is_writable
from ..models.dist import Dist
from ..models.index_record import PackageRef
from ..models.match_spec import MatchSpec
@@ -49,6 +50,10 @@ def load(self):
        for meta_file in glob(join(self.prefix_path, 'conda-meta', '*.json')):
            self._load_single_record(meta_file)

    def reload(self):
        self.load()
        return self

    def insert(self, prefix_record):
        assert prefix_record.name not in self._prefix_records

@@ -134,6 +139,11 @@ def _load_single_record(self, prefix_record_json_path):
        prefix_record = PrefixRecord(**json_data)
        self.__prefix_records[prefix_record.name] = prefix_record

    @property
    def is_writable(self):
        test_path = join(self.prefix_path, PREFIX_MAGIC_FILE)
        return isfile(test_path) and file_path_is_writable(test_path)


def get_python_version_for_prefix(prefix):
    # returns a string e.g. "2.7", "3.4", "3.5" or None
5 changes: 5 additions & 0 deletions conda/core/subdir_data.py
@@ -124,6 +124,11 @@ def __init__(self, channel):
                                    splitext(cache_fn_url(self.url_w_credentials))[0])
        self._loaded = False

    def reload(self):
        self._loaded = False
        self.load()
        return self

    @property
    def cache_path_json(self):
        return self.cache_path_base + '.json'