diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..78ea7a385 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +octoprint_mrbeam/_version.py export-subst diff --git a/.gitignore b/.gitignore index ec9a7632a..bcd0a2ad6 100644 --- a/.gitignore +++ b/.gitignore @@ -40,3 +40,4 @@ venv/ tests/rsc/camera/debug/[^R]* tests/rsc/camera/out.jpg tests/logs/*.txt +*pytest-logs.txt diff --git a/MANIFEST.in b/MANIFEST.in index de6e71d88..f9b0e618c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,8 @@ include README.md +include versioneer.py +include octoprint_mrbeam/_version.py graft octoprint_mrbeam/templates graft octoprint_mrbeam/static graft octoprint_mrbeam/state graft octoprint_mrbeam/translations +graft octoprint_mrbeam/scripts diff --git a/octoprint_mrbeam/__init__.py b/octoprint_mrbeam/__init__.py index bfd1c1562..b0ae5321d 100644 --- a/octoprint_mrbeam/__init__.py +++ b/octoprint_mrbeam/__init__.py @@ -31,9 +31,15 @@ from octoprint.settings import settings from octoprint.events import Events as OctoPrintEvents +from octoprint_mrbeam.rest_handler.update_handler import UpdateRestHandlerMixin +from octoprint_mrbeam.util.connectivity_checker import ConnectivityChecker + IS_X86 = platform.machine() == "x86_64" +from ._version import get_versions + +__version__ = get_versions()["version"] +del get_versions -from octoprint_mrbeam.__version import __version__ from octoprint_mrbeam.iobeam.iobeam_handler import ioBeamHandler, IoBeamEvents from octoprint_mrbeam.iobeam.onebutton_handler import oneButtonHandler from octoprint_mrbeam.iobeam.interlock_handler import interLockHandler @@ -43,6 +49,7 @@ from octoprint_mrbeam.iobeam.hw_malfunction_handler import hwMalfunctionHandler from octoprint_mrbeam.iobeam.laserhead_handler import laserheadHandler from octoprint_mrbeam.iobeam.compressor_handler import compressor_handler +from octoprint_mrbeam.jinja.filter_loader import FilterLoader from octoprint_mrbeam.user_notification_system import user_notification_system from octoprint_mrbeam.analytics.analytics_handler import analyticsHandler from octoprint_mrbeam.analytics.usage_handler import usageHandler @@ -53,6 +60,10 @@ from octoprint_mrbeam.mrb_logger import init_mrb_logger, mrb_logger from octoprint_mrbeam.migrate import migrate from octoprint_mrbeam.os_health_care import os_health_care +from octoprint_mrbeam.rest_handler.docs_handler import DocsRestHandlerMixin +from octoprint_mrbeam.services.settings_service import SettingsService +from octoprint_mrbeam.services.burger_menu_service import BurgerMenuService +from octoprint_mrbeam.services.document_service import DocumentService from octoprint_mrbeam.wizard_config import WizardConfig from octoprint_mrbeam.printing.profile import ( laserCutterProfileManager, @@ -64,10 +75,8 @@ get_update_information, switch_software_channel, software_channels_available, - SW_UPDATE_TIER_PROD, - SW_UPDATE_TIER_BETA, - SW_UPDATE_TIER_DEV, BEAMOS_LEGACY_DATE, + SWUpdateTier, ) from octoprint_mrbeam.support import check_support_mode, check_calibration_tool_mode from octoprint_mrbeam.cli import get_cli_commands @@ -110,6 +119,8 @@ class MrBeamPlugin( octoprint.plugin.SlicerPlugin, octoprint.plugin.ShutdownPlugin, octoprint.plugin.EnvironmentDetectionPlugin, + UpdateRestHandlerMixin, + DocsRestHandlerMixin, ): # CONSTANTS ENV_PROD = "PROD" @@ -186,6 +197,9 @@ def __init__(self): # MrBeam Events needs to be registered in OctoPrint in order to be send to the frontend later on MrBeamEvents.register_with_octoprint() + # Jinja custom filters need to be 
loaded already on instance creation + FilterLoader.load_custom_jinja_filters() + # inside initialize() OctoPrint is already loaded, not assured during __init__()! def initialize(self): self._plugin_version = __version__ @@ -256,6 +270,10 @@ def initialize(self): self.mrbeam_plugin_initialized = True self.fire_event(MrBeamEvents.MRB_PLUGIN_INITIALIZED) + # move octoprints connectivity checker to a new var so we can use our abstraction + self._octoprint_connectivity_checker = self._connectivity_checker + self._connectivity_checker = ConnectivityChecker(self) + self._do_initial_log() def _init_frontend_logger(self): @@ -368,7 +386,7 @@ def get_settings_defaults(self): terminalMaxLines=2000, env=self.ENV_PROD, load_gremlins=False, - software_tier=SW_UPDATE_TIER_PROD, + software_tier=SWUpdateTier.STABLE.value, iobeam_disable_warnings=False, # for development on non-MrBeam devices suppress_migrations=False, # for development on non-MrBeam devices support_mode=False, @@ -454,7 +472,9 @@ def on_settings_load(self): dev=dict( env=self.get_env(), software_tier=self._settings.get(["dev", "software_tier"]), - software_tiers_available=software_channels_available(self), + software_tiers_available=[ + channel for channel in software_channels_available(self) + ], terminalMaxLines=self._settings.get(["dev", "terminalMaxLines"]), ), gcode_nextgen=dict( @@ -694,7 +714,7 @@ def get_assets(self): "css/hopscotch.min.css", "css/wizard.css", "css/tab_messages.css", - "css/software_update.css" + "css/software_update.css", ], less=["less/mrbeam.less"], ) @@ -818,6 +838,10 @@ def on_ui_render(self, now, request, render_kwargs): terminalEnabled=self._settings.get(["terminal"]) or self.support_mode, lasersafety_confirmation_dialog_version=self.LASERSAFETY_CONFIRMATION_DIALOG_VERSION, lasersafety_confirmation_dialog_language=language, + settings_model=SettingsService(self._logger, DocumentService(self._logger)).get_template_settings_model( + self.get_model_id()), + burger_menu_model=BurgerMenuService(self._logger, DocumentService(self._logger)).get_burger_menu_model( + self.get_model_id()), ) ) r = make_response(render_template("mrbeam_ui_index.jinja2", **render_kwargs)) @@ -2957,10 +2981,10 @@ def __calc_time_ntp_offset(self, log_out_of_sync=False): timer.start() def is_beta_channel(self): - return self._settings.get(["dev", "software_tier"]) == SW_UPDATE_TIER_BETA + return self._settings.get(["dev", "software_tier"]) == SWUpdateTier.BETA def is_develop_channel(self): - return self._settings.get(["dev", "software_tier"]) == SW_UPDATE_TIER_DEV + return self._settings.get(["dev", "software_tier"]) == SWUpdateTier.DEV def _get_mac_addresses(self): if not self._mac_addrs: diff --git a/octoprint_mrbeam/__version.py b/octoprint_mrbeam/__version.py deleted file mode 100644 index b2385cb40..000000000 --- a/octoprint_mrbeam/__version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.10.3" diff --git a/octoprint_mrbeam/_version.py b/octoprint_mrbeam/_version.py new file mode 100644 index 000000000..84f4bd3b2 --- /dev/null +++ b/octoprint_mrbeam/_version.py @@ -0,0 +1,520 @@ + +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. 
Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). + git_refnames = "$Format:%d$" + git_full = "$Format:%H$" + git_date = "$Format:%ci$" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440-post" + cfg.tag_prefix = "v" + cfg.parentdir_prefix = "" + cfg.versionfile_source = "octoprint_mrbeam/_version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. 
We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. 
"2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%s*" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
+ for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} diff --git a/octoprint_mrbeam/analytics/analytics_handler.py b/octoprint_mrbeam/analytics/analytics_handler.py index 2ddffaf21..58801694b 100644 --- a/octoprint_mrbeam/analytics/analytics_handler.py +++ b/octoprint_mrbeam/analytics/analytics_handler.py @@ -36,7 +36,7 @@ def analyticsHandler(plugin): class AnalyticsHandler(object): QUEUE_MAXSIZE = 1000 - ANALYTICS_LOG_VERSION = 21 # bumped in 0.10.0 - added the laser head model to analytics and added triggerData to session_expired event + ANALYTICS_LOG_VERSION = 22 # bumped for SW-653 - added frontend event update_info_call_failure to see when this backend call fails def __init__(self, plugin): self._plugin = plugin diff --git a/octoprint_mrbeam/dependencies.txt b/octoprint_mrbeam/dependencies.txt new file mode 100644 index 000000000..ec8ffd62e --- /dev/null +++ b/octoprint_mrbeam/dependencies.txt @@ -0,0 +1,4 @@ +iobeam==1.0.0a0 +mrb-hw-info==1.0.0a0 +mrbeam-ledstrips==1.0.0a0 +mrbeamdoc==1.0.0a0 \ No newline at end of file diff --git a/octoprint_mrbeam/jinja/__init__.py b/octoprint_mrbeam/jinja/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/octoprint_mrbeam/jinja/filter/__init__.py b/octoprint_mrbeam/jinja/filter/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/octoprint_mrbeam/jinja/filter/sort_filters.py b/octoprint_mrbeam/jinja/filter/sort_filters.py new file mode 100644 index 000000000..bffeb8388 --- /dev/null +++ b/octoprint_mrbeam/jinja/filter/sort_filters.py @@ -0,0 +1,9 @@ +def sort_enum(list, attribute=None): + def find_value_for(value): + enum = value + if attribute: + for attr in attribute.split('.'): + enum = getattr(enum, attr) + return enum.value + + return sorted(list, key=lambda element: find_value_for(element)) diff --git a/octoprint_mrbeam/jinja/filter_loader.py b/octoprint_mrbeam/jinja/filter_loader.py new file mode 100644 index 000000000..62a2a8983 --- /dev/null +++ b/octoprint_mrbeam/jinja/filter_loader.py @@ -0,0 +1,12 @@ +import jinja2 + +from octoprint_mrbeam.jinja.filter.sort_filters import sort_enum + + +class FilterLoader: + def __init__(self): + pass + + @staticmethod + def load_custom_jinja_filters(): + jinja2.filters.FILTERS['sort_enum'] = sort_enum diff --git a/octoprint_mrbeam/migrate.py b/octoprint_mrbeam/migrate.py index 893090034..3739a173f 100644 --- a/octoprint_mrbeam/migrate.py +++ b/octoprint_mrbeam/migrate.py @@ -6,19 +6,21 @@ from datetime import datetime from distutils.version import LooseVersion, StrictVersion -from octoprint_mrbeam import IS_X86 +from enum import Enum + +from octoprint_mrbeam import IS_X86, __version__ from octoprint_mrbeam.software_update_information import BEAMOS_LEGACY_DATE from octoprint_mrbeam.mrb_logger import mrb_logger from octoprint_mrbeam.util.cmd_exec import exec_cmd, exec_cmd_output from octoprint_mrbeam.util import logExceptions from octoprint_mrbeam.printing.profile import 
laserCutterProfileManager from octoprint_mrbeam.printing.comm_acc2 import MachineCom -from octoprint_mrbeam.__version import __version__ from octoprint_mrbeam.materials import materials from octoprint_mrbeam.migration import ( MIGRATION_STATE, MigrationBaseClass, list_of_migrations, + MIGRATION_RESTART, ) @@ -75,6 +77,7 @@ def __init__(self, plugin): ) beamos_tier, self.beamos_date = self.plugin._device_info.get_beamos_version() self.beamos_version = self.plugin._device_info.get_beamos_version_number() + self._restart = MIGRATION_RESTART.NONE def run(self): try: @@ -336,6 +339,8 @@ def _run_migration(self): # if migration sucessfull append to executed successfull if migration.state == MIGRATION_STATE.migration_done: migration_executed[migration.id] = True + if migration.restart: + self.restart = migration.restart else: # mark migration as failed and skipp the following ones migration_executed[migration.id] = False @@ -343,6 +348,8 @@ def _run_migration(self): with open(migrations_json_file_path, "w") as f: f.write(json.dumps(migration_executed)) + + MigrationBaseClass.execute_restart(self.restart) except IOError: self._logger.error("migration execution file IO error") except MigrationException as e: @@ -404,6 +411,15 @@ def save_current_version(self): ) # force needed to save it if it wasn't there self.plugin._settings.save() + @property + def restart(self): + return self._restart + + @restart.setter + def restart(self, value): + if self._restart == 0 or value < self._restart: + self._restart = value + ########################################################## ##### general stuff ##### ########################################################## diff --git a/octoprint_mrbeam/migration/Mig002.py b/octoprint_mrbeam/migration/Mig002.py new file mode 100644 index 000000000..f6c28ba08 --- /dev/null +++ b/octoprint_mrbeam/migration/Mig002.py @@ -0,0 +1,74 @@ +from octoprint_mrbeam.migration.migration_base import ( + MigrationBaseClass, + MIGRATION_RESTART, +) + + +class Mig002EnableOnlineCheck(MigrationBaseClass): + """ + Migration for beamos versions 0.0.0 up to 0.18.2 to enable online check + """ + + BEAMOS_VERSION_LOW = "0.0.0" + BEAMOS_VERSION_HIGH = "0.18.2" + + def __init__(self, plugin): + """ + initialization of the migration 002 + + Args: + plugin: Mr Beam Plugin + """ + super(Mig002EnableOnlineCheck, self).__init__( + plugin, restart=MIGRATION_RESTART.OCTOPRINT + ) + + @property + def id(self): + """ + return the id of the migration + + Returns: + string: id of the migration + """ + return "002" + + def _run(self): + """ + migration steps executed during migration + + Returns: + None + """ + self._logger.debug("change config to enable online check") + self.plugin._settings.global_set( + ["server", "onlineCheck", "enabled"], + True, + ) + self.plugin._settings.global_set( + ["server", "onlineCheck", "host"], + "find.mr-beam.org", + ) + self.plugin._settings.global_set( + ["server", "onlineCheck", "port"], + "80", + ) + self.plugin._settings.save() + + super(Mig002EnableOnlineCheck, self)._run() + + def _rollback(self): + """ + rollback steps executed during rollback + + Returns: + None + """ + # self._logger.debug("disable online check") + self.plugin._settings.global_set( + ["server", "onlineCheck", "enabled"], + False, + ) + self.plugin._settings.save() + + super(Mig002EnableOnlineCheck, self)._rollback() diff --git a/octoprint_mrbeam/migration/__init__.py b/octoprint_mrbeam/migration/__init__.py index e5e8ae40a..4f28b8484 100644 --- a/octoprint_mrbeam/migration/__init__.py +++
b/octoprint_mrbeam/migration/__init__.py @@ -16,11 +16,16 @@ from octoprint_mrbeam.migration.migration_base import ( MigrationException as MigrationException, ) +from octoprint_mrbeam.migration.migration_base import ( + MIGRATION_RESTART as MIGRATION_RESTART, +) # this is for internal use from octoprint_mrbeam.migration.Mig001 import Mig001NetconnectdDisableLogDebugLevel +from octoprint_mrbeam.migration.Mig002 import Mig002EnableOnlineCheck # To add migrations they have to be added to this list till we automate it list_of_migrations = [ Mig001NetconnectdDisableLogDebugLevel, + Mig002EnableOnlineCheck, ] diff --git a/octoprint_mrbeam/migration/migration_base.py b/octoprint_mrbeam/migration/migration_base.py index 2c67272fe..c86fd5c87 100644 --- a/octoprint_mrbeam/migration/migration_base.py +++ b/octoprint_mrbeam/migration/migration_base.py @@ -24,6 +24,12 @@ class MIGRATION_STATE(enumerate): rollback_error = -2 +class MIGRATION_RESTART(enumerate): + NONE = 0 + DEVICE = 1 + OCTOPRINT = 2 + + class MigrationException(Exception): """ Exception that could occure during migration @@ -46,7 +52,7 @@ class MigrationBaseClass: # highest beamos version that should run the migration BEAMOS_VERSION_HIGH = None - def __init__(self, plugin): + def __init__(self, plugin, restart=MIGRATION_RESTART.NONE): """ initalization of the class @@ -58,6 +64,7 @@ def __init__(self, plugin): self._logger = mrb_logger( "octoprint.plugins.mrbeam.migrate." + self.__class__.__name__ ) + self.restart = restart @property @abstractmethod @@ -207,3 +214,20 @@ def exec_cmd(self, command): """ if not exec_cmd(command): raise MigrationException("error during migration for cmd:", command) + + @staticmethod + def execute_restart(restart): + logger = mrb_logger("octoprint.plugins.mrbeam.migrate.restart") + if restart: + if restart == MIGRATION_RESTART.OCTOPRINT: + logger.info("restart octoprint after migration") + exec_cmd("sudo systemctl restart octoprint.service") + elif restart == MIGRATION_RESTART.DEVICE: + logger.info("restart device after migration") + exec_cmd("sudo reboot now") + else: + logger.info( + "restart after migration choosen but unknown type: {}".format( + restart + ) + ) diff --git a/octoprint_mrbeam/model/__init__.py b/octoprint_mrbeam/model/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/octoprint_mrbeam/model/burger_menu_model.py b/octoprint_mrbeam/model/burger_menu_model.py new file mode 100644 index 000000000..762b7afb1 --- /dev/null +++ b/octoprint_mrbeam/model/burger_menu_model.py @@ -0,0 +1,10 @@ + +class BurgerMenuModel: + """ + Data object containing information to be displayed under the burger menu to be used on the jinja2 templates + """ + def __init__(self): + self.documents = set() + + def add_document(self, document): + self.documents.add(document) diff --git a/octoprint_mrbeam/model/document_model.py b/octoprint_mrbeam/model/document_model.py new file mode 100644 index 000000000..a91e47170 --- /dev/null +++ b/octoprint_mrbeam/model/document_model.py @@ -0,0 +1,35 @@ +class DocumentModel: + """ + Data object containing information documents to be used on the jinja2 templates + """ + def __init__(self, title, document_links): + self.title = title + self.document_links = document_links + + def __repr__(self): + return 'Document(title=%s, document_links=%s)' % ( + self.title, ','.join([repr(document_link) for document_link in self.document_links])) + + +class DocumentSimpleModel: + """ + Data object containing a simplified version of the information about documents to be used 
on the jinja2 templates + """ + def __init__(self, title, document_link): + self.title = title + self.document_link = document_link + + def __repr__(self): + return 'Document(title=%s, document_link=%s)' % (self.title, repr(self.document_link)) + + +class DocumentLinkModel: + """ + Data object containing information to be able to display a link to a document on the jinja2 templates + """ + def __init__(self, language, url): + self.language = language + self.url = url + + def __repr__(self): + return 'DocumentLink(language=%s, url=%s)' % (self.language, self.url) diff --git a/octoprint_mrbeam/model/settings_model.py b/octoprint_mrbeam/model/settings_model.py new file mode 100644 index 000000000..9dacbd38b --- /dev/null +++ b/octoprint_mrbeam/model/settings_model.py @@ -0,0 +1,20 @@ +class SettingsModel: + """ + Data object containing information about the settings to be used on the jinja2 templates + """ + def __init__(self): + self.about = None + + def __repr__(self): + return 'SettingsModel(about=%s)' % (repr(self.about)) + + +class AboutModel: + """ + Data object containing information corresponding to the about section to be used on the jinja2 templates + """ + def __init__(self, support_documents=[]): + self.support_documents = support_documents + + def __repr__(self): + return 'About(support_documents=%s)' % (','.join([repr(document) for document in self.support_documents])) diff --git a/octoprint_mrbeam/rest_handler/__init__.py b/octoprint_mrbeam/rest_handler/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/octoprint_mrbeam/rest_handler/docs_handler.py b/octoprint_mrbeam/rest_handler/docs_handler.py new file mode 100644 index 000000000..51008b297 --- /dev/null +++ b/octoprint_mrbeam/rest_handler/docs_handler.py @@ -0,0 +1,33 @@ +import octoprint.plugin +from flask import abort, send_file +from octoprint_mrbeamdoc.exception.mrbeam_doc_not_found import MrBeamDocNotFoundException +from octoprint_mrbeamdoc.utils.mrbeam_doc_utils import MrBeamDocUtils + + +class DocsRestHandlerMixin: + """ + This class contains all the rest handlers and endpoints related to handle docs + """ + + @octoprint.plugin.BlueprintPlugin.route( + "/docs///.", methods=["GET"]) + def get_doc(self, model, doctype, language, extension): + self._logger.debug( + 'Request to Model: %(model)s Doctype: %(doctype)s Language: %(language)s Extension:%(extension)s', + {'model': model, 'doctype': doctype, 'language': language, 'extension': extension}) + + mrbeam_model_found = MrBeamDocUtils.get_mrbeam_model_enum_for(model) + supported_language_found = MrBeamDocUtils.get_supported_language_enum_for(language) + mrbeam_doctype_found = MrBeamDocUtils.get_mrbeamdoc_type_enum_for(doctype) + + if mrbeam_model_found is None or supported_language_found is None or mrbeam_doctype_found is None: + abort(404) + + try: + mrbeamdoc = MrBeamDocUtils.get_mrbeamdoc_for(mrbeam_doctype_found, mrbeam_model_found, + supported_language_found, extension=extension) + except MrBeamDocNotFoundException as e: + self._logger.warn(e) + abort(404) + + return send_file(mrbeamdoc.get_file_reference(), attachment_filename=mrbeamdoc.get_file_name_with_extension()) diff --git a/octoprint_mrbeam/rest_handler/update_handler.py b/octoprint_mrbeam/rest_handler/update_handler.py new file mode 100644 index 000000000..3f8b27f82 --- /dev/null +++ b/octoprint_mrbeam/rest_handler/update_handler.py @@ -0,0 +1,16 @@ +from octoprint.server import NO_CONTENT + +import octoprint.plugin + +from octoprint_mrbeam.software_update_information import 
reload_update_info + + +class UpdateRestHandlerMixin: + """ + This class contains all the rest handlers and endpoints related to software update + """ + + @octoprint.plugin.BlueprintPlugin.route("/info/update", methods=["POST"]) + def update_update_informations(self): + reload_update_info(self) + return NO_CONTENT diff --git a/octoprint_mrbeam/scripts/update_script.py b/octoprint_mrbeam/scripts/update_script.py new file mode 100644 index 000000000..ec6844950 --- /dev/null +++ b/octoprint_mrbeam/scripts/update_script.py @@ -0,0 +1,485 @@ +from __future__ import absolute_import, division, print_function + +import json +import os +import re +import shutil +import subprocess +import sys +from io import BytesIO + +import zipfile +import requests +import argparse + +from octoprint.plugins.softwareupdate import exceptions + +from octoprint.settings import _default_basedir +from octoprint_mrbeam.mrb_logger import mrb_logger + +from octoprint_mrbeam.util.pip_util import get_version_of_pip_module, get_pip_caller +from requests.adapters import HTTPAdapter +from urllib3 import Retry +from urllib3.exceptions import MaxRetryError, ConnectionError + +_logger = mrb_logger("octoprint.plugins.mrbeam.softwareupdate.updatescript") + + +UPDATE_CONFIG_NAME = "mrbeam" +REPO_NAME = "MrBeamPlugin" +MAIN_SRC_FOLDER_NAME = "octoprint_mrbeam" +PLUGIN_NAME = "Mr_Beam" +DEFAULT_OPRINT_VENV = "/home/pi/oprint/bin/pip" +PIP_WHEEL_TEMP_FOLDER = "/tmp/wheelhouse" + + +def _parse_arguments(): + boolean_trues = ["true", "yes", "1"] + + parser = argparse.ArgumentParser(prog=__file__) + + parser.add_argument( + "--git", + action="store", + type=str, + dest="git_executable", + help="Specify git executable to use", + ) + parser.add_argument( + "--python", + action="store", + type=str, + dest="python_executable", + help="Specify python executable to use", + ) + parser.add_argument( + "--force", + action="store", + type=lambda x: x in boolean_trues, + dest="force", + default=False, + help="Set this to true to force the update to only the specified version (nothing newer, nothing older)", + ) + parser.add_argument( + "--sudo", action="store_true", dest="sudo", help="Install with sudo" + ) + parser.add_argument( + "--user", + action="store_true", + dest="user", + help="Install to the user site directory instead of the general site directory", + ) + parser.add_argument( + "--branch", + action="store", + type=str, + dest="branch", + default=None, + help="Specify the branch to make sure is checked out", + ) + parser.add_argument( + "--call", + action="store", + type=lambda x: x in boolean_trues, + dest="call", + default=False, + help="Calls the update methode", + ) + parser.add_argument( + "--archive", + action="store", + type=str, + dest="archive", + default=None, + help="Path of target zip file on local system", + ) + parser.add_argument( + "folder", + type=str, + help="Specify the base folder of the OctoPrint installation to update", + ) + parser.add_argument( + "target", type=str, help="Specify the commit or tag to which to update" + ) + + args = parser.parse_args() + + return args + + +def get_dependencies(path): + """ + return the dependencies saved in the + + Args: + path: path to the dependencies.txt file + + Returns: + list of dependencie dict [{"name", "version"}] + """ + dependencies_path = os.path.join(path, "dependencies.txt") + dependencies_pattern = r"([a-z]+(?:[_-][a-z]+)*)(.=)+(([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|rc)(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?)" + """ + Example: 
+ input: iobeam==0.7.15 + mrb-hw-info==0.0.25 + mrbeam-ledstrips==0.2.2-alpha.2 + output: [[iobeam][==][0.7.15]] + [[mrb-hw-info][==][0.0.25]] + [[mrbeam-ledstrips][==][0.2.2-alpha.2]] + """ + try: + with open(dependencies_path, "r") as f: + dependencies_content = f.read() + dependencies = re.findall(dependencies_pattern, dependencies_content) + dependencies = [{"name": dep[0], "version": dep[2]} for dep in dependencies] + except IOError: + raise RuntimeError("Could not load dependencies") + return dependencies + + +def get_update_info(): + """ + returns the update info saved in the update_info.json file + """ + update_info_path = os.path.join(_default_basedir("OctoPrint"), "update_info.json") + try: + with open(update_info_path, "r") as f: + update_info = json.load(f) + except IOError: + raise RuntimeError("Could not load update info") + except ValueError as e: + raise RuntimeError("update info not valid json - {}".format(e)) + return update_info + + +def build_wheels(build_queue): + """ + build the wheels of the packages in the queue + + Args: + build_queue: dict of venvs with a list of packages to build the wheels + + Returns: + None + + """ + try: + if not os.path.isdir(PIP_WHEEL_TEMP_FOLDER): + os.mkdir(PIP_WHEEL_TEMP_FOLDER) + except OSError as e: + raise RuntimeError("can't create wheel tmp folder {} - {}".format(PIP_WHEEL_TEMP_FOLDER, e)) + + for venv, packages in build_queue.items(): + tmp_folder = os.path.join(PIP_WHEEL_TEMP_FOLDER, re.search(r"\w+((?=\/venv)|(?=\/bin))", venv).group(0)) + if os.path.isdir(tmp_folder): + try: + os.system("sudo rm -r {}".format(tmp_folder)) + except Exception as e: + raise RuntimeError("can't delete pip wheel temp folder {} - {}".format(tmp_folder, e)) + + pip_args = [ + "wheel", + "--disable-pip-version-check", + "--wheel-dir={}".format(tmp_folder), # Build wheels into , where the default is the current working directory. + "--no-dependencies", # Don't install package dependencies. + ] + for package in packages: + if package.get("archive"): + pip_args.append(package.get("archive")) + else: + raise RuntimeError("Archive not found for package {}".format(package)) + + returncode, exec_stdout, exec_stderr = get_pip_caller(venv, _logger).execute( + *pip_args + ) + if returncode != 0: + raise exceptions.UpdateError( + "Error while executing pip wheel", (exec_stdout, exec_stderr) + ) + + +def install_wheels(install_queue): + """ + installs the wheels in the given venv of the queue + + Args: + install_queue: dict of venvs with a list of packages to install + + Returns: + None + """ + if not isinstance(install_queue, dict): + raise RuntimeError("install queue is not a dict") + + for venv, packages in install_queue.items(): + tmp_folder = os.path.join(PIP_WHEEL_TEMP_FOLDER, re.search(r"\w+((?=\/venv)|(?=\/bin))", venv).group(0)) + pip_args = [ + "install", + "--disable-pip-version-check", + "--upgrade", # Upgrade all specified packages to the newest available version. The handling of dependencies depends on the upgrade-strategy used. + "--force-reinstall", # Reinstall all packages even if they are already up-to-date. + "--no-index", # Ignore package index (only looking at --find-links URLs instead). + "--find-links={}".format(tmp_folder), # If a URL or path to an html file, then parse for links to archives such as sdist (.tar.gz) or wheel (.whl) files. If a local path or file:// URL that's a directory, then look for archives in the directory listing. Links to VCS project URLs are not supported. + "--no-dependencies", # Don't install package dependencies. 
+ ] + for package in packages: + pip_args.append( + "{package}".format( + package=package["name"] + ) + ) + + returncode, exec_stdout, exec_stderr = get_pip_caller(venv, _logger).execute( + *pip_args + ) + if returncode != 0: + raise exceptions.UpdateError( + "Error while executing pip install", (exec_stdout, exec_stderr) + ) + + +def build_queue(update_info, dependencies, plugin_archive): + """ + build the queue of packages to install + + Args: + update_info: a dict of informations how to update the packages + dependencies: a list dicts of dependencies [{"name", "version"}] + plugin_archive: path to archive of the plugin + + Returns: + install_queue: dict of venvs with a list of package dicts {"": [{"name", "archive", "target"}] + """ + install_queue = {} + + install_queue.setdefault( + update_info.get(UPDATE_CONFIG_NAME).get("pip_command", DEFAULT_OPRINT_VENV), [] + ).append( + { + "name": PLUGIN_NAME, + "archive": plugin_archive, + "target": '', + } + ) + print("dependencies - {}".format(dependencies)) + if dependencies: + for dependency in dependencies: + plugin_config = update_info.get(UPDATE_CONFIG_NAME) + plugin_dependencies_config = plugin_config.get("dependencies") + dependency_config = plugin_dependencies_config.get(dependency["name"]) + + # fail if requirements file contains dependencies but cloud config not + if dependency_config == None: + raise RuntimeError( + "no update info for dependency {}".format(dependency["name"]) + ) + + # override the dependency version from the dependencies files with the one from the cloud config + if dependency_config.get("version"): + version_needed = dependency_config.get("version") + else: + version_needed = dependency.get("version") + + if dependency_config.get("pip"): + archive = dependency_config["pip"].format( + target_version="v{version}".format(version=version_needed), + ) + else: + raise RuntimeError( + "pip not configured for {}".format(dependency["name"]) + ) + + installed_version = get_version_of_pip_module( + dependency["name"], + dependency_config.get("pip_command", DEFAULT_OPRINT_VENV), + ) + + if installed_version != version_needed: + install_queue.setdefault( + dependency_config.get("pip_command", DEFAULT_OPRINT_VENV), [] + ).append( + { + "name": dependency["name"], + "archive": archive, + "target": version_needed, + } + ) + else: + print( + "skip dependency {} as the target version {} is already installed".format( + dependency["name"], version_needed + ) + ) + return install_queue + + +def run_update(): + """ + collects the dependencies and the update info, builds the wheels and installs them in the correct venv + """ + + args = _parse_arguments() + + # get dependencies + dependencies = get_dependencies(args.folder) + + # get update config of dependencies + update_info = get_update_info() + + install_queue = build_queue( + update_info, dependencies, args.archive + ) + + print("install_queue", install_queue) + if install_queue is not None: + build_wheels(install_queue) + install_wheels(install_queue) + + +def retryget(url, retrys=3, backoff_factor=0.3): + """ + retrys the get times + + Args: + url: url to access + retrys: number of retrys + backoff_factor: factor for time between retrys + + Returns: + response + """ + try: + s = requests.Session() + retry = Retry(connect=retrys, backoff_factor=backoff_factor) + adapter = HTTPAdapter(max_retries=retry) + s.mount("https://", adapter) + s.keep_alive = False + + response = s.request("GET", url) + return response + except MaxRetryError: + raise RuntimeError("timeout while trying to 
get {}".format(url)) + except ConnectionError: + raise RuntimeError("connection error while trying to get {}".format(url)) + + +def loadPluginTarget(archive, folder): + """ + download the archive of the Plugin and copy dependencies and update script in the working directory + + Args: + archive: path of the archive to download and unzip + folder: working directory + + Returns: + zip_file_path - path of the downloaded zip file + """ + + # download target repo zip + req = retryget(archive) + filename = archive.split("/")[-1] + zip_file_path = os.path.join(folder, filename) + try: + with open(zip_file_path, "wb") as output_file: + output_file.write(req.content) + except IOError: + raise RuntimeError( + "Could not save the zip file to the working directory {}".format(folder) + ) + + # unzip repo + plugin_extracted_path = os.path.join(folder, UPDATE_CONFIG_NAME) + plugin_extracted_path_folder = os.path.join( + plugin_extracted_path, + "{repo_name}-{target}".format( + repo_name=REPO_NAME, target=re.sub(r"^v", "", filename.split(".zip")[0]) + ), + ) + try: + plugin_zipfile = zipfile.ZipFile(BytesIO(req.content)) + plugin_zipfile.extractall(plugin_extracted_path) + plugin_zipfile.close() + except (zipfile.BadZipfile, zipfile.LargeZipFile) as e: + raise RuntimeError("Could not unzip plugin repo - error: {}".format(e)) + + # copy new dependencies to working directory + try: + shutil.copy2( + os.path.join( + plugin_extracted_path_folder, MAIN_SRC_FOLDER_NAME, "dependencies.txt" + ), + os.path.join(folder, "dependencies.txt"), + ) + except IOError: + raise RuntimeError("Could not copy dependencies to working directory") + + # copy new update script to working directory + try: + shutil.copy2( + os.path.join( + plugin_extracted_path_folder, + MAIN_SRC_FOLDER_NAME, + "scripts/update_script.py", + ), + os.path.join(folder, "update_script.py"), + ) + except IOError: + raise RuntimeError("Could not copy update_script to working directory") + + return zip_file_path + + +def main(): + """ + loads the dependencies.txt and the update_script of the given target and executes the new update_script + + Args: + target: target of the Mr Beam Plugin to update to + call: if true executet the update itselfe + """ + + args = _parse_arguments() + if args.call: + if args.archive is None: + raise RuntimeError( + "Could not run update archive is missing" + ) + run_update() + else: + + folder = args.folder + + import os + + if not os.access(folder, os.W_OK): + raise RuntimeError("Could not update, base folder is not writable") + + update_info = get_update_info() + archive = loadPluginTarget( + update_info.get(UPDATE_CONFIG_NAME) + .get("pip") + .format(target_version=args.target), + folder, + ) + + # call new update script with args + sys.argv = [ + "--call=true", + "--archive={}".format(archive) + ] + sys.argv[1:] + try: + result = subprocess.call( + [sys.executable, os.path.join(folder, "update_script.py")] + sys.argv, + stderr=subprocess.STDOUT, + ) + except subprocess.CalledProcessError as e: + print(e.output) + raise RuntimeError("error code %s", (e.returncode, e.output)) + + if result != 0: + raise RuntimeError("Error Could not update returncode - {}".format(result)) + + +if __name__ == "__main__": + main() diff --git a/octoprint_mrbeam/services/__init__.py b/octoprint_mrbeam/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/octoprint_mrbeam/services/burger_menu_service.py b/octoprint_mrbeam/services/burger_menu_service.py new file mode 100644 index 000000000..7cd0b9672 --- /dev/null +++ 
b/octoprint_mrbeam/services/burger_menu_service.py @@ -0,0 +1,39 @@ +from flask_babel import get_locale +from octoprint_mrbeamdoc.enum.supported_languages import SupportedLanguage +from octoprint_mrbeamdoc.utils.mrbeam_doc_utils import MrBeamDocUtils + +from octoprint_mrbeam.model.burger_menu_model import BurgerMenuModel + + +class BurgerMenuService: + """ + In this class we gather all the service layer calculations needed regarding the burger menu + """ + + def __init__(self, logger, document_service): + self._logger = logger + self._document_service = document_service + + def get_burger_menu_model(self, mrbeam_model): + """ + mrbeam_model String: Name of the running mrbeam_model + + Return BurgerMenuModel containing all the burger menu related information for this specific mrbeam_model + """ + mrbeam_model_found = MrBeamDocUtils.get_mrbeam_model_enum_for(mrbeam_model) + if mrbeam_model_found is None: + self._logger.error('MrBeamModel not identified %s', mrbeam_model) + return BurgerMenuModel() + + language_found = MrBeamDocUtils.get_supported_language_enum_for(get_locale().language) + if language_found is None: + language_found = SupportedLanguage.ENGLISH + + burger_model = BurgerMenuModel() + definitions = MrBeamDocUtils.get_mrbeam_definitions_for(mrbeam_model_found) + for definition in definitions: + language_found = language_found if definition.is_language_supported( + language_found) else SupportedLanguage.ENGLISH + document_simple = self._document_service.get_document_simple_for(definition, language_found) + burger_model.add_document(document_simple) + return burger_model diff --git a/octoprint_mrbeam/services/document_service.py b/octoprint_mrbeam/services/document_service.py new file mode 100644 index 000000000..be334ba56 --- /dev/null +++ b/octoprint_mrbeam/services/document_service.py @@ -0,0 +1,57 @@ +from flask_babel import get_locale, gettext + +from octoprint_mrbeam.model.document_model import DocumentLinkModel, DocumentModel, DocumentSimpleModel +from octoprint_mrbeam.util import string_util + + +class DocumentService: + """ + In this class we gather all the service layer calculations needed regarding documents + """ + + def __init__(self, logger): + self._logger = logger + + def get_documents_for(self, definition): + """ + Get document information corresponding to a definition + + definition MrBeamDocDefinition: definition of the document + + return DocumentModel corresponding to the requested params + """ + document_links = [DocumentLinkModel(language, self._get_url_for_definition_language(definition, language)) for + language in definition.supported_languages] + title_translated = self._get_title_translated(definition) + return DocumentModel(title_translated, document_links) + + def get_document_simple_for(self, definition, language): + """ + Get a simplified version of the document corresponding to a definition and language + + definition MrBeamDocDefinition: definition of the document + language SupportedLanguage: language of the document + + return DocumentSimpleModel corresponding to the requested params + """ + document_link = DocumentLinkModel(language, self._get_url_for_definition_language(definition, language)) + title_translated = self._get_title_translated(definition) + return DocumentSimpleModel(title_translated, document_link) + + def _get_title_translated(self, definition): + title_key = string_util.separate_camelcase_words(definition.mrbeamdoc_type.value) + title_translated = gettext(title_key) + if get_locale() is not None and get_locale().language != 'en' 
and title_key == title_translated: + self._logger.error( + 'No key found for title_key=%(title_key)s title_translated=%(title_translated)s' % { + 'title_key': title_key, + 'title_translated': title_translated}) + return title_translated + + @staticmethod + def _get_url_for_definition_language(definition, language, extension='pdf'): + return '/plugin/mrbeam/docs/%(mrbeam_model)s/%(language)s/%(mrbeam_type)s.%(extension)s' % { + 'mrbeam_model': definition.mrbeam_model.value, + 'language': language.value, + 'mrbeam_type': definition.mrbeamdoc_type.value, + 'extension': extension} diff --git a/octoprint_mrbeam/services/settings_service.py b/octoprint_mrbeam/services/settings_service.py new file mode 100644 index 000000000..8235992f5 --- /dev/null +++ b/octoprint_mrbeam/services/settings_service.py @@ -0,0 +1,35 @@ +from octoprint_mrbeamdoc.utils.mrbeam_doc_utils import MrBeamDocUtils + +from octoprint_mrbeam.model.settings_model import SettingsModel, AboutModel + + +class SettingsService: + """ + In this class we gather all the service layer calculations needed regarding settings + """ + + def __init__(self, logger, document_service): + self._logger = logger + self._document_service = document_service + + def get_template_settings_model(self, mrbeam_model): + """ + mrbeam_model String: Name of the running mrbeam_model + + Return SettingsModel containing all the information and settings available for this specific mrbeam_model + """ + mrbeam_model_found = MrBeamDocUtils.get_mrbeam_model_enum_for(mrbeam_model) + if mrbeam_model_found is None: + self._logger.error('MrBeamModel not identified %s', mrbeam_model) + return self._empty_settings_model() + + definitions = MrBeamDocUtils.get_mrbeam_definitions_for(mrbeam_model_found) + settings_model = SettingsModel() + settings_model.about = AboutModel( + support_documents=[self._document_service.get_documents_for(definition) for definition in definitions]) + return settings_model + + def _empty_settings_model(self): + settings_model = SettingsModel() + settings_model.about = AboutModel() + return settings_model diff --git a/octoprint_mrbeam/software_update_information.py b/octoprint_mrbeam/software_update_information.py index f2114cb44..d497269b0 100644 --- a/octoprint_mrbeam/software_update_information.py +++ b/octoprint_mrbeam/software_update_information.py @@ -1,27 +1,45 @@ -from datetime import datetime, date -import os, sys +import copy +import json +import operator +import os +from datetime import date + +import pkg_resources +from enum import Enum + +import semantic_version +import yaml +from octoprint.plugins.softwareupdate import exceptions as softwareupdate_exceptions +from requests import ConnectionError +from requests.adapters import HTTPAdapter, MaxRetryError +from semantic_version import Spec +from urllib3 import Retry -from octoprint.util import dict_merge -from octoprint_mrbeam import IS_X86 from octoprint_mrbeam.mrb_logger import mrb_logger -from octoprint_mrbeam.util import logExceptions +from octoprint_mrbeam.util import dict_merge, logExceptions +from octoprint_mrbeam.util.github_api import get_file_of_repo_for_tag from util.pip_util import get_version_of_pip_module -SW_UPDATE_TIER_PROD = "PROD" -SW_UPDATE_TIER_BETA = "BETA" -SW_UPDATE_TIER_ALPHA = "ALPHA" -SW_UPDATE_TIER_DEV = "DEV" -DEFAULT_REPO_BRANCH_ID = { - SW_UPDATE_TIER_PROD: "stable", - SW_UPDATE_TIER_BETA: "beta", - SW_UPDATE_TIER_ALPHA: "alpha", - SW_UPDATE_TIER_DEV: "develop", -} +class SWUpdateTier(Enum): + STABLE = "PROD" + BETA = "BETA" + ALPHA = "ALPHA" + DEV = "DEV" 
+ -# add to the display name to modules that should be shown at the top of the list -SORT_UP_PREFIX = " " +SW_UPDATE_TIERS_DEV = [SWUpdateTier.ALPHA.value, SWUpdateTier.DEV.value] +SW_UPDATE_TIERS_PROD = [SWUpdateTier.STABLE.value, SWUpdateTier.BETA.value] +SW_UPDATE_TIERS = SW_UPDATE_TIERS_DEV + SW_UPDATE_TIERS_PROD +DEFAULT_REPO_BRANCH_ID = { + SWUpdateTier.STABLE.value: "stable", + SWUpdateTier.BETA.value: "beta", + SWUpdateTier.ALPHA.value: "alpha", + SWUpdateTier.DEV.value: "develop", +} +MAJOR_VERSION_CLOUD_CONFIG = 1 +SW_UPDATE_INFO_FILE_NAME = "update_info.json" _logger = mrb_logger("octoprint.plugins.mrbeam.software_update_information") @@ -32,308 +50,552 @@ GLOBAL_PIP_COMMAND = ( "sudo {}".format(GLOBAL_PIP_BIN) if os.path.isfile(GLOBAL_PIP_BIN) else None ) -# GLOBAL_PIP_COMMAND = "sudo {} -m pip".format(GLOBAL_PY_BIN) if os.path.isfile(GLOBAL_PY_BIN) else None # --disable-pip-version-check -# VENV_PIP_COMMAND = ("%s -m pip --disable-pip-version-check" % VENV_PY_BIN).split(' ') if os.path.isfile(VENV_PY_BIN) else None -BEAMOS_LEGACY_DATE = date(2018, 1, 12) + +BEAMOS_LEGACY_VERSION = "0.14.0" +BEAMOS_LEGACY_DATE = date(2018, 1, 12) # still used in the migrations + + +def get_tag_of_github_repo(repo): + """ + return the latest tag of a github repository + Args: + repo: repository name + + Returns: + latest tag of the given majorversion + """ + import requests + import json + + try: + url = "https://api.github.com/repos/mrbeam/{repo}/tags".format(repo=repo) + headers = { + "Accept": "application/json", + } + + s = requests.Session() + retry = Retry(connect=3, backoff_factor=0.3) + adapter = HTTPAdapter(max_retries=retry) + s.mount("https://", adapter) + s.keep_alive = False + + response = s.request("GET", url, headers=headers, timeout=3) + response.raise_for_status() # This will throw an exception if status is 4xx or 5xx + if response: + json_data = json.loads(response.text) + versionlist = [ + semantic_version.Version(version.get("name")[1:]) + for version in json_data + ] + majorversion = Spec( + "<{}.0.0".format(str(MAJOR_VERSION_CLOUD_CONFIG + 1)) + ) # simpleSpec("0.*.*") + return "v{}".format(majorversion.select(versionlist)) + else: + _logger.warning( + "no valid response for the tag of the update_config file {}".format( + response + ) + ) + return None + except MaxRetryError: + _logger.warning("timeout while trying to get the tag of the update_config file") + return None + except requests.HTTPError as e: + _logger.warning("server error {}".format(e)) + return None + except ConnectionError: + _logger.warning( + "connection error while trying to get the tag of the update_config file" + ) + return None def get_update_information(plugin): - result = dict() - - tier = plugin._settings.get(["dev", "software_tier"]) - beamos_tier, beamos_date = plugin._device_info.get_beamos_version() - _logger.info("SoftwareUpdate using tier: %s", tier) - - # The increased number of separate virtualenv for iobeam, netconnectd, ledstrips - # will increase the "discovery time" to find those package versions. 
- # "map-reduce" method can decrease lookup time by processing them in parallel - res = dict( - reduce( - dict_merge, - [ - _set_info_mrbeam_plugin(plugin, tier, beamos_date), - _set_info_mrbeamdoc(plugin, tier), - _set_info_netconnectd_plugin(plugin, tier, beamos_date), - _set_info_findmymrbeam(plugin, tier), - _set_info_mrbeamledstrips(plugin, tier, beamos_date), - _set_info_netconnectd_daemon(plugin, tier, beamos_date), - _set_info_iobeam(plugin, tier, beamos_date), - _set_info_mrb_hw_info(plugin, tier, beamos_date), - _config_octoprint(plugin, tier), - ], + """ + Gets called from the octoprint.plugin.softwareupdate.check_config Hook from Octoprint + Starts a thread to look online for a new config file + sets the config for the Octoprint Softwareupdate Plugin with the data from the config file + Args: + plugin: Mr Beam Plugin + + Returns: + the config for the Octoprint embedded softwareupdate Plugin + """ + try: + tier = plugin._settings.get(["dev", "software_tier"]) + beamos_version = plugin._device_info.get_beamos_version_number() + _logger.info( + "SoftwareUpdate using tier: {tier} {beamos_version}".format( + tier=tier, beamos_version=beamos_version + ) ) - ) - for pack, updt_info in res.items(): - _logger.debug( - "{} targets branch {} using pip {}".format( - pack, - updt_info.get("branch"), - updt_info.get("pip_command", "~/oprint/bin/pip"), + + if plugin._connectivity_checker.check_immediately(): + config_tag = get_tag_of_github_repo("beamos_config") + # if plugin._connectivity_checker.check_immediately(): # check if device online + if config_tag: + cloud_config = yaml.safe_load( + get_file_of_repo_for_tag( + repo="beamos_config", + file="docs/sw-update-conf.json", + tag=config_tag, + ) + ) + if cloud_config: + return _set_info_from_cloud_config( + plugin, tier, beamos_version, cloud_config + ) + else: + _logger.warn("no internet connection") + + user_notification_system = plugin.user_notification_system + user_notification_system.show_notifications( + user_notification_system.get_notification( + notification_id="missing_updateinformation_info", replay=False ) ) - return res + + # mark update config as dirty + sw_update_plugin = plugin._plugin_manager.get_plugin_info( + "softwareupdate" + ).implementation + _clear_version_cache(sw_update_plugin) + except Exception as e: + _logger.exception(e) + + return _set_info_from_cloud_config( + plugin, + tier, + beamos_version, + { + "default": {}, + "modules": { + "mrbeam": { + "name": " MrBeam Plugin", + "type": "github_commit", + "user": "", + "repo": "", + "pip": "", + }, + "netconnectd": { + "name": "OctoPrint-Netconnectd Plugin", + "type": "github_commit", + "user": "", + "repo": "", + "pip": "", + }, + "findmymrbeam": { + "name": "OctoPrint-FindMyMrBeam", + "type": "github_commit", + "user": "", + "repo": "", + "pip": "", + }, + }, + }, + ) + + +def _clear_version_cache(sw_update_plugin): + sw_update_plugin._version_cache = dict() + sw_update_plugin._version_cache_dirty = True def software_channels_available(plugin): - ret = [SW_UPDATE_TIER_PROD, SW_UPDATE_TIER_BETA] + """ + return the available software channels + Args: + plugin: Mr Beam Plugin + + Returns: + list of available software channels + """ + ret = copy.deepcopy(SW_UPDATE_TIERS_PROD) if plugin.is_dev_env(): # fmt: off - ret.extend([SW_UPDATE_TIER_ALPHA, SW_UPDATE_TIER_DEV,]) + ret += SW_UPDATE_TIERS_DEV # fmt: on return ret -@logExceptions def switch_software_channel(plugin, channel): + """ + Switches the Softwarechannel and triggers the reload of the config + Args: + 
plugin: Mr Beam Plugin + channel: the channel where to switch to + + Returns: + None + """ old_channel = plugin._settings.get(["dev", "software_tier"]) if channel in software_channels_available(plugin) and channel != old_channel: - _logger.info("Switching software channel to: %s", channel) + _logger.info("Switching software channel to: {channel}".format(channel=channel)) plugin._settings.set(["dev", "software_tier"], channel) - # fmt: off - sw_update_plugin = plugin._plugin_manager.get_plugin_info("softwareupdate").implementation - # fmt: on - sw_update_plugin._refresh_configured_checks = True - sw_update_plugin._version_cache = dict() - sw_update_plugin._version_cache_dirty = True - plugin.analytics_handler.add_software_channel_switch_event(old_channel, channel) - - -def _config_octoprint(plugin, tier): - prerelease_channel = None - type = "github_release" - if tier in [SW_UPDATE_TIER_ALPHA, SW_UPDATE_TIER_BETA]: - prerelease_channel = "mrbeam2-{tier}" - - elif tier in [SW_UPDATE_TIER_DEV]: - type = "github_commit" - - return _get_octo_plugin_description( - "octoprint", - tier, - plugin, - type=type, - displayName="OctoPrint", - prerelease=(tier in [SW_UPDATE_TIER_ALPHA, SW_UPDATE_TIER_BETA]), - prerelease_channel=prerelease_channel, - restart="octoprint", - pip="https://github.com/mrbeam/OctoPrint/archive/{target_version}.zip", - ) + reload_update_info(plugin) -def _set_info_mrbeam_plugin(plugin, tier, beamos_date): - branch = "mrbeam2-{tier}" - return _get_octo_plugin_description( - "mrbeam", - tier, - plugin, - displayName=SORT_UP_PREFIX + "MrBeam Plugin", - branch=branch, - branch_default=branch, - repo="MrBeamPlugin", - pip="https://github.com/mrbeam/MrBeamPlugin/archive/{target_version}.zip", - restart="octoprint", - ) +def reload_update_info(plugin): + """ + clears the version cache and refires the get_update_info hook + Args: + plugin: Mr Beam Plugin + Returns: + None + """ + _logger.debug("Reload update info") -def _set_info_mrbeamdoc(plugin, tier): - return _get_octo_plugin_description( - "mrbeamdoc", - tier, - plugin, - displayName="Mr Beam Documentation", - repo="MrBeamDoc", - pip="https://github.com/mrbeam/MrBeamDoc/archive/{target_version}.zip", - restart="octoprint", - ) + # fmt: off + sw_update_plugin = plugin._plugin_manager.get_plugin_info("softwareupdate").implementation + # fmt: on + sw_update_plugin._refresh_configured_checks = True + _clear_version_cache(sw_update_plugin) -def _set_info_netconnectd_plugin(plugin, tier, beamos_date): - branch = "mrbeam2-{tier}" - return _get_octo_plugin_description( - "netconnectd", - tier, - plugin, - displayName="OctoPrint-Netconnectd Plugin", - branch=branch, - branch_default=branch, - repo="OctoPrint-Netconnectd", - pip="https://github.com/mrbeam/OctoPrint-Netconnectd/archive/{target_version}.zip", - restart="octoprint", - ) +@logExceptions +def _set_info_from_cloud_config(plugin, tier, beamos_version, cloud_config): + """ + loads update info from the update_info.json file + the override order: default_settings->module_settings->tier_settings->beamos_settings + and if there are update_settings set in the config.yaml they will replace all of the module + the json file should look like: + { + "default": {} + "modules": { + : { + , + :{}, + "beamos_version": { + "X.X.X": {} # only supports major minor patch + } + } + "dependencies: {} + } + } + Args: + plugin: Mr Beam Plugin + tier: the software tier which should be used + beamos_version: the version of the running beamos + cloud_config: the update config from the cloud + + Returns: 
+ software update information or None + """ + if cloud_config: + sw_update_config = dict() + _logger.debug("update_info {}".format(cloud_config)) + defaultsettings = cloud_config.get("default", None) + modules = cloud_config["modules"] + + try: + for module_id, module in modules.items(): + if tier in SW_UPDATE_TIERS: + sw_update_config[module_id] = {} + + module = dict_merge(defaultsettings, module) + + sw_update_config[module_id] = _generate_config_of_module( + module_id, module, defaultsettings, tier, beamos_version, plugin + ) + except softwareupdate_exceptions.ConfigurationInvalid as e: + _logger.exception("ConfigurationInvalid {}".format(e)) + user_notification_system = plugin.user_notification_system + user_notification_system.show_notifications( + user_notification_system.get_notification( + notification_id="update_fetching_information_err", + err_msg=["E-1003"], + replay=False, + ) + ) + _logger.debug("sw_update_config {}".format(sw_update_config)) -def _set_info_findmymrbeam(plugin, tier): - return _get_octo_plugin_description( - "findmymrbeam", - tier, - plugin, - displayName="OctoPrint-FindMyMrBeam", - repo="OctoPrint-FindMyMrBeam", - pip="https://github.com/mrbeam/OctoPrint-FindMyMrBeam/archive/{target_version}.zip", - restart="octoprint", - ) - + sw_update_file_path = os.path.join( + plugin._settings.getBaseFolder("base"), SW_UPDATE_INFO_FILE_NAME + ) + try: + with open(sw_update_file_path, "w") as f: + f.write(json.dumps(sw_update_config)) + except (IOError, TypeError): + plugin._logger.error("can't create update info file") + user_notification_system = plugin.user_notification_system + user_notification_system.show_notifications( + user_notification_system.get_notification( + notification_id="write_error_update_info_file_err", replay=False + ) + ) + return None -def _set_info_mrbeamledstrips(plugin, tier, beamos_date): - if beamos_date > BEAMOS_LEGACY_DATE: - pip_command = "sudo /usr/local/mrbeam_ledstrips/venv/bin/pip" + return sw_update_config else: - pip_command = GLOBAL_PIP_COMMAND - return _get_package_description_with_version( - "mrbeam-ledstrips", - tier, - package_name="mrbeam-ledstrips", - pip_command=pip_command, - displayName="MrBeam LED Strips", - repo="MrBeamLedStrips", - pip="https://github.com/mrbeam/MrBeamLedStrips/archive/{target_version}.zip", - ) + return None -def _set_info_netconnectd_daemon(plugin, tier, beamos_date): - if beamos_date > BEAMOS_LEGACY_DATE: - branch = "master" - pip_command = "sudo /usr/local/netconnectd/venv/bin/pip" - else: - branch = "mrbeam2-stable" - pip_command = GLOBAL_PIP_COMMAND - package_name = "netconnectd" - # get_package_description does not search for package version. - version = get_version_of_pip_module(package_name, pip_command) - # get_package_description does not force "develop" branch. 
- return _get_package_description( - module_id="netconnectd-daemon", - tier=tier, - package_name=package_name, - displayName="Netconnectd Daemon", - displayVersion=version, - repo="netconnectd_mrbeam", - branch=branch, - branch_default=branch, - pip="https://github.com/mrbeam/netconnectd_mrbeam/archive/{target_version}.zip", - pip_command=pip_command, - ) +def _generate_config_of_module( + module_id, input_moduleconfig, defaultsettings, tier, beamos_version, plugin +): + """ + generates the config of a software module + Args: + module_id: the id of the software module + input_moduleconfig: moduleconfig + defaultsettings: default settings + tier: software tier + beamos_version: version of the beamos + plugin: Mr Beam Plugin + + Returns: + software update informations for the module + """ + if tier in SW_UPDATE_TIERS: + # merge default settings and input is master + input_moduleconfig = dict_merge(defaultsettings, input_moduleconfig) + + # get update info for tier branch + tierversion = _get_tier_by_id(tier) + + if tierversion in input_moduleconfig: + input_moduleconfig = dict_merge( + input_moduleconfig, input_moduleconfig[tierversion] + ) # set tier config from default settings + + # have to be after the default config from file + + input_moduleconfig = dict_merge( + input_moduleconfig, + _generate_config_of_beamos(input_moduleconfig, beamos_version, tierversion), + ) + if "branch" in input_moduleconfig and "{tier}" in input_moduleconfig["branch"]: + input_moduleconfig["branch"] = input_moduleconfig["branch"].format( + tier=_get_tier_by_id(tier) + ) -def _set_info_iobeam(plugin, tier, beamos_date): - if beamos_date > BEAMOS_LEGACY_DATE: - pip_command = "sudo /usr/local/iobeam/venv/bin/pip" - else: - pip_command = GLOBAL_PIP_COMMAND - return _get_package_description_with_version( - module_id="iobeam", - tier=tier, - package_name="iobeam", - pip_command=pip_command, - displayName="iobeam", - type="bitbucket_commit", - repo="iobeam", - api_user="MrBeamDev", - api_password="v2T5pFkmdgDqbFBJAqrt", - pip="git+ssh://git@bitbucket.org/mrbeam/iobeam.git@{target_version}", - ) + if "update_script" in input_moduleconfig: + if "update_script_relative_path" not in input_moduleconfig: + raise softwareupdate_exceptions.ConfigurationInvalid( + "update_script_relative_path is missing in update config for {}".format( + module_id + ) + ) + try: + if not os.path.isdir(input_moduleconfig["update_folder"]): + os.makedirs(input_moduleconfig["update_folder"]) + except (IOError, OSError) as e: + _logger.error( + "could not create folder {} e:{}".format( + input_moduleconfig["update_folder"], e + ) + ) + user_notification_system = plugin.user_notification_system + user_notification_system.show_notifications( + user_notification_system.get_notification( + notification_id="update_fetching_information_err", + err_msg=["E-1002"], + replay=False, + ) + ) + update_script_path = os.path.join( + plugin._basefolder, input_moduleconfig["update_script_relative_path"] + ) + input_moduleconfig["update_script"] = input_moduleconfig[ + "update_script" + ].format(update_script=update_script_path) + current_version = _get_curent_version(input_moduleconfig, module_id, plugin) -def _set_info_mrb_hw_info(plugin, tier, beamos_date): - if beamos_date > BEAMOS_LEGACY_DATE: - pip_command = "sudo /usr/local/iobeam/venv/bin/pip" - else: - pip_command = GLOBAL_PIP_COMMAND - return _get_package_description_with_version( - module_id="mrb_hw_info", - tier=tier, - package_name="mrb-hw-info", - pip_command=pip_command, - 
displayName="mrb_hw_info", - type="bitbucket_commit", - repo="mrb_hw_info", - api_user="MrBeamDev", - api_password="v2T5pFkmdgDqbFBJAqrt", - pip="git+ssh://git@bitbucket.org/mrbeam/mrb_hw_info.git@{target_version}", - ) + if module_id != "octoprint": + _logger.debug( + "{module_id} current version: {current_version}".format( + module_id=module_id, current_version=current_version + ) + ) + input_moduleconfig["displayVersion"] = ( + current_version if current_version else "-" + ) + if "name" in input_moduleconfig: + input_moduleconfig["displayName"] = input_moduleconfig["name"] + + input_moduleconfig = _clean_update_config(input_moduleconfig) + + if "dependencies" in input_moduleconfig: + for dependencie_name, dependencie_config in input_moduleconfig[ + "dependencies" + ].items(): + input_moduleconfig["dependencies"][ + dependencie_name + ] = _generate_config_of_module( + dependencie_name, + dependencie_config, + {}, + tier, + beamos_version, + plugin, + ) + return input_moduleconfig + + +def _get_curent_version(input_moduleconfig, module_id, plugin): + """ + returns the version of the given module + + Args: + input_moduleconfig (dict): module to get the version for + module_id (str): id of the module + plugin (:obj:`OctoPrint Plugin`): Mr Beam Plugin + + Returns: + version of the module or None + """ + # get version number + current_version = None + if ( + "global_pip_command" in input_moduleconfig + and "pip_command" not in input_moduleconfig + ): + input_moduleconfig["pip_command"] = GLOBAL_PIP_COMMAND + if "pip_command" in input_moduleconfig: + # get version number of pip modules + pip_command = input_moduleconfig["pip_command"] + # if global_pip_command is set module is installed outside of our virtualenv therefor we can't use default pip command. + # /usr/local/lib/python2.7/dist-packages must be writable for pi user otherwise OctoPrint won't accept this as a valid pip command + # pip_command = GLOBAL_PIP_COMMAND + package_name = ( + input_moduleconfig["package_name"] + if "package_name" in input_moduleconfig + else module_id + ) + current_version_global_pip = get_version_of_pip_module( + package_name, pip_command + ) + if current_version_global_pip is not None: + current_version = current_version_global_pip -@logExceptions -def _get_octo_plugin_description(module_id, tier, plugin, **kwargs): - """Additionally get the version from plugin manager (doesn't it do that by default??)""" - # Commented pluginInfo -> If the module is not installed, then it Should be. 
- pluginInfo = plugin._plugin_manager.get_plugin_info(module_id) - if pluginInfo is None: - display_version = None else: - display_version = pluginInfo.version - if tier == SW_UPDATE_TIER_DEV: - # Fix: the develop branches are not formatted as "mrbeam2-{tier}" - _b = DEFAULT_REPO_BRANCH_ID[SW_UPDATE_TIER_DEV] - kwargs.update(branch=_b, branch_default=_b) - return _get_package_description( - module_id=module_id, tier=tier, displayVersion=display_version, **kwargs + # get versionnumber of octoprint plugin + pluginInfo = plugin._plugin_manager.get_plugin_info(module_id) + if pluginInfo is not None: + current_version = pluginInfo.version + return current_version + + +class VersionComperator: + """ + Version Comperator class to compare two versions with the compare method + """ + + def __init__(self, identifier, priority, compare): + self.identifier = identifier + self.priority = priority + self.compare = compare + + @staticmethod + def get_comperator(comparision_string, comparision_options): + """ + returns the comperator of the given list of VersionComperator with the matching identifier + + Args: + comparision_string (str): identifier to search for + comparision_options (list): list of VersionComperator objects + + Returns: + object: matching VersionComperator object + """ + for item in comparision_options: + if item.identifier == comparision_string: + return item + + +def _generate_config_of_beamos(moduleconfig, beamos_version, tierversion): + """ + generates the config for the given beamos_version of the tierversion + + Args: + moduleconfig (dict): update config of the module + beamos_version (str): version of the beamos + tierversion (str): software tier + + Returns: + dict: beamos config of the tierversion + """ + if "beamos_version" not in moduleconfig: + _logger.debug("no beamos_version set in moduleconfig") + return {} + + config_for_beamos_versions = moduleconfig.get("beamos_version") + + comparision_options = [ + VersionComperator("__eq__", 5, operator.eq), + VersionComperator("__le__", 4, operator.le), + VersionComperator("__lt__", 3, operator.lt), + VersionComperator("__ge__", 2, operator.ge), + VersionComperator("__gt__", 1, operator.gt), + ] + + sorted_config_for_beamos_versions = sorted( + config_for_beamos_versions.items(), + key=lambda com: VersionComperator.get_comperator( + com[0], comparision_options + ).priority, ) - -@logExceptions -def _get_package_description_with_version( - module_id, tier, package_name, pip_command, **kwargs -): - """Additionally get the version diplayed through pip_command""" - if tier == SW_UPDATE_TIER_DEV: - # Fix: the develop branches are not formatted as "mrbeam2-{tier}" - _b = DEFAULT_REPO_BRANCH_ID[SW_UPDATE_TIER_DEV] - kwargs.update(branch=_b, branch_default=_b) - - version = get_version_of_pip_module(package_name, pip_command) - if version: - kwargs.update(dict(displayVersion=version)) - - return _get_package_description( - module_id=module_id, tier=tier, pip_command=pip_command, **kwargs + config_for_beamos = get_config_for_version( + beamos_version, sorted_config_for_beamos_versions, comparision_options ) + if tierversion in config_for_beamos: + beamos_config_module_tier = config_for_beamos.get(tierversion) + config_for_beamos = dict_merge( + config_for_beamos, beamos_config_module_tier + ) # override tier config from tiers set in config_file -def _get_package_description( - module_id, - tier, - displayName=None, - type="github_commit", - user="mrbeam", - branch="mrbeam2-{tier}", - branch_default="mrbeam2-{tier}", - restart="environment", 
- prerelease_channel=None, - **kwargs -): - """Shorthand to create repo details for octoprint software update plugin to handle.""" - displayName = displayName or module_id - if "{tier}" in branch: - branch = branch.format(tier=get_tier_by_id(tier)) - if "{tier}" in branch_default: - branch_default = branch_default.format(tier=get_tier_by_id(tier)) - if prerelease_channel and "{tier}" in prerelease_channel: - kwargs.update(prerelease_channel=prerelease_channel.format(tier=get_tier_by_id(tier))) - if tier in (SW_UPDATE_TIER_DEV, SW_UPDATE_TIER_ALPHA): - # adds pip upgrade flag in the develop tier so it will do a upgrade even without a version bump - kwargs.update(pip_upgrade_flag=True) - update_info = dict( - tier=tier, - displayName=displayName, - user=user, - type=type, - branch=branch, - branch_default=branch_default, - restart=restart, - **kwargs - ) - return {module_id: update_info} - + return config_for_beamos -def get_tier_by_id(tier): - return DEFAULT_REPO_BRANCH_ID.get(tier, tier) +def get_config_for_version(target_version, config, comparision_options): + config_to_be_updated = {} + for comperator, version_config_items in config: + # sort the version config items by the version + sorted_version_config_items = sorted( + version_config_items.items(), + key=lambda version_config_tuple: pkg_resources.parse_version( + version_config_tuple[0] + ), + ) -def _is_override_in_settings(plugin, module_id): - settings_path = ["plugins", "softwareupdate", "checks", module_id, "override"] - is_override = plugin._settings.global_get(settings_path) - if is_override: - _logger.info("Module %s has overriding config in settings!", module_id) - return True - return False + for check_version, version_config in sorted_version_config_items: + if VersionComperator.get_comperator( + comperator, comparision_options + ).compare(target_version, check_version): + config_to_be_updated = dict_merge(config_to_be_updated, version_config) + return config_to_be_updated + + +def _clean_update_config(update_config): + """ + removes working parameters from the given config + Args: + update_config: update config information + + Returns: + cleaned version of the update config + """ + pop_list = ["alpha", "beta", "stable", "develop", "beamos_version", "name"] + for key in set(update_config).intersection(pop_list): + del update_config[key] + return update_config + + +def _get_tier_by_id(tier): + """ + returns the tier name with the given id + Args: + tier: id of the software tier + + Returns: + softwaretier name + """ + return DEFAULT_REPO_BRANCH_ID.get(tier, tier) diff --git a/octoprint_mrbeam/static/css/mrbeam.css b/octoprint_mrbeam/static/css/mrbeam.css index a077bfb62..30224dc15 100644 --- a/octoprint_mrbeam/static/css/mrbeam.css +++ b/octoprint_mrbeam/static/css/mrbeam.css @@ -2647,6 +2647,13 @@ input.search-query, bottom: 0; } +/* +hides the force update buttons + */ +#settings_plugin_softwareupdate_scroll_wrapper div:nth-of-type(4) { + display:none; +} + /* * === FRESHDESK FEEDBACK WIDGET === */ diff --git a/octoprint_mrbeam/static/js/design_store.js b/octoprint_mrbeam/static/js/design_store.js index 07c9aea18..53b671bf5 100644 --- a/octoprint_mrbeam/static/js/design_store.js +++ b/octoprint_mrbeam/static/js/design_store.js @@ -126,14 +126,21 @@ $(function () { $("#design_store_iframe").show(); $("#design_store_offline_placeholder").hide(); + // TODO: remove the following Version sanitization once the version + // comparative methods support "pep440" versioning (SW-1047) + // Regex to extract the base version from a 
version string + // 0.10.2-alpha --> 0.10.2 (SemVer) + // 0.11.78a0 --> 0.11.78 (PEP440) + // 0+unknown --> 0 (No version info) + let regexp = /([0-9]+(?:\.[0-9]+)*)/g; + let mrbeamPluginVersion = BEAMOS_VERSION.match(regexp)[0]; + console.log("Design store: Mr Beam Plugin Version: " + mrbeamPluginVersion ); + let userData = { email: self.getEmail(), serial: MRBEAM_SERIAL, user_token: self.getAuthToken(), - // TODO: remove the following sanitization (SW-1046) once the version - // comparative methods support "-hotfix..." verisoning (SW-1047) - // Remove any versioning characters after "-" like "-hotfix" - version: BEAMOS_VERSION.split("-")[0], + version: mrbeamPluginVersion, language: MRBEAM_LANGUAGE, last_uploaded: self.getLastUploadedDate(), }; diff --git a/octoprint_mrbeam/static/js/software_channel_selector.js b/octoprint_mrbeam/static/js/software_channel_selector.js index a50aa95e0..6b5d48fc9 100644 --- a/octoprint_mrbeam/static/js/software_channel_selector.js +++ b/octoprint_mrbeam/static/js/software_channel_selector.js @@ -6,6 +6,7 @@ $(function () { self.loginState = params[0]; self.settings = params[1]; self.softwareUpdate = params[2]; + self.analytics = params[3]; self.selector = ko.observable("PROD"); self.available_channels = ko.observableArray([]); @@ -91,6 +92,16 @@ $(function () { self.softwareUpdate.performCheck(true, false, true); }; + // get the hook when softwareUpdate perform the Updatecheck to force the update on the normal button + self.performCheck_copy = self.softwareUpdate.performCheck; + self.softwareUpdate.performCheck= function(showIfNothingNew, force, ignoreSeen) { + self.reload_update_info(); + if (force !== undefined) { + force = true; //only forces the update check if it was disabled ("check for update" button press) + } + self.performCheck_copy(showIfNothingNew, force, ignoreSeen); + }; + /** * This one wraps all content of the #settings_plugin_softwareupdate elem into a div * which makes the whole page scrollable. it's a bit tricky/dirty because the content comes from OP. @@ -116,18 +127,31 @@ $(function () { ); button.addClass("sticky-footer"); }; + self.reload_update_info = function(){ + OctoPrint.post("plugin/mrbeam/info/update") + .done(function (response) { + }) + .fail(function (error) { + console.error("Unable to reload update info."); + self.analytics.send_fontend_event("update_info_call_failure", {error_message: error}) + console.error("test"); + }); + } } + let DOM_ELEMENT_TO_BIND_TO = "software_channel_selector"; + // view model class, parameters for constructor, container to bind to OCTOPRINT_VIEWMODELS.push([ SoftwareChannelSelector, // e.g. loginStateViewModel, settingsViewModel, ... - ["loginStateViewModel", "settingsViewModel", "softwareUpdateViewModel"], + ["loginStateViewModel", "settingsViewModel", "softwareUpdateViewModel", "analyticsViewModel"], // e.g. #settings_plugin_mrbeam, #tab_plugin_mrbeam, ... [document.getElementById(DOM_ELEMENT_TO_BIND_TO)], ]); }); + diff --git a/octoprint_mrbeam/static/js/user_notification_viewmodel.js b/octoprint_mrbeam/static/js/user_notification_viewmodel.js index 8fbb03626..002dd9dc5 100644 --- a/octoprint_mrbeam/static/js/user_notification_viewmodel.js +++ b/octoprint_mrbeam/static/js/user_notification_viewmodel.js @@ -51,6 +51,30 @@ $(function () { type: "info", hide: true, }, + missing_updateinformation_info: { + title: gettext("No update information"), + text: gettext( + "No information about available updates could be retrieved, please try again later. 
Errorcode: E-1000" + ), + type: "info", + hide: false, + }, + write_error_update_info_file_err: { + title: gettext("Error during fetching update information"), + text: gettext( + "There was a error during fetching the update information Errorcode: E-1001" + ), + type: "error", + hide: false, + }, + update_fetching_information_err: { + title: gettext("Error during fetching update information"), + text: gettext( + "There was a error during fetching the update information, please try again later." + ), + type: "error", + hide: false, + }, err_cam_conn_err: { title: gettext("Camera Error"), text: gettext( diff --git a/octoprint_mrbeam/static/js/working_area.js b/octoprint_mrbeam/static/js/working_area.js index 0caecdbb7..07b192375 100644 --- a/octoprint_mrbeam/static/js/working_area.js +++ b/octoprint_mrbeam/static/js/working_area.js @@ -939,6 +939,14 @@ $(function () { newSvg.unref(true); + // remove non-visible text elements (no text, TODO: just whitespace) + const textElements = newSvg.selectAll("text"); + textElements.forEach((t) => { + const bb = t.getBBox(); + if (bb.width === 0 || bb.height === 0) { + t.remove(); + } + }); // handle texts var hasText = newSvg.selectAll("text,tspan"); if (hasText && hasText.length > 0) { diff --git a/octoprint_mrbeam/templates/mrbeam_ui_index.jinja2 b/octoprint_mrbeam/templates/mrbeam_ui_index.jinja2 index 2adb0a603..e44708596 100644 --- a/octoprint_mrbeam/templates/mrbeam_ui_index.jinja2 +++ b/octoprint_mrbeam/templates/mrbeam_ui_index.jinja2 @@ -98,13 +98,9 @@ {{ _('Exit Fullscreen') }}
-                    {% if model in ["MRBEAM2", "MRBEAM2_DC_R1"] %}
-                    {{ _('Quickstart Guide') }}
-                    {{ _('User Manual') }}
-                    {% else %}
-                    {{ _('Quickstart Guide') }}
-                    {{ _('User Manual') }}
-                    {% endif %}
+                    {% for document in burger_menu_model.documents %}
+                    {{ document.title }}
+                    {% endfor %}
                     {{ _('Support') }}
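Both template hunks (the burger menu above and the about settings below) only render what the new service layer hands them. A rough usage sketch follows; the service classes, model attributes and the MrBeamModel enum come from this diff and its tests, the logger and print statements are illustrative, and the burger menu call resolves the UI locale via get_locale, so outside a request context it may need patching just as the tests do:

import logging

from octoprint_mrbeamdoc.enum.mrbeam_model import MrBeamModel

from octoprint_mrbeam.services.burger_menu_service import BurgerMenuService
from octoprint_mrbeam.services.document_service import DocumentService
from octoprint_mrbeam.services.settings_service import SettingsService

logger = logging.getLogger(__name__)
document_service = DocumentService(logger)

# Settings page: one entry per support document, one link per supported language.
settings_model = SettingsService(logger, document_service).get_template_settings_model(
    MrBeamModel.MRBEAM2.value
)
for document in settings_model.about.support_documents:
    for link in document.document_links:
        print(document.title, link.language.value, link.url)

# Burger menu: each document carries a single link already resolved for the UI locale.
burger_menu_model = BurgerMenuService(logger, document_service).get_burger_menu_model(
    MrBeamModel.MRBEAM2.value
)
for document in burger_menu_model.documents:
    print(document.title, document.document_link.language.value)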
diff --git a/octoprint_mrbeam/templates/settings/about_settings.jinja2 b/octoprint_mrbeam/templates/settings/about_settings.jinja2
index b31a05071..3abda45c0 100644
--- a/octoprint_mrbeam/templates/settings/about_settings.jinja2
+++ b/octoprint_mrbeam/templates/settings/about_settings.jinja2
@@ -67,35 +67,15 @@
                 {{ _('Documentation, Support and Privacy') }}
-                {{ _('Quickstart Guide') }}:
-                {% if model in ["MRBEAM2", "MRBEAM2_DC_R1"] %}
-                en |
-                de |
-                {% else %}
-                en |
-                de |
-                {% endif %}
-                {{ _('get the latest versions') }} {{ _('online') }}
-
-                {{ _('User Manual') }}:
-                {% if model in ["MRBEAM2", "MRBEAM2_DC_R1"] %}
-                en |
-                de |
-                es |
-                fr |
-                it |
-                fi |
-                {% else %}
-                en |
-                de |
-                es |
-                fr |
-                it |
-                nl |
-                fi |
-                {% endif %}
-                {{ _('get the latest versions') }} {{ _('online') }}
-
+                {% for document in settings_model.about.support_documents %}
+                {{ document.title }}
+                {% for link in document.document_links | sort_enum(attribute='language') %}
+                {{ link.language.value }} |
+                {% endfor %}
+                {{ _('get the latest versions') }} {{ _('online') }}
+
+                {% endfor %}
      {{ _('Online Support Portal') }}: mr-beam.org/support
      {{ _('Privacy Policies') }}: diff --git a/octoprint_mrbeam/util/connectivity_checker.py b/octoprint_mrbeam/util/connectivity_checker.py new file mode 100644 index 000000000..8e4ab5a7b --- /dev/null +++ b/octoprint_mrbeam/util/connectivity_checker.py @@ -0,0 +1,48 @@ +import threading +import logging + + +class ConnectivityChecker(object): + """Abstraction of Octoprints connectivity checker 'util/__init__.py #1300'""" + + def __init__(self, plugin): + self._check_worker = None + self._check_mutex = threading.RLock() + self._plugin = plugin + + self._logger = logging.getLogger( + "octoprint.plugins." + __name__ + ".connectivity_checker" + ) + + @property + def online(self): + """ + + Args: + + Returns: + boolean: is the device connected to the internet, returns None if the octoprint checker is disabled + + """ + with self._check_mutex: + # if the octoprint connectivity checker is disabled return None instead of true + if self._plugin._octoprint_connectivity_checker.enabled: + # returns the value of the octoprint connectifity checker, this value returns true if the octoprint onlinechecker is disabled + return self._plugin._octoprint_connectivity_checker.online + else: + return None + + def check_immediately(self): + """ + checks immediatley for a internet connection and don't wait for the interval + + Args: + + Returns: + boolean: is the device connected to the internet + + """ + with self._check_mutex: + # calls the octoprint check_immediately methode to run the checker immediately + self._plugin._octoprint_connectivity_checker.check_immediately() + return self.online diff --git a/octoprint_mrbeam/util/github_api.py b/octoprint_mrbeam/util/github_api.py new file mode 100644 index 000000000..cf277b509 --- /dev/null +++ b/octoprint_mrbeam/util/github_api.py @@ -0,0 +1,58 @@ +""" +This util contains all the necessary methods to communicate with the github api +""" +import base64 + +from requests.adapters import HTTPAdapter, MaxRetryError +from requests import ConnectionError +from urllib3 import Retry + +from octoprint_mrbeam.mrb_logger import mrb_logger +import requests +import json + +_logger = mrb_logger("octoprint.plugins.mrbeam.util.github_api") + + +def get_file_of_repo_for_tag(file, repo, tag): + """ + return the content of the of the repo for the given tag/branch/hash + + Args: + file: file + tag: tag/branch/hash + repo: github repository + + Returns: + content of file + """ + try: + url = "https://api.github.com/repos/mrbeam/{repo}/contents/{file}?ref={tag}".format( + repo=repo, file=file, tag=tag + ) + + headers = { + "Accept": "application/json", + } + + s = requests.Session() + retry = Retry(connect=3, backoff_factor=0.3) + adapter = HTTPAdapter(max_retries=retry) + s.mount("https://", adapter) + s.keep_alive = False + + response = s.request("GET", url, headers=headers) + except MaxRetryError: + _logger.warning("timeout while trying to get the file") + return None + except ConnectionError: + _logger.warning("connection error while trying to get the file") + return None + + if response: + json_data = json.loads(response.text) + content = base64.b64decode(json_data["content"]) + return content + else: + _logger.warning("no valid response for the file - {}".format(response)) + return None diff --git a/octoprint_mrbeam/util/pip_util.py b/octoprint_mrbeam/util/pip_util.py index 491a0a265..d6a65818c 100644 --- a/octoprint_mrbeam/util/pip_util.py +++ b/octoprint_mrbeam/util/pip_util.py @@ -1,3 +1,5 @@ +from octoprint.plugins.softwareupdate.updaters.pip import _get_pip_caller +from 
octoprint.util.pip import PipCaller from octoprint_mrbeam.mrb_logger import mrb_logger from cmd_exec import exec_cmd_output @@ -62,3 +64,41 @@ def get_version_of_pip_module(pip_name, pip_command=None, disable_pip_ver_check= break _logger.debug("%s==%s", pip_name, version) return version + + +def get_pip_caller(venv, _logger=None): + """ + gets the pip caller of the givenv venv + + Args: + venv: path to venv + _logger: logger to log call, stdout and stderr of the pip caller + + Returns: + PipCaller of the venv + """ + pip_caller = _get_pip_caller(command=venv) + if not isinstance(pip_caller, PipCaller): + raise RuntimeError("Can't run pip", None) + + def _log_call(*lines): + _log(lines, prefix=" ", stream="call") + + def _log_stdout(*lines): + _log(lines, prefix=">", stream="stdout") + + def _log_stderr(*lines): + _log(lines, prefix="!", stream="stderr") + + def _log(lines, prefix=None, stream=None, strip=True): + if strip: + lines = map(lambda x: x.strip(), lines) + for line in lines: + print(u"{} {}".format(prefix, line)) + + if _logger is not None: + pip_caller.on_log_call = _log_call + pip_caller.on_log_stdout = _log_stdout + pip_caller.on_log_stderr = _log_stderr + + return pip_caller diff --git a/octoprint_mrbeam/util/string_util.py b/octoprint_mrbeam/util/string_util.py new file mode 100644 index 000000000..757790dcc --- /dev/null +++ b/octoprint_mrbeam/util/string_util.py @@ -0,0 +1,11 @@ +import re + + +def separate_camelcase_words(string, separator=' '): + if string is None: + return '' + return remove_extra_spaces(re.sub(r"(\w)([A-Z])", r"\1" + separator + r"\2", string)) + + +def remove_extra_spaces(string): + return re.sub(' +', ' ', string).strip() diff --git a/pytest.ini b/pytest.ini index 5410868eb..4a0c61b46 100644 --- a/pytest.ini +++ b/pytest.ini @@ -8,7 +8,7 @@ log_cli = True log_cli_format = %(asctime)s %(levelname)s %(message)s log_cli_level = DEBUG log_date_format = %H:%M:%S -log_file = tests/logs/pytest-logs.txt +log_file = pytest-logs.txt log_file_date_format = %Y-%m-%d %H:%M:%S log_file_format = %(asctime)s %(levelname)s %(message)s log_file_level = INFO diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 000000000..9042a9b78 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,15 @@ + +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. + +[metadata] +description-file = README.md + +[versioneer] +VCS = git +style = pep440-post +versionfile_source = octoprint_mrbeam/_version.py +versionfile_build = octoprint_mrbeam/_version.py +tag_prefix = v +parentdir_prefix = diff --git a/setup.py b/setup.py index 86ea6791a..4d751c8ee 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,5 @@ # coding=utf-8 - - -execfile("octoprint_mrbeam/__version.py") +import versioneer ######################################################################################################################## ### Do not forget to adjust the following variables to your own plugin. @@ -17,7 +15,7 @@ plugin_name = "Mr_Beam" # The plugin's version. Can be overwritten within OctoPrint's internal data via __plugin_version__ in the plugin module -plugin_version = __version__ +plugin_version = versioneer.get_version() # The plugin's description. 
Can be overwritten within OctoPrint's internal data via __plugin_description__ in the plugin # module @@ -48,6 +46,8 @@ "pillow", "lxml", "numpy", + "pyyaml", + "enum34", picamera, ] @@ -106,6 +106,7 @@ package=plugin_package, name=plugin_name, version=plugin_version, + cmdclass=versioneer.get_cmdclass(), description=plugin_description, author=plugin_author, mail=plugin_author_email, diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/logger/__init__.py b/tests/logger/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/logger/test_logger.py b/tests/logger/test_logger.py new file mode 100644 index 000000000..815b0bb29 --- /dev/null +++ b/tests/logger/test_logger.py @@ -0,0 +1,33 @@ +class LoggerMock: + def __init__(self): + pass + + def comm(self, msg, *args, **kwargs): + pass + + def debug(self, msg, *args, **kwargs): + pass + + def info(self, msg, *args, **kwargs): + pass + + def warn(self, msg, *args, **kwargs): + pass + + def warning(self, msg, *args, **kwargs): + pass + + def error(self, msg, *args, **kwargs): + pass + + def exception(self, msg, *args, **kwargs): + pass + + def critical(self, msg, *args, **kwargs): + pass + + def setLevel(self, *args, **kwargs): + pass + + def log(self, level, msg, *args, **kwargs): + pass diff --git a/tests/migrations/test-migration-Mig001.py b/tests/migrations/test-migration-Mig001.py index d85a2e407..00f6f8b8e 100644 --- a/tests/migrations/test-migration-Mig001.py +++ b/tests/migrations/test-migration-Mig001.py @@ -6,23 +6,37 @@ class TestMigrationMig001(unittest.TestCase): """ Testclass for the migration Mig001 """ + def setUp(self): self.m001 = Mig001NetconnectdDisableLogDebugLevel(None) def test_beamos_versions(self): # beamos versions where the migration should not run - self.assertFalse(self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "0.14.0")) - self.assertFalse(self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "0.18.2")) + self.assertFalse( + self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "0.14.0") + ) + self.assertFalse( + self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "0.18.2") + ) # beamos versions where the migration should run - self.assertTrue(self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "0.18.0")) - self.assertTrue(self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "0.18.1")) + self.assertTrue( + self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "0.18.0") + ) + self.assertTrue( + self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "0.18.1") + ) # not matching pattern strings - self.assertFalse(self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, None)) - self.assertFalse(self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "14.0")) - self.assertFalse(self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "0")) + self.assertFalse( + self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, None) + ) + self.assertFalse( + self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "14.0") + ) + self.assertFalse( + self.m001.shouldrun(Mig001NetconnectdDisableLogDebugLevel, "0") + ) def test_migration_id(self): - self.assertEqual(self.m001.id, '001') - + self.assertEqual(self.m001.id, "001") diff --git a/tests/migrations/test-migration-Mig002.py b/tests/migrations/test-migration-Mig002.py new file mode 100644 index 000000000..56475ed48 --- /dev/null +++ b/tests/migrations/test-migration-Mig002.py @@ -0,0 +1,29 @@ +from octoprint_mrbeam.migration.Mig002 
import Mig002EnableOnlineCheck +import unittest + + +class TestMigrationMig002(unittest.TestCase): + """ + Testclass for the migration Mig001 + """ + + def setUp(self): + self.m002 = Mig002EnableOnlineCheck(None) + + def test_beamos_versions(self): + # beamos versions where the migration should not run + self.assertFalse(self.m002.shouldrun(Mig002EnableOnlineCheck, "0.18.3")) + + # beamos versions where the migration should run + self.assertTrue(self.m002.shouldrun(Mig002EnableOnlineCheck, "0.14.0")) + self.assertTrue(self.m002.shouldrun(Mig002EnableOnlineCheck, "0.18.0")) + self.assertTrue(self.m002.shouldrun(Mig002EnableOnlineCheck, "0.18.1")) + self.assertTrue(self.m002.shouldrun(Mig002EnableOnlineCheck, "0.18.2")) + + # not matching pattern strings + self.assertFalse(self.m002.shouldrun(Mig002EnableOnlineCheck, None)) + self.assertFalse(self.m002.shouldrun(Mig002EnableOnlineCheck, "14.0")) + self.assertFalse(self.m002.shouldrun(Mig002EnableOnlineCheck, "0")) + + def test_migration_id(self): + self.assertEqual(self.m002.id, "002") diff --git a/tests/rest_handler/__init__.py b/tests/rest_handler/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/rest_handler/test_docs_handler.py b/tests/rest_handler/test_docs_handler.py new file mode 100644 index 000000000..c98df51bd --- /dev/null +++ b/tests/rest_handler/test_docs_handler.py @@ -0,0 +1,45 @@ +from unittest import TestCase + +from mock import patch, MagicMock +from octoprint_mrbeamdoc.enum.mrbeam_doctype import MrBeamDocType +from octoprint_mrbeamdoc.enum.mrbeam_model import MrBeamModel +from octoprint_mrbeamdoc.enum.supported_languages import SupportedLanguage +from werkzeug.exceptions import NotFound + +from octoprint_mrbeam import DocsRestHandlerMixin +from tests.logger.test_logger import LoggerMock + + +class TestDocsRestHandlerMixin(TestCase): + def setUp(self): + super(TestDocsRestHandlerMixin, self).setUp() + self.docs_handler = DocsRestHandlerMixin() + self.docs_handler._logger = LoggerMock() + + def test_unknown_model__then_returns_not_found(self): + self.assertRaises(NotFound, self.docs_handler.get_doc, + 'unknown', + MrBeamDocType.QUICKSTART_GUIDE.value, + SupportedLanguage.ENGLISH.value, + 'pdf') + + def test_unknown_type__then_returns_not_found(self): + self.assertRaises(NotFound, self.docs_handler.get_doc, + MrBeamModel.MRBEAM2.value, + 'unknown', + SupportedLanguage.ENGLISH.value, + 'pdf') + + def test_unsupported_language__then_returns_not_found(self): + self.assertRaises(NotFound, self.docs_handler.get_doc, + MrBeamModel.MRBEAM2.value, + MrBeamDocType.QUICKSTART_GUIDE.value, + 'unsupported', + 'pdf') + + @patch('octoprint_mrbeam.rest_handler.docs_handler.send_file') + def test_existing_file_request__then_send_file_is_called(self, send_file_mock): + send_file_mock.return_value = MagicMock(status_code=200, response='') + doc = self.docs_handler.get_doc(MrBeamModel.MRBEAM2.value, MrBeamDocType.QUICKSTART_GUIDE.value, + SupportedLanguage.ENGLISH.value, 'pdf') + send_file_mock.assert_called_once() diff --git a/tests/services/__init__.py b/tests/services/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/services/test_burger_menu_service.py b/tests/services/test_burger_menu_service.py new file mode 100644 index 000000000..3551d5306 --- /dev/null +++ b/tests/services/test_burger_menu_service.py @@ -0,0 +1,54 @@ +from unittest import TestCase + +from mock import patch, MagicMock +from octoprint_mrbeamdoc.enum.mrbeam_doctype import MrBeamDocType +from 
octoprint_mrbeamdoc.enum.mrbeam_model import MrBeamModel +from octoprint_mrbeamdoc.enum.supported_languages import SupportedLanguage +from octoprint_mrbeamdoc.model.mrbeam_doc_definition import MrBeamDocDefinition + +from octoprint_mrbeam.services.document_service import DocumentService +from octoprint_mrbeam.services.burger_menu_service import BurgerMenuService +from tests.logger.test_logger import LoggerMock + + +class TestBurgerMenuService(TestCase): + + def setUp(self): + super(TestBurgerMenuService, self).setUp() + logger = LoggerMock() + self._burger_menu_service = BurgerMenuService(logger, DocumentService(logger)) + + def test_get_burger_menu_model__with_none__should_return_empty_burger_menu_model(self): + burger_menu_model = self._burger_menu_service.get_burger_menu_model(None) + self.assertIs(len(burger_menu_model.documents), 0) + + @patch('octoprint_mrbeam.services.burger_menu_service.get_locale') + def test_get_burger_menu_model__with_unsupported_language__should_return_default_to_english(self, get_locale_mock): + get_locale_mock.return_value = MagicMock(language='ch') + burger_menu_model = self._burger_menu_service.get_burger_menu_model(MrBeamModel.MRBEAM2.value) + self.assertIsNot(len(burger_menu_model.documents), 0) + for document in burger_menu_model.documents: + self.assertEquals(document.document_link.language, SupportedLanguage.ENGLISH) + + @patch('octoprint_mrbeam.services.burger_menu_service.MrBeamDocUtils.get_mrbeam_definitions_for') + @patch('octoprint_mrbeam.services.burger_menu_service.get_locale') + def test_get_burger_menu_model__with_language_not_valid_for_definition__should_fallback_to_english(self, + get_locale_mock, + get_mrbeam_definitions_for_mock): + get_locale_mock.return_value = MagicMock(language='de') + MOCK_DEFINITION = MrBeamDocDefinition(MrBeamDocType.QUICKSTART_GUIDE, MrBeamModel.MRBEAM2, + [SupportedLanguage.ENGLISH]) + get_mrbeam_definitions_for_mock.return_value = [MOCK_DEFINITION] + burger_menu_model = self._burger_menu_service.get_burger_menu_model(MrBeamModel.MRBEAM2.value) + self.assertIsNot(len(burger_menu_model.documents), 0) + for document in burger_menu_model.documents: + self.assertEquals(document.document_link.language, SupportedLanguage.ENGLISH) + + @patch('octoprint_mrbeam.services.burger_menu_service.get_locale') + def test_get_burger_menu_model__with_supported_language__should_return_documents_in_that_language(self, + get_locale_mock): + get_locale_mock.return_value = MagicMock(language='de') + burger_menu_model = self._burger_menu_service.get_burger_menu_model(MrBeamModel.MRBEAM2.value) + self.assertIsNot(len(burger_menu_model.documents), 0) + for document in burger_menu_model.documents: + self.assertEquals(document.document_link.language, SupportedLanguage.GERMAN) diff --git a/tests/services/test_settings_service.py b/tests/services/test_settings_service.py new file mode 100644 index 000000000..0a63bdef2 --- /dev/null +++ b/tests/services/test_settings_service.py @@ -0,0 +1,57 @@ +from unittest import TestCase + +from octoprint_mrbeamdoc.enum.mrbeam_model import MrBeamModel + +from octoprint_mrbeam import DocumentService +from octoprint_mrbeam.services.settings_service import SettingsService +from tests.logger.test_logger import LoggerMock + + +class TestSettingsService(TestCase): + def setUp(self): + super(TestSettingsService, self).setUp() + logger = LoggerMock() + self._settings_service = SettingsService(logger, DocumentService(logger)) + + def test_get_template_settings_model_with_none__then_return_settings_empty_object(self): 
+ settings_model = self._settings_service.get_template_settings_model(None) + self._validate_empty_settings_model(settings_model) + + def test_get_template_settings_model_with_unknown__then_return_settings_empty_object(self): + settings_model = self._settings_service.get_template_settings_model('unknown') + self._validate_empty_settings_model(settings_model) + + def test_get_template_settings_model_with_mrbeam2__then_return_settings_with_about_and_nonempty_documents(self): + settings_model = self._settings_service.get_template_settings_model(MrBeamModel.MRBEAM2.value) + self._validate_settings_model(settings_model) + + def test_get_template_settings_model_with_dreamcut__then_return_settings_with_about_and_nonempty_documents(self): + settings_model = self._settings_service.get_template_settings_model(MrBeamModel.DREAMCUT_S.value) + self._validate_settings_model(settings_model) + + def _validate_empty_settings_model(self, settings_model): + self.assertIsNotNone(settings_model) + self.assertIsNotNone(settings_model.about) + self.assertIsNotNone(settings_model.about.support_documents) + self.assertEquals(settings_model.about.support_documents, []) + + def _validate_settings_model(self, settings_model): + self.assertIsNotNone(settings_model) + self.assertIsNotNone(settings_model.about) + documents = settings_model.about.support_documents + self.assertIsNotNone(documents) + for document in documents: + self.assertIsNotNone(document) + self.assertIsNotNone(document.title) + for document_link in document.document_links: + self.assertIsNotNone(document_link) + self.assertIsNotNone(document_link.language) + self.assertIsNotNone(document_link.language.name) + self.assertNotEquals(document_link.language.name, '') + self.assertNotEquals(document_link.language.name, ' ') + self.assertIsNotNone(document_link.language.value) + self.assertNotEquals(document_link.language.value, '') + self.assertNotEquals(document_link.language.value, ' ') + self.assertIsNotNone(document_link.url) + self.assertNotEquals(document_link.url, '') + self.assertNotEquals(document_link.url, ' ') diff --git a/tests/softwareupdate/__init__.py b/tests/softwareupdate/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/softwareupdate/mock_config.json b/tests/softwareupdate/mock_config.json new file mode 100644 index 000000000..4b2dca3b0 --- /dev/null +++ b/tests/softwareupdate/mock_config.json @@ -0,0 +1,176 @@ +{ + "default": { + "type": "github_commit", + "user": "mrbeam", + "release_compare": "python_unequal", + "force_base": false, + "stable": { + "branch": "mrbeam2-stable", + "branch_default": "mrbeam2-stable", + "type": "github_commit" + }, + "beta": { + "branch": "mrbeam2-beta", + "branch_default": "mrbeam2-beta", + "type": "github_commit", + "prerelease_channel": "beta", + "prerelease": true + }, + "develop": { + "type": "github_commit", + "branch": "alpha", + "branch_default": "alpha" + }, + "alpha": { + "branch": "mrbeam2-alpha", + "branch_default": "mrbeam2-alpha", + "prerelease_channel": "alpha", + "prerelease": true, + "type": "github_release" + }, + "restart": "environment", + "stable_branch": { + "branch": "stable", + "name": "stable", + "commitish": [ + "stable" + ] + }, + "prerelease_branches": [ + { + "name": "alpha", + "branch": "alpha", + "commitish": [ + "alpha", + "beta", + "stable" + ] + }, + { + "name": "beta", + "branch": "beta", + "commitish": [ + "beta", + "stable" + ] + } + ] + }, + "modules": { + "octoprint": { + "type": "github_release", + "develop": { + "type": "github_commit", 
+ "branch": "alpha", + "branch_default": "alpha" + }, + "alpha": { + "branch": "alpha", + "branch_default": "alpha" + } + }, + "mrbeam": { + "name": " MrBeam Plugin", + "repo": "MrBeamPlugin", + "restart": "environment", + "pip": "https://github.com/mrbeam/MrBeamPlugin/archive/{target_version}.zip", + "develop": { + "update_folder": "/tmp/octoprint/mrbeamplugin", + "update_script_relative_path": "scripts/update_script.py", + "update_script": "{{python}} '{update_script}' --branch={{branch}} --force={{force}} '{{folder}}' {{target}}", + "methode": "update_script" + }, + "alpha": { + "update_folder": "/tmp/octoprint/mrbeamplugin", + "update_script_relative_path": "scripts/update_script.py", + "update_script": "{{python}} '{update_script}' --branch={{branch}} --force={{force}} '{{folder}}' {{target}}", + "methode": "update_script" + }, + "dependencies": { + "mrbeam-ledstrips": { + "repo": "MrBeamLedStrips", + "pip": "https://github.com/mrbeam/MrBeamLedStrips/archive/{target_version}.zip", + "global_pip_command": true, + "beamos_version": { + "__ge__": { + "0.18.0": { + "pip_command": "sudo /usr/local/mrbeam_ledstrips/venv/bin/pip" + } + } + } + }, + "iobeam": { + "repo": "iobeam", + "pip": "git+ssh://git@bitbucket.org/mrbeam/iobeam.git@{target_version}", + "global_pip_command": true, + "beamos_version": { + "__ge__": { + "0.18.0": { + "pip_command": "sudo /usr/local/iobeam/venv/bin/pip" + } + } + } + }, + "mrb-hw-info": { + "repo": "mrb_hw_info", + "pip": "git+ssh://git@bitbucket.org/mrbeam/mrb_hw_info.git@{target_version}", + "global_pip_command": true, + "beamos_version": { + "__ge__": { + "0.18.0": { + "pip_command": "sudo /usr/local/iobeam/venv/bin/pip" + } + } + } + }, + "mrbeamdoc": { + "repo": "MrBeamDoc", + "pip": "https://github.com/mrbeam/MrBeamDoc/archive/{target_version}.zip" + } + } + }, + "netconnectd": { + "name": "OctoPrint-Netconnectd Plugin", + "repo": "OctoPrint-Netconnectd", + "pip": "https://github.com/mrbeam/OctoPrint-Netconnectd/archive/{target_version}.zip", + "restart": "environment", + "dependencies": { + "netconnectd": { + "repo": "netconnectd_mrbeam", + "pip": "https://github.com/mrbeam/netconnectd_mrbeam/archive/{target_version}.zip", + "global_pip_command": true, + "beamos_version": { + "__ge__": { + "0.18.0": { + "pip_command": "sudo /usr/local/netconnectd/venv/bin/pip" + } + }, + "__le__": { + "0.14.0": { + "version": "0.0.1" + } + } + } + } + }, + "develop": { + "update_folder": "/tmp/octoprint/netconnectd", + "update_script_relative_path": "../octoprint_netconnectd/scripts/update_script.py", + "update_script": "{{python}} '{update_script}' --branch={{branch}} --force={{force}} '{{folder}}' {{target}}", + "methode": "update_script" + }, + "alpha": { + "update_folder": "/tmp/octoprint/netconnectd", + "update_script_relative_path": "../octoprint_netconnectd/scripts/update_script.py", + "update_script": "{{python}} '{update_script}' --branch={{branch}} --force={{force}} '{{folder}}' {{target}}", + "methode": "update_script" + } + }, + "findmymrbeam": { + "name": "OctoPrint-FindMyMrBeam", + "repo": "OctoPrint-FindMyMrBeam", + "pip": "https://github.com/mrbeam/OctoPrint-FindMyMrBeam/archive/{target_version}.zip", + "restart": "octoprint" + } + } +} \ No newline at end of file diff --git a/tests/softwareupdate/target_find_my_mr_beam_config.json b/tests/softwareupdate/target_find_my_mr_beam_config.json new file mode 100644 index 000000000..176ab2fbf --- /dev/null +++ b/tests/softwareupdate/target_find_my_mr_beam_config.json @@ -0,0 +1,63 @@ +{ + "displayName": 
"OctoPrint-FindMyMrBeam", + "repo": "OctoPrint-FindMyMrBeam", + "displayVersion": "dummy", + "pip": "https://github.com/mrbeam/OctoPrint-FindMyMrBeam/archive/{target_version}.zip", + "type": "github_commit", + "restart": "octoprint", + "user": "mrbeam", + "stable_branch": { + "branch": "stable", + "name": "stable", + "commitish": [ + "stable" + ] + }, + "prerelease_branches": [ + { + "name": "alpha", + "branch": "alpha", + "commitish": [ + "alpha", + "beta", + "stable" + ] + }, + { + "name": "beta", + "branch": "beta", + "commitish": [ + "beta", + "stable" + ] + } + ], + "force_base": false, + "release_compare": "python_unequal", + "tiers": { + "stable": { + "branch": "mrbeam2-stable", + "branch_default": "mrbeam2-stable", + "type": "github_commit" + }, + "beta": { + "branch": "mrbeam2-beta", + "branch_default": "mrbeam2-beta", + "type": "github_commit", + "prerelease_channel": "beta", + "prerelease": true + }, + "develop": { + "type": "github_commit", + "branch": "alpha", + "branch_default": "alpha" + }, + "alpha": { + "branch": "mrbeam2-alpha", + "branch_default": "mrbeam2-alpha", + "prerelease_channel": "alpha", + "prerelease": true, + "type": "github_release", + } + } +} \ No newline at end of file diff --git a/tests/softwareupdate/target_mrbeam_config.json b/tests/softwareupdate/target_mrbeam_config.json new file mode 100644 index 000000000..2151804bb --- /dev/null +++ b/tests/softwareupdate/target_mrbeam_config.json @@ -0,0 +1,99 @@ +{ + "displayName": " MrBeam Plugin", + "repo": "MrBeamPlugin", + "restart": "environment", + "pip": "https://github.com/mrbeam/MrBeamPlugin/archive/{target_version}.zip", + "type": "github_commit", + "user": "mrbeam", + "force_base": false, + "dependencies": { + "mrbeam-ledstrips": { + "repo": "MrBeamLedStrips", + "pip": "https://github.com/mrbeam/MrBeamLedStrips/archive/{target_version}.zip", + "global_pip_command": true, + "displayVersion": "-", + "pip_command": "sudo /usr/local/mrbeam_ledstrips/venv/bin/pip" + }, + "iobeam": { + "repo": "iobeam", + "pip": "git+ssh://git@bitbucket.org/mrbeam/iobeam.git@{target_version}", + "global_pip_command": true, + "displayVersion": "-", + "pip_command": "sudo /usr/local/iobeam/venv/bin/pip" + }, + "mrb-hw-info": { + "repo": "mrb_hw_info", + "pip": "git+ssh://git@bitbucket.org/mrbeam/mrb_hw_info.git@{target_version}", + "global_pip_command": true, + "displayVersion": "-", + "pip_command": "sudo /usr/local/iobeam/venv/bin/pip" + }, + "mrbeamdoc": { + "pip": "https://github.com/mrbeam/MrBeamDoc/archive/{target_version}.zip", + "repo": "MrBeamDoc", + "displayVersion": "dummy" + } + }, + "stable_branch": { + "branch": "stable", + "name": "stable", + "commitish": [ + "stable" + ] + }, + "prerelease_branches": [ + { + "name": "alpha", + "branch": "alpha", + "commitish": [ + "alpha", + "beta", + "stable" + ] + }, + { + "name": "beta", + "branch": "beta", + "commitish": [ + "beta", + "stable" + ] + } + ], + "release_compare": "python_unequal", + "tiers": { + "stable": { + "branch": "mrbeam2-stable", + "branch_default": "mrbeam2-stable", + "type": "github_commit" + }, + "beta": { + "branch": "mrbeam2-beta", + "branch_default": "mrbeam2-beta", + "type": "github_commit", + "prerelease_channel": "beta", + "prerelease": true + }, + "develop": { + "type": "github_commit", + "branch": "alpha", + "branch_default": "alpha", + "update_folder": "/tmp/octoprint/mrbeamplugin", + "update_script_relative_path": "scripts/update_script.py", + "update_script": "{python} 'octoprint_mrbeam/scripts/update_script.py' --branch={branch} 
--force={force} '{folder}' {target}", + "methode": "update_script" + }, + "alpha": { + "branch": "mrbeam2-alpha", + "branch_default": "mrbeam2-alpha", + "prerelease_channel": "alpha", + "prerelease": true, + "type": "github_release", + "update_folder": "/tmp/octoprint/mrbeamplugin", + "update_script_relative_path": "scripts/update_script.py", + "update_script": "{python} 'octoprint_mrbeam/scripts/update_script.py' --branch={branch} --force={force} '{folder}' {target}", + "methode": "update_script" + } + }, + "displayVersion": "dummy" +} \ No newline at end of file diff --git a/tests/softwareupdate/target_mrbeam_config_legacy.json b/tests/softwareupdate/target_mrbeam_config_legacy.json new file mode 100644 index 000000000..0d685098e --- /dev/null +++ b/tests/softwareupdate/target_mrbeam_config_legacy.json @@ -0,0 +1,99 @@ +{ + "displayName": " MrBeam Plugin", + "repo": "MrBeamPlugin", + "restart": "environment", + "pip": "https://github.com/mrbeam/MrBeamPlugin/archive/{target_version}.zip", + "type": "github_commit", + "user": "mrbeam", + "force_base": false, + "dependencies": { + "mrbeam-ledstrips": { + "repo": "MrBeamLedStrips", + "pip": "https://github.com/mrbeam/MrBeamLedStrips/archive/{target_version}.zip", + "global_pip_command": true, + "displayVersion": "-", + "pip_command": "sudo /usr/local/bin/pip", + }, + "iobeam": { + "repo": "iobeam", + "pip": "git+ssh://git@bitbucket.org/mrbeam/iobeam.git@{target_version}", + "global_pip_command": true, + "displayVersion": "-", + "pip_command": "sudo /usr/local/bin/pip", + }, + "mrb-hw-info": { + "repo": "mrb_hw_info", + "pip": "git+ssh://git@bitbucket.org/mrbeam/mrb_hw_info.git@{target_version}", + "global_pip_command": true, + "displayVersion": "-", + "pip_command": "sudo /usr/local/bin/pip", + }, + "mrbeamdoc": { + "pip": "https://github.com/mrbeam/MrBeamDoc/archive/{target_version}.zip", + "repo": "MrBeamDoc", + "displayVersion": "dummy" + } + }, + "stable_branch": { + "branch": "stable", + "name": "stable", + "commitish": [ + "stable" + ] + }, + "prerelease_branches": [ + { + "name": "alpha", + "branch": "alpha", + "commitish": [ + "alpha", + "beta", + "stable" + ] + }, + { + "name": "beta", + "branch": "beta", + "commitish": [ + "beta", + "stable" + ] + } + ], + "release_compare": "python_unequal", + "tiers": { + "stable": { + "branch": "mrbeam2-stable", + "branch_default": "mrbeam2-stable", + "type": "github_commit" + }, + "beta": { + "branch": "mrbeam2-beta", + "branch_default": "mrbeam2-beta", + "type": "github_commit", + "prerelease_channel": "beta", + "prerelease": true + }, + "develop": { + "type": "github_commit", + "branch": "alpha", + "branch_default": "alpha", + "update_folder": "/tmp/octoprint/mrbeamplugin", + "update_script_relative_path": "scripts/update_script.py", + "update_script": "{python} 'octoprint_mrbeam/scripts/update_script.py' --branch={branch} --force={force} '{folder}' {target}", + "methode": "update_script" + }, + "alpha": { + "branch": "mrbeam2-alpha", + "branch_default": "mrbeam2-alpha", + "prerelease_channel": "alpha", + "prerelease": true, + "type": "github_release", + "update_folder": "/tmp/octoprint/mrbeamplugin", + "update_script_relative_path": "scripts/update_script.py", + "update_script": "{python} 'octoprint_mrbeam/scripts/update_script.py' --branch={branch} --force={force} '{folder}' {target}", + "methode": "update_script" + } + }, + "displayVersion": "dummy" +} \ No newline at end of file diff --git a/tests/softwareupdate/target_netconnectd_config.json 
b/tests/softwareupdate/target_netconnectd_config.json new file mode 100644 index 000000000..e910f3fc8 --- /dev/null +++ b/tests/softwareupdate/target_netconnectd_config.json @@ -0,0 +1,80 @@ +{ + "displayVersion": "dummy", + "displayName": "OctoPrint-Netconnectd Plugin", + "user": "mrbeam", + "repo": "OctoPrint-Netconnectd", + "pip": "https://github.com/mrbeam/OctoPrint-Netconnectd/archive/{target_version}.zip", + "restart": "environment", + "type": "github_commit", + "force_base": false, + "dependencies": { + "netconnectd": { + "displayVersion": "-", + "repo": "netconnectd_mrbeam", + "pip": "https://github.com/mrbeam/netconnectd_mrbeam/archive/{target_version}.zip", + "global_pip_command": true, + "pip_command": "sudo /usr/local/netconnectd/venv/bin/pip" + } + }, + "stable_branch": { + "branch": "stable", + "name": "stable", + "commitish": [ + "stable" + ] + }, + "prerelease_branches": [ + { + "name": "alpha", + "branch": "alpha", + "commitish": [ + "alpha", + "beta", + "stable" + ] + }, + { + "name": "beta", + "branch": "beta", + "commitish": [ + "beta", + "stable" + ] + } + ], + "release_compare": "python_unequal", + "tiers": { + "stable": { + "branch": "mrbeam2-stable", + "branch_default": "mrbeam2-stable", + "type": "github_commit" + }, + "beta": { + "branch": "mrbeam2-beta", + "branch_default": "mrbeam2-beta", + "type": "github_commit", + "prerelease_channel": "beta", + "prerelease": true + }, + "develop": { + "type": "github_commit", + "branch": "alpha", + "branch_default": "alpha", + "update_folder": "/tmp/octoprint/netconnectd", + "update_script_relative_path": "../octoprint_netconnectd/scripts/update_script.py", + "update_script": "{python} 'octoprint_mrbeam/../octoprint_netconnectd/scripts/update_script.py' --branch={branch} --force={force} '{folder}' {target}", + "methode": "update_script" + }, + "alpha": { + "branch": "mrbeam2-alpha", + "branch_default": "mrbeam2-alpha", + "prerelease_channel": "alpha", + "prerelease": true, + "type": "github_release", + "update_folder": "/tmp/octoprint/netconnectd", + "update_script_relative_path": "../octoprint_netconnectd/scripts/update_script.py", + "update_script": "{python} 'octoprint_mrbeam/../octoprint_netconnectd/scripts/update_script.py' --branch={branch} --force={force} '{folder}' {target}", + "methode": "update_script" + } + } +} \ No newline at end of file diff --git a/tests/softwareupdate/target_netconnectd_config_legacy.json b/tests/softwareupdate/target_netconnectd_config_legacy.json new file mode 100644 index 000000000..49542b738 --- /dev/null +++ b/tests/softwareupdate/target_netconnectd_config_legacy.json @@ -0,0 +1,81 @@ +{ + "displayVersion": "dummy", + "displayName": "OctoPrint-Netconnectd Plugin", + "user": "mrbeam", + "repo": "OctoPrint-Netconnectd", + "pip": "https://github.com/mrbeam/OctoPrint-Netconnectd/archive/{target_version}.zip", + "restart": "environment", + "type": "github_commit", + "force_base": false, + "dependencies": { + "netconnectd": { + "displayVersion": "-", + "repo": "netconnectd_mrbeam", + "pip": "https://github.com/mrbeam/netconnectd_mrbeam/archive/{target_version}.zip", + "global_pip_command": true, + "pip_command": "sudo /usr/local/bin/pip", + "version": "0.0.1" + } + }, + "stable_branch": { + "branch": "stable", + "name": "stable", + "commitish": [ + "stable" + ] + }, + "prerelease_branches": [ + { + "name": "alpha", + "branch": "alpha", + "commitish": [ + "alpha", + "beta", + "stable" + ] + }, + { + "name": "beta", + "branch": "beta", + "commitish": [ + "beta", + "stable" + ] + } + ], + 
"release_compare": "python_unequal", + "tiers": { + "stable": { + "branch": "mrbeam2-stable", + "branch_default": "mrbeam2-stable", + "type": "github_commit" + }, + "beta": { + "branch": "mrbeam2-beta", + "branch_default": "mrbeam2-beta", + "type": "github_commit", + "prerelease_channel": "beta", + "prerelease": true + }, + "develop": { + "type": "github_commit", + "branch": "alpha", + "branch_default": "alpha", + "update_folder": "/tmp/octoprint/netconnectd", + "update_script_relative_path": "../octoprint_netconnectd/scripts/update_script.py", + "update_script": "{python} 'octoprint_mrbeam/../octoprint_netconnectd/scripts/update_script.py' --branch={branch} --force={force} '{folder}' {target}", + "methode": "update_script" + }, + "alpha": { + "branch": "mrbeam2-alpha", + "branch_default": "mrbeam2-alpha", + "prerelease_channel": "alpha", + "prerelease": true, + "type": "github_release", + "update_folder": "/tmp/octoprint/netconnectd", + "update_script_relative_path": "../octoprint_netconnectd/scripts/update_script.py", + "update_script": "{python} 'octoprint_mrbeam/../octoprint_netconnectd/scripts/update_script.py' --branch={branch} --force={force} '{folder}' {target}", + "methode": "update_script" + } + } +} \ No newline at end of file diff --git a/tests/softwareupdate/target_octoprint_config.json b/tests/softwareupdate/target_octoprint_config.json new file mode 100644 index 000000000..4c4b33380 --- /dev/null +++ b/tests/softwareupdate/target_octoprint_config.json @@ -0,0 +1,146 @@ +{ + "develop": { + "type": "github_commit", + "restart": "environment", + "user": "mrbeam", + "branch": "alpha", + "branch_default": "alpha", + "force_base": false, + "release_compare": "python_unequal", + "stable_branch": { + "branch": "stable", + "name": "stable", + "commitish": [ + "stable" + ] + }, + "prerelease_branches": [ + { + "name": "alpha", + "branch": "alpha", + "commitish": [ + "alpha", + "beta", + "stable" + ] + }, + { + "name": "beta", + "branch": "beta", + "commitish": [ + "beta", + "stable" + ] + } + ] + }, + "beta": { + "type": "github_commit", + "prerelease_channel": "beta", + "prerelease": true, + "restart": "environment", + "user": "mrbeam", + "branch": "mrbeam2-beta", + "branch_default": "mrbeam2-beta", + "force_base": false, + "release_compare": "python_unequal", + "stable_branch": { + "branch": "stable", + "name": "stable", + "commitish": [ + "stable" + ] + }, + "prerelease_branches": [ + { + "name": "alpha", + "branch": "alpha", + "commitish": [ + "alpha", + "beta", + "stable" + ] + }, + { + "name": "beta", + "branch": "beta", + "commitish": [ + "beta", + "stable" + ] + } + ] + }, + "alpha": { + "type": "github_release", + "prerelease_channel": "alpha", + "prerelease": true, + "restart": "environment", + "user": "mrbeam", + "branch": "alpha", + "branch_default": "alpha", + "force_base": false, + "release_compare": "python_unequal", + "stable_branch": { + "branch": "stable", + "name": "stable", + "commitish": [ + "stable" + ] + }, + "prerelease_branches": [ + { + "name": "alpha", + "branch": "alpha", + "commitish": [ + "alpha", + "beta", + "stable" + ] + }, + { + "name": "beta", + "branch": "beta", + "commitish": [ + "beta", + "stable" + ] + } + ] + }, + "stable": { + "type": "github_commit", + "restart": "environment", + "user": "mrbeam", + "branch": "mrbeam2-stable", + "branch_default": "mrbeam2-stable", + "force_base": false, + "release_compare": "python_unequal", + "stable_branch": { + "branch": "stable", + "name": "stable", + "commitish": [ + "stable" + ] + }, + 
"prerelease_branches": [ + { + "name": "alpha", + "branch": "alpha", + "commitish": [ + "alpha", + "beta", + "stable" + ] + }, + { + "name": "beta", + "branch": "beta", + "commitish": [ + "beta", + "stable" + ] + } + ] + } +} \ No newline at end of file diff --git a/tests/softwareupdate/test_cloud_config.py b/tests/softwareupdate/test_cloud_config.py new file mode 100644 index 000000000..df95526fc --- /dev/null +++ b/tests/softwareupdate/test_cloud_config.py @@ -0,0 +1,581 @@ +# coding=utf-8 +from __future__ import absolute_import, division, print_function + +import base64 +import json +import os +import unittest +from os.path import dirname, realpath + +import requests +import requests_mock +from copy import deepcopy +from mock import mock_open +from mock import patch +from octoprint.events import EventManager +from packaging import version +import yaml + +from octoprint_mrbeam import ( + deviceInfo, + IS_X86, + mrb_logger, + user_notification_system, + MrBeamPlugin, +) +from octoprint_mrbeam.software_update_information import ( + _get_tier_by_id, + get_update_information, + SW_UPDATE_INFO_FILE_NAME, + SW_UPDATE_TIERS, +) +from octoprint_mrbeam.user_notification_system import UserNotificationSystem +from octoprint_mrbeam.util import dict_merge +from octoprint_mrbeam.util.device_info import DeviceInfo + +TMP_BASE_FOLDER_PATH = "/tmp/cloud_config_test/" + + +class SettingsDummy(object): + tier = None + + def getBaseFolder(self, args, **kwargs): + return TMP_BASE_FOLDER_PATH + + def get(self, list): + return self.tier + + def set(self, tier): + self.tier = tier + + def settings(self, init=False, basedir=None, configfile=None): + return None + + +class DummyConnectivityChecker: + online = True + + def check_immediately(self): + return self.online + + +class PluginInfoDummy: + _refresh_configured_checks = None + _version_cache = None + _version_cache_dirty = None + + +class PluginManagerDummy: + version = "dummy" + implementation = PluginInfoDummy() + + def send_plugin_message(self, *args): + return True + + def get_plugin_info(self, module_id): + return self + + +class MrBeamPluginDummy(MrBeamPlugin): + _settings = SettingsDummy() + _plugin_manager = PluginManagerDummy() + _device_info = deviceInfo(use_dummy_values=IS_X86) + _connectivity_checker = DummyConnectivityChecker() + _plugin_version = "dummy" + _event_bus = EventManager() + _basefolder = "octoprint_mrbeam" + + @patch("octoprint.settings.settings") + def __init__(self, settings_mock): + settings_mock.return_value = None + self._logger = mrb_logger("test.Plugindummy") + self.user_notification_system = user_notification_system(self) + + +class SoftwareupdateConfigTestCase(unittest.TestCase): + _softwareupdate_handler = None + plugin = None + + def setUp(self): + self.plugin = MrBeamPluginDummy() + self.mock_major_tag_version = 1 + with open( + os.path.join(dirname(realpath(__file__)), "target_octoprint_config.json") + ) as json_file: + self.target_octoprint_config = yaml.safe_load(json_file) + with open( + os.path.join( + dirname(realpath(__file__)), "target_find_my_mr_beam_config.json" + ) + ) as json_file: + self.target_find_my_mr_beam_config = yaml.safe_load(json_file) + with open( + os.path.join(dirname(realpath(__file__)), "target_netconnectd_config.json") + ) as json_file: + self.target_netconnectd_config = yaml.safe_load(json_file) + with open( + os.path.join( + dirname(realpath(__file__)), "target_netconnectd_config_legacy.json" + ) + ) as json_file: + self.target_netconnectd_config_legacy = yaml.safe_load(json_file) + with 
open( + os.path.join(dirname(realpath(__file__)), "target_mrbeam_config.json") + ) as json_file: + self.target_mrbeam_config = yaml.safe_load(json_file) + with open( + os.path.join( + dirname(realpath(__file__)), "target_mrbeam_config_legacy.json" + ) + ) as json_file: + self.target_mrbeam_config_legacy = yaml.safe_load(json_file) + with open( + os.path.join(dirname(realpath(__file__)), "mock_config.json") + ) as json_file: + self.mock_config = yaml.safe_load(json_file) + + @patch.object( + UserNotificationSystem, + "show_notifications", + ) + @patch.object( + UserNotificationSystem, + "get_notification", + ) + def test_server_not_reachable(self, show_notifications_mock, get_notification_mock): + """ + Testcase to test what happens if the server is not reachable + + Args: + show_notifications_mock: mock of the notifications system show methode + get_notification_mock: mock of the notifications system get methode + + Returns: + None + """ + with patch("__builtin__.open", mock_open(read_data="data")) as mock_file: + get_notification_mock.return_value = None + plugin = self.plugin + + with requests_mock.Mocker() as rm: + rm.get( + "https://api.github.com/repos/mrbeam/beamos_config/tags", + json={"test": "test"}, + status_code=404, + ) + rm.get( + "https://api.github.com/repos/mrbeam/beamos_config/contents/docs/sw-update-conf.json?ref=vNone", + status_code=404, + ) + update_config = get_update_information(plugin) + assert update_config == { + "findmymrbeam": { + "displayName": "OctoPrint-FindMyMrBeam", + "displayVersion": "dummy", + "pip": "", + "repo": "", + "type": "github_commit", + "user": "", + }, + "mrbeam": { + "displayName": " MrBeam Plugin", + "displayVersion": "dummy", + "pip": "", + "repo": "", + "type": "github_commit", + "user": "", + }, + "netconnectd": { + "displayName": "OctoPrint-Netconnectd Plugin", + "displayVersion": "dummy", + "pip": "", + "repo": "", + "type": "github_commit", + "user": "", + }, + } + show_notifications_mock.assert_called_with( + notification_id="missing_updateinformation_info", replay=False + ) + show_notifications_mock.assert_called_once() + + @patch.object(DeviceInfo, "get_beamos_version_number") + def test_cloud_config_buster_online(self, device_info_mock): + """ + Testcase to test the buster config with the online available cloud config + + Args: + device_info_mock: mocks the device info to change the image version + + Returns: + None + """ + self.maxDiff = None + self.check_if_githubapi_rate_limit_exceeded() + self.maxDiff = None + beamos_version_buster = "0.18.0" + device_info_mock.return_value = beamos_version_buster + plugin = self.plugin + with patch("__builtin__.open", mock_open(read_data="data")) as mock_file: + # test for all tiers + for tier in SW_UPDATE_TIERS: + self.plugin._settings.set(tier) + update_config = get_update_information(plugin) + print("config {}".format(update_config)) + self.assertEquals( + update_config["octoprint"], + self.target_octoprint_config[_get_tier_by_id(tier)], + ) + self.validate_mrbeam_module_config( + update_config["mrbeam"], + _get_tier_by_id(tier), + beamos_version_buster, + ) + self.validate_findmymrbeam_module_config( + update_config["findmymrbeam"], + _get_tier_by_id(tier), + beamos_version_buster, + ) + self.validate_netconnect_module_config( + update_config["netconnectd"], + _get_tier_by_id(tier), + beamos_version_buster, + ) + + @patch.object(DeviceInfo, "get_beamos_version_number") + def test_cloud_confg_legacy_online(self, device_info_mock): + """ + Testcase to test the leagcy image config with the 
online available cloud config + + Args: + device_info_mock: mocks the device info to change the image version + + Returns: + None + """ + self.check_if_githubapi_rate_limit_exceeded() + self.maxDiff = None + beamos_version_legacy = "0.14.0" + device_info_mock.return_value = beamos_version_legacy + with patch("__builtin__.open", mock_open(read_data="data")) as mock_file: + plugin = self.plugin + + # test for all tiers + for tier in SW_UPDATE_TIERS: + self.plugin._settings.set(tier) + update_config = get_update_information(plugin) + print("config {}".format(update_config)) + self.assertEquals( + update_config["octoprint"], + self.target_octoprint_config[_get_tier_by_id(tier)], + ) + self.validate_mrbeam_module_config( + update_config["mrbeam"], + _get_tier_by_id(tier), + beamos_version_legacy, + ) + self.validate_findmymrbeam_module_config( + update_config["findmymrbeam"], + _get_tier_by_id(tier), + beamos_version_legacy, + ) + self.validate_netconnect_module_config( + update_config["netconnectd"], + _get_tier_by_id(tier), + beamos_version_legacy, + ) + + @patch.object(DeviceInfo, "get_beamos_version_number") + def test_cloud_confg_buster_mock(self, device_info_mock): + """ + tests the update info with a mocked server response + + Args: + device_info_mock: mocks the device info to change the image version + + Returns: + None + """ + beamos_version_buster = "0.18.0" + device_info_mock.return_value = beamos_version_buster + with patch("__builtin__.open", mock_open(read_data="data")) as mock_file: + with requests_mock.Mocker() as rm: + rm.get( + "https://api.github.com/repos/mrbeam/beamos_config/tags", + status_code=200, + json=[ + { + "name": "v{}.0.2-mock".format(self.mock_major_tag_version), + } + ], + ) + rm.get( + "https://api.github.com/repos/mrbeam/beamos_config/contents/docs/sw-update-conf.json?ref=v{}.0.2-mock".format( + self.mock_major_tag_version + ), + status_code=200, + json={ + "content": base64.urlsafe_b64encode( + json.dumps(self.mock_config) + ) + }, + ) + plugin = self.plugin + + # test for all tiers + for tier in SW_UPDATE_TIERS: + self.plugin._settings.set(tier) + update_config = get_update_information(plugin) + self.maxDiff = None + self.assertEquals( + update_config["octoprint"], + self.target_octoprint_config[_get_tier_by_id(tier)], + ) + self.validate_mrbeam_module_config( + update_config["mrbeam"], + _get_tier_by_id(tier), + beamos_version_buster, + ) + self.validate_findmymrbeam_module_config( + update_config["findmymrbeam"], + _get_tier_by_id(tier), + beamos_version_buster, + ) + self.validate_netconnect_module_config( + update_config["netconnectd"], + _get_tier_by_id(tier), + beamos_version_buster, + ) + mock_file.assert_called_with( + TMP_BASE_FOLDER_PATH + SW_UPDATE_INFO_FILE_NAME, "w" + ) + + @patch.object(DeviceInfo, "get_beamos_version_number") + def test_cloud_confg_legacy_mock(self, device_info_mock): + """ + tests the updateinfo hook for the legacy image + + Args: + device_info_mock: mocks the device info to change the image version + + Returns: + None + """ + beamos_version_legacy = "0.14.0" + device_info_mock.return_value = beamos_version_legacy + with patch("__builtin__.open", mock_open(read_data="data")) as mock_file: + with requests_mock.Mocker() as rm: + rm.get( + "https://api.github.com/repos/mrbeam/beamos_config/tags", + status_code=200, + json=[ + { + "name": "v{}.0.2-mock".format(self.mock_major_tag_version), + } + ], + ) + rm.get( + "https://api.github.com/repos/mrbeam/beamos_config/contents/docs/sw-update-conf.json?ref=v{}.0.2-mock".format( + 
self.mock_major_tag_version + ), + status_code=200, + json={ + "content": base64.urlsafe_b64encode( + json.dumps(self.mock_config) + ) + }, + ) + plugin = self.plugin + + # test for all tiers + for tier in SW_UPDATE_TIERS: + self.plugin._settings.set(tier) + update_config = get_update_information(plugin) + + print("config {}".format(update_config)) + self.maxDiff = None + self.assertEquals( + update_config["octoprint"], + self.target_octoprint_config[_get_tier_by_id(tier)], + ) + self.validate_mrbeam_module_config( + update_config["mrbeam"], + _get_tier_by_id(tier), + beamos_version_legacy, + ) + self.validate_findmymrbeam_module_config( + update_config["findmymrbeam"], + _get_tier_by_id(tier), + beamos_version_legacy, + ) + self.validate_netconnect_module_config( + update_config["netconnectd"], + _get_tier_by_id(tier), + beamos_version_legacy, + ) + mock_file.assert_called_with( + TMP_BASE_FOLDER_PATH + SW_UPDATE_INFO_FILE_NAME, "w" + ) + + @patch.object( + UserNotificationSystem, + "show_notifications", + ) + @patch.object( + UserNotificationSystem, + "get_notification", + ) + def test_cloud_confg_fileerror( + self, + user_notification_system_show_mock, + user_notification_system_get_mock, + ): + """ + Tests the update information hook with a fileerror + + Args: + user_notification_system_show_mock: mock of the notification system show methode + user_notification_system_get_mock: mock of the notification system get methode + + Returns: + None + """ + user_notification_system_get_mock.return_value = None + with requests_mock.Mocker() as rm: + rm.get( + "https://api.github.com/repos/mrbeam/beamos_config/tags", + status_code=200, + json=[ + { + "name": "v{}.0.2-mock".format(self.mock_major_tag_version), + } + ], + ) + rm.get( + "https://api.github.com/repos/mrbeam/beamos_config/contents/docs/sw-update-conf.json?ref=v{}.0.2-mock".format( + self.mock_major_tag_version + ), + status_code=200, + json={ + "content": base64.urlsafe_b64encode(json.dumps(self.mock_config)) + }, + ) + plugin = self.plugin + + update_config = get_update_information(plugin) + + self.assertIsNone(update_config) + user_notification_system_show_mock.assert_called_with( + notification_id="write_error_update_info_file_err", replay=False + ) + user_notification_system_show_mock.assert_called_once() + + def validate_mrbeam_module_config(self, update_config, tier, beamos_version): + """ + validates the config of the mrbeam software module + + Args: + update_config: update config + tier: software tier + beamos_version: version of the beamos image + + Returns: + None + """ + if beamos_version >= "0.18.0": + target_config = self.target_mrbeam_config + else: + target_config = self.target_mrbeam_config_legacy + self.validate_module_config(update_config, tier, target_config, beamos_version) + + def validate_findmymrbeam_module_config(self, update_config, tier, beamos_version): + """ + validates the config of a the findmymrbeam software module + + Args: + update_config: update config + tier: software tier + beamos_version: version of the beamos image + + Returns: + None + """ + self.validate_module_config( + update_config, tier, self.target_find_my_mr_beam_config, beamos_version + ) + + def validate_netconnect_module_config(self, update_config, tier, beamos_version): + """ + validates the config of a the netconnectd software module + + Args: + update_config: update config + tier: software tier + beamos_version: version of the beamos image + + Returns: + None + """ + if beamos_version >= "0.18.0": + target_config = 
self.target_netconnectd_config + else: + target_config = self.target_netconnectd_config_legacy + + self.validate_module_config(update_config, tier, target_config, beamos_version) + + def _set_tier_config(self, config, tier): + """ + generates the updateinformation for a given software tier + + Args: + config: update config + tier: software tier to use + + Returns: + updateinformation for the given tier + """ + if "tiers" in config: + config = dict_merge(config, config["tiers"][tier]) + config.pop("tiers") + return config + + def validate_module_config( + self, update_config, tier, target_module_config, beamos_version + ): + """ + validates the updateinfromation fot the given software module + + Args: + update_config: update config + tier: software tier + target_module_config: software module to validate + beamos_version: beamos image version + + Returns: + None + """ + copy_target_config = deepcopy(target_module_config) + if "dependencies" in copy_target_config: + for dependencie_name, dependencie_config in copy_target_config[ + "dependencies" + ].items(): + dependencie_config = self._set_tier_config(dependencie_config, tier) + copy_target_config["dependencies"][ + dependencie_name + ] = dependencie_config + + copy_target_config = self._set_tier_config(copy_target_config, tier) + + self.assertEquals(update_config, copy_target_config) + + def check_if_githubapi_rate_limit_exceeded(self): + """ + checks if the githubapi rate limit is exeeded + Returns: + None + """ + r = requests.get( + "https://api.github.com/repos/mrbeam/beamos_config/contents/docs/sw-update-conf.json" + ) + # check if rate limit exceeded + r.raise_for_status() diff --git a/tests/softwareupdate/test_comparison.py b/tests/softwareupdate/test_comparison.py new file mode 100644 index 000000000..e4799c8ee --- /dev/null +++ b/tests/softwareupdate/test_comparison.py @@ -0,0 +1,137 @@ +import operator +import unittest + +import pkg_resources + +from octoprint_mrbeam.software_update_information import ( + VersionComperator, + _generate_config_of_beamos, + get_config_for_version, +) + + +def bla(comp1, comparision_options): + return VersionComperator.get_comperator(comp1, comparision_options).priority + + +class VersionCaomparisionTestCase(unittest.TestCase): + def setUp(self): + self.le_element = {"__le__": {"0.17.0": {"value": 3}}} + self.ge_element = { + "__ge__": { + "0.18.0": {"value": 2}, + "0.14.0": {"value": 1}, + "0.18.1": {"value": 5}, + "1.0.0": {"value": 6}, + } + } + self.eq_element = {"__eq__": {"0.16.5": {"value": 4}}} + self.config = {} + self.config.update(self.ge_element) + self.config.update(self.le_element) + self.config.update(self.eq_element) + self.comparision_options = [ + VersionComperator("__eq__", 5, operator.eq), + VersionComperator("__le__", 4, operator.le), + VersionComperator("__lt__", 3, operator.lt), + VersionComperator("__ge__", 2, operator.ge), + VersionComperator("__gt__", 1, operator.gt), + ] + + def test_sorted(self): + print(self.config) + config = sorted( + self.config, + cmp=lambda comp1, comp2: cmp( + bla(comp1, self.comparision_options), + bla(comp2, self.comparision_options), + ), + ) + self.assertEquals(config, ["__ge__", "__le__", "__eq__"]), + + def test_compare(self): + config = sorted( + self.config.items(), + key=lambda com: VersionComperator.get_comperator( + com[0], self.comparision_options + ).priority, + ) + print(config) + + self.assertEquals( + 2, + get_config_for_version("0.18.0", config, self.comparision_options).get( + "value" + ), + ) + self.assertEquals( + 1, + 
get_config_for_version("0.17.1", config, self.comparision_options).get( + "value" + ), + ) + self.assertEquals( + 3, + get_config_for_version("0.16.8", config, self.comparision_options).get( + "value" + ), + ) + self.assertEquals( + 4, + get_config_for_version("0.16.5", config, self.comparision_options).get( + "value" + ), + ) + self.assertEquals( + 5, + get_config_for_version("0.18.2", config, self.comparision_options).get( + "value" + ), + ) + self.assertEquals( + 6, + get_config_for_version("1.0.0", config, self.comparision_options).get( + "value" + ), + ) + # only support major minor patch so far + # self.assertEquals( + # 1, + # get_config_for_version("0.17.5.pre0", config, self.comparision_options).get( + # "value" + # ), + # ) + # self.assertEquals( + # 1, + # get_config_for_version("0.18.0a0", config, self.comparision_options).get( + # "value" + # ), + # ) + + def test_generate_config_of_beamos(self): + config = { + "repo": "netconnectd_mrbeam", + "pip": "https://github.com/mrbeam/netconnectd_mrbeam/archive/{target_version}.zip", + "global_pip_command": True, + "beamos_date": { + "2021-06-11": { + "pip_command": "sudo /usr/local/netconnectd/venv/bin/pip" + } + }, + "beamos_version": { + "__ge__": { + "0.18.0": { + "pip_command": "sudo /usr/local/netconnectd/venv/bin/pip" + } + }, + "__le__": {"0.14.0": {"version": "0.0.1"}}, + }, + } + + self.assertEquals( + _generate_config_of_beamos(config, "0.14.0", "stable").get("version"), + "0.0.1", + ) + self.assertEquals( + _generate_config_of_beamos(config, "0.18.0", "stable").get("version"), None + ) diff --git a/tests/softwareupdate/test_dependencies.py b/tests/softwareupdate/test_dependencies.py new file mode 100644 index 000000000..f51ee4ff8 --- /dev/null +++ b/tests/softwareupdate/test_dependencies.py @@ -0,0 +1,16 @@ +import os +import re +import unittest + + +class TestUpdateScript(unittest.TestCase): + def test_dependencies_file(self): + dependencies_path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "../../octoprint_mrbeam/dependencies.txt", + ) + dependencies_pattern = r"([a-z]+(?:[_-][a-z]+)*)==(([1-9][0-9]*!)?(0|[1-9][0-9]*)(\.(0|[1-9][0-9]*))*((a|b|rc)(0|[1-9][0-9]*))?(\.post(0|[1-9][0-9]*))?(\.dev(0|[1-9][0-9]*))?$)" # $ ad the end needed so we see if there is a leftover at the end + with open(dependencies_path, "r") as f: + lines = f.readlines() + for line in lines: + self.assertRegexpMatches(line, dependencies_pattern) diff --git a/tests/util/__init__.py b/tests/util/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/util/test_string_util.py b/tests/util/test_string_util.py new file mode 100644 index 000000000..e0ce781a2 --- /dev/null +++ b/tests/util/test_string_util.py @@ -0,0 +1,44 @@ +from unittest import TestCase + +from octoprint_mrbeamdoc.enum.mrbeam_doctype import MrBeamDocType + +from octoprint_mrbeam.util import string_util + + +class TestStringUtils(TestCase): + def test_extra_space_at_the_end__then_removed(self): + self.assertEquals(string_util.separate_camelcase_words('Test '), 'Test') + + def test_extra_space_at_the_beginning__then_removed(self): + self.assertEquals(string_util.separate_camelcase_words(' Test'), 'Test') + + def test_extra_space_at_the_middle__then_removed(self): + self.assertEquals(string_util.separate_camelcase_words('Test Test Test'), 'Test Test Test') + + def test_no_camelcase__then_only_removed_extra_space(self): + self.assertEquals(string_util.separate_camelcase_words('Test Test Test', separator=','), 'Test Test Test') + + def 
test_camelcase_and_extra_space__then_separate_and_removed_extra_space(self): + self.assertEquals(string_util.separate_camelcase_words('Test TestTest', separator=','), 'Test Test,Test') + + def test_uppercase_word__then_first_char_separated_and_next_chars_in_groups_of_2(self): + self.assertEquals(string_util.separate_camelcase_words('TESTTEST', separator=','), 'T,ES,TT,ES,T') + + def test_lowercase_word__then_unchanged(self): + self.assertEquals(string_util.separate_camelcase_words('testtest', separator=','), 'testtest') + + def test_separation_of_2_words(self): + self.assertEquals(string_util.separate_camelcase_words('TestTest'), 'Test Test') + + def test_separation_of_3_words(self): + self.assertEquals(string_util.separate_camelcase_words('TestTestTest'), 'Test Test Test') + + def test_custom_separator(self): + self.assertEquals(string_util.separate_camelcase_words('AtestBtestCtest', separator=','), 'Atest,Btest,Ctest') + + def test_separate_mrbeamdoc_type_usermanual_right_format_for_translation(self): + self.assertEquals(string_util.separate_camelcase_words(MrBeamDocType.USER_MANUAL.value), 'User Manual') + + def test_separate_mrbeamdoc_type_quickstart_right_format_for_translation(self): + self.assertEquals(string_util.separate_camelcase_words(MrBeamDocType.QUICKSTART_GUIDE.value), + 'Quickstart Guide') diff --git a/versioneer.py b/versioneer.py new file mode 100644 index 000000000..64fea1c89 --- /dev/null +++ b/versioneer.py @@ -0,0 +1,1822 @@ + +# Version: 0.18 + +"""The Versioneer - like a rocketeer, but for versions. + +The Versioneer +============== + +* like a rocketeer, but for versions! +* https://github.com/warner/python-versioneer +* Brian Warner +* License: Public Domain +* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy +* [![Latest Version] +(https://pypip.in/version/versioneer/badge.svg?style=flat) +](https://pypi.python.org/pypi/versioneer/) +* [![Build Status] +(https://travis-ci.org/warner/python-versioneer.png?branch=master) +](https://travis-ci.org/warner/python-versioneer) + +This is a tool for managing a recorded version number in distutils-based +python projects. The goal is to remove the tedious and error-prone "update +the embedded version string" step from your release process. Making a new +release should be as easy as recording a new tag in your version-control +system, and maybe making new tarballs. + + +## Quick Install + +* `pip install versioneer` to somewhere to your $PATH +* add a `[versioneer]` section to your setup.cfg (see below) +* run `versioneer install` in your source tree, commit the results + +## Version Identifiers + +Source trees come from a variety of places: + +* a version-control system checkout (mostly used by developers) +* a nightly tarball, produced by build automation +* a snapshot tarball, produced by a web-based VCS browser, like github's + "tarball from tag" feature +* a release tarball, produced by "setup.py sdist", distributed through PyPI + +Within each source tree, the version identifier (either a string or a number, +this tool is format-agnostic) can come from a variety of places: + +* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows + about recent "tags" and an absolute revision-id +* the name of the directory into which the tarball was unpacked +* an expanded VCS keyword ($Id$, etc) +* a `_version.py` created by some earlier build step + +For released software, the version identifier is closely related to a VCS +tag. 
Some projects use tag names that include more than just the version +string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool +needs to strip the tag prefix to extract the version identifier. For +unreleased software (between tags), the version identifier should provide +enough information to help developers recreate the same tree, while also +giving them an idea of roughly how old the tree is (after version 1.2, before +version 1.3). Many VCS systems can report a description that captures this, +for example `git describe --tags --dirty --always` reports things like +"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the +0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has +uncommitted changes. + +The version identifier is used for multiple purposes: + +* to allow the module to self-identify its version: `myproject.__version__` +* to choose a name and prefix for a 'setup.py sdist' tarball + +## Theory of Operation + +Versioneer works by adding a special `_version.py` file into your source +tree, where your `__init__.py` can import it. This `_version.py` knows how to +dynamically ask the VCS tool for version information at import time. + +`_version.py` also contains `$Revision$` markers, and the installation +process marks `_version.py` to have this marker rewritten with a tag name +during the `git archive` command. As a result, generated tarballs will +contain enough information to get the proper version. + +To allow `setup.py` to compute a version too, a `versioneer.py` is added to +the top level of your source tree, next to `setup.py` and the `setup.cfg` +that configures it. This overrides several distutils/setuptools commands to +compute the version when invoked, and changes `setup.py build` and `setup.py +sdist` to replace `_version.py` with a small static file that contains just +the generated version data. + +## Installation + +See [INSTALL.md](./INSTALL.md) for detailed installation instructions. + +## Version-String Flavors + +Code which uses Versioneer can learn about its version string at runtime by +importing `_version` from your main `__init__.py` file and running the +`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can +import the top-level `versioneer.py` and run `get_versions()`. + +Both functions return a dictionary with different flavors of version +information: + +* `['version']`: A condensed version string, rendered using the selected + style. This is the most commonly used value for the project's version + string. The default "pep440" style yields strings like `0.11`, + `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section + below for alternative styles. + +* `['full-revisionid']`: detailed revision identifier. For Git, this is the + full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". + +* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the + commit date in ISO 8601 format. This will be None if the date is not + available. + +* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that + this is only accurate if run in a VCS checkout, otherwise it is likely to + be False or None + +* `['error']`: if the version string could not be computed, this will be set + to a string describing the problem, otherwise it will be None. It may be + useful to throw an exception in setup.py if this is set, to avoid e.g. + creating tarballs with a version string of "unknown". + +Some variants are more useful than others. 
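+As a minimal, illustrative sketch (the import path assumes the package is
+named `octoprint_mrbeam`, as elsewhere in this change; any package that has
+run `versioneer install` works the same way), the flavors above can be
+consumed at runtime like this:
+
+    from octoprint_mrbeam._version import get_versions
+
+    info = get_versions()
+    print(info["version"])        # e.g. "0.11+2.g1076c97.dirty"
+    if info["error"] is not None:
+        # lookup failed; better to stop than to ship "0+unknown"
+        raise RuntimeError(info["error"])
+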
Including `full-revisionid` in a +bug report should allow developers to reconstruct the exact code being tested +(or indicate the presence of local changes that should be shared with the +developers). `version` is suitable for display in an "about" box or a CLI +`--version` output: it can be easily compared against release notes and lists +of bugs fixed in various releases. + +The installer adds the following text to your `__init__.py` to place a basic +version in `YOURPROJECT.__version__`: + + from ._version import get_versions + __version__ = get_versions()['version'] + del get_versions + +## Styles + +The setup.cfg `style=` configuration controls how the VCS information is +rendered into a version string. + +The default style, "pep440", produces a PEP440-compliant string, equal to the +un-prefixed tag name for actual releases, and containing an additional "local +version" section with more detail for in-between builds. For Git, this is +TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags +--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the +tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and +that this commit is two revisions ("+2") beyond the "0.11" tag. For released +software (exactly equal to a known tag), the identifier will only contain the +stripped tag, e.g. "0.11". + +Other styles are available. See [details.md](details.md) in the Versioneer +source tree for descriptions. + +## Debugging + +Versioneer tries to avoid fatal errors: if something goes wrong, it will tend +to return a version of "0+unknown". To investigate the problem, run `setup.py +version`, which will run the version-lookup code in a verbose mode, and will +display the full contents of `get_versions()` (including the `error` string, +which may help identify what went wrong). + +## Known Limitations + +Some situations are known to cause problems for Versioneer. This details the +most significant ones. More can be found on Github +[issues page](https://github.com/warner/python-versioneer/issues). + +### Subprojects + +Versioneer has limited support for source trees in which `setup.py` is not in +the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are +two common reasons why `setup.py` might not be in the root: + +* Source trees which contain multiple subprojects, such as + [Buildbot](https://github.com/buildbot/buildbot), which contains both + "master" and "slave" subprojects, each with their own `setup.py`, + `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI + distributions (and upload multiple independently-installable tarballs). +* Source trees whose main purpose is to contain a C library, but which also + provide bindings to Python (and perhaps other langauges) in subdirectories. + +Versioneer will look for `.git` in parent directories, and most operations +should get the right version string. However `pip` and `setuptools` have bugs +and implementation details which frequently cause `pip install .` from a +subproject directory to fail to find a correct version string (so it usually +defaults to `0+unknown`). + +`pip install --editable .` should work correctly. `setup.py install` might +work too. + +Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in +some later version. + +[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking +this issue. 
The discussion in +[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the +issue from the Versioneer side in more detail. +[pip PR#3176](https://github.com/pypa/pip/pull/3176) and +[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve +pip to let Versioneer work correctly. + +Versioneer-0.16 and earlier only looked for a `.git` directory next to the +`setup.cfg`, so subprojects were completely unsupported with those releases. + +### Editable installs with setuptools <= 18.5 + +`setup.py develop` and `pip install --editable .` allow you to install a +project into a virtualenv once, then continue editing the source code (and +test) without re-installing after every change. + +"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a +convenient way to specify executable scripts that should be installed along +with the python package. + +These both work as expected when using modern setuptools. When using +setuptools-18.5 or earlier, however, certain operations will cause +`pkg_resources.DistributionNotFound` errors when running the entrypoint +script, which must be resolved by re-installing the package. This happens +when the install happens with one version, then the egg_info data is +regenerated while a different version is checked out. Many setup.py commands +cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into +a different virtualenv), so this can be surprising. + +[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes +this one, but upgrading to a newer version of setuptools should probably +resolve it. + +### Unicode version strings + +While Versioneer works (and is continually tested) with both Python 2 and +Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. +Newer releases probably generate unicode version strings on py2. It's not +clear that this is wrong, but it may be surprising for applications when then +write these strings to a network connection or include them in bytes-oriented +APIs like cryptographic checksums. + +[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates +this question. + + +## Updating Versioneer + +To upgrade your project to a new release of Versioneer, do the following: + +* install the new Versioneer (`pip install -U versioneer` or equivalent) +* edit `setup.cfg`, if necessary, to include any new configuration settings + indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. +* re-run `versioneer install` in your source tree, to replace + `SRC/_version.py` +* commit any changed files + +## Future Directions + +This tool is designed to make it easily extended to other version-control +systems: all VCS-specific components are in separate directories like +src/git/ . The top-level `versioneer.py` script is assembled from these +components by running make-versioneer.py . In the future, make-versioneer.py +will take a VCS name as an argument, and will construct a version of +`versioneer.py` that is specific to the given VCS. It might also take the +configuration arguments that are currently provided manually during +installation by editing setup.py . Alternatively, it might go the other +direction and include code from all supported VCS systems, reducing the +number of intermediate scripts. + + +## License + +To make Versioneer easier to embed, all its code is dedicated to the public +domain. The `_version.py` that it creates is also in the public domain. 
+Specifically, both are released under the Creative Commons "Public Domain +Dedication" license (CC0-1.0), as described in +https://creativecommons.org/publicdomain/zero/1.0/ . + +""" + +from __future__ import print_function +try: + import configparser +except ImportError: + import ConfigParser as configparser +import errno +import json +import os +import re +import subprocess +import sys + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_root(): + """Get the project root directory. + + We require that all commands are run from the project root, i.e. the + directory that contains setup.py, setup.cfg, and versioneer.py . + """ + root = os.path.realpath(os.path.abspath(os.getcwd())) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + # allow 'python path/to/setup.py COMMAND' + root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + err = ("Versioneer was unable to run the project root directory. " + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND').") + raise VersioneerBadRootError(err) + try: + # Certain runtime workflows (setup.py install/develop in a setuptools + # tree) execute all dependencies in a single python process, so + # "versioneer" may be imported multiple times, and python's shared + # module-import table will cache the first one. So we can't use + # os.path.dirname(__file__), as that will find whichever + # versioneer.py was first imported, even in later projects. + me = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(me)[0]) + vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) + if me_dir != vsr_dir: + print("Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(me), versioneer_py)) + except NameError: + pass + return root + + +def get_config_from_root(root): + """Read the project setup.cfg file to determine Versioneer config.""" + # This might raise EnvironmentError (if setup.cfg is missing), or + # configparser.NoSectionError (if it lacks a [versioneer] section), or + # configparser.NoOptionError (if it lacks "VCS="). See the docstring at + # the top of versioneer.py for instructions on writing your setup.cfg . 
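+    # Purely illustrative (not necessarily this project's actual setup.cfg),
+    # the kind of section this function parses looks roughly like:
+    #
+    #     [versioneer]
+    #     VCS = git
+    #     style = pep440
+    #     versionfile_source = octoprint_mrbeam/_version.py
+    #     versionfile_build = octoprint_mrbeam/_version.py
+    #     tag_prefix = v
+    #     parentdir_prefix =
+    #
+    # Only "VCS" is mandatory; the remaining options feed the cfg attributes
+    # assigned below and default to None (or "" for style) when absent.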
+ setup_cfg = os.path.join(root, "setup.cfg") + parser = configparser.SafeConfigParser() + with open(setup_cfg, "r") as f: + parser.readfp(f) + VCS = parser.get("versioneer", "VCS") # mandatory + + def get(parser, name): + if parser.has_option("versioneer", name): + return parser.get("versioneer", name) + return None + cfg = VersioneerConfig() + cfg.VCS = VCS + cfg.style = get(parser, "style") or "" + cfg.versionfile_source = get(parser, "versionfile_source") + cfg.versionfile_build = get(parser, "versionfile_build") + cfg.tag_prefix = get(parser, "tag_prefix") + if cfg.tag_prefix in ("''", '""'): + cfg.tag_prefix = "" + cfg.parentdir_prefix = get(parser, "parentdir_prefix") + cfg.verbose = get(parser, "verbose") + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +# these dictionaries contain VCS-specific tools +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, p.returncode + return stdout, p.returncode + + +LONG_VERSION_PY['git'] = ''' +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.18 (https://github.com/warner/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
+ git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" + git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" + git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "%(STYLE)s" + cfg.tag_prefix = "%(TAG_PREFIX)s" + cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" + cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Decorator to mark a method as the handler for a particular VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %%s" %% dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %%s" %% (commands,)) + return None, None + stdout = p.communicate()[0].strip() + if sys.version_info[0] >= 3: + stdout = stdout.decode() + if p.returncode != 0: + if verbose: + print("unable to run %%s (error)" %% dispcmd) + print("stdout was %%s" %% stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %%s but none started with prefix %%s" %% + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
+ keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %%d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%%s', no digits" %% ",".join(refs - tags)) + if verbose: + print("likely tags: %%s" %% ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %%s" %% r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %%s not under git control" %% root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%%s*" %% tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%%s'" + %% describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%%s' doesn't start with prefix '%%s'" + print(fmt %% (full_tag, tag_prefix)) + pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" + %% (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%%d" %% pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%%d" %% pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%%s'" %% style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. + for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} +''' + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
+ keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%s*" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], + cwd=root)[0].strip() + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def do_vcs_install(manifest_in, versionfile_source, ipy): + """Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-subst keyword substitution. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + files = [manifest_in, versionfile_source] + if ipy: + files.append(ipy) + try: + me = __file__ + if me.endswith(".pyc") or me.endswith(".pyo"): + me = os.path.splitext(me)[0] + ".py" + versioneer_file = os.path.relpath(me) + except NameError: + versioneer_file = "versioneer.py" + files.append(versioneer_file) + present = False + try: + f = open(".gitattributes", "r") + for line in f.readlines(): + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + f.close() + except EnvironmentError: + pass + if not present: + f = open(".gitattributes", "a+") + f.write("%s export-subst\n" % versionfile_source) + f.close() + files.append(".gitattributes") + run_command(GITS, ["add", "--"] + files) + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +SHORT_VERSION_PY = """ +# This file was generated by 'versioneer.py' (0.18) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +import json + +version_json = ''' +%s +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) +""" + + +def versions_from_file(filename): + """Try to determine the version from _version.py if present.""" + try: + with open(filename) as f: + contents = f.read() + except EnvironmentError: + raise NotThisMethod("unable to read _version.py") + mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + raise NotThisMethod("no version_json in _version.py") + return json.loads(mo.group(1)) + + +def write_to_version_file(filename, versions): + """Write the given version number to the given _version.py file.""" + os.unlink(filename) + contents = json.dumps(versions, sort_keys=True, + indent=1, separators=(",", ": ")) + with open(filename, "w") as f: + f.write(SHORT_VERSION_PY % contents) + + print("set %s to '%s'" % (filename, versions["version"])) + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Eexceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +class VersioneerBadRootError(Exception): + """The project root directory is unknown or missing key files.""" + + +def get_versions(verbose=False): + """Get the project version from whatever source is available. + + Returns dict with two keys: 'version' and 'full'. + """ + if "versioneer" in sys.modules: + # see the discussion in cmdclass.py:get_cmdclass() + del sys.modules["versioneer"] + + root = get_root() + cfg = get_config_from_root(root) + + assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" + handlers = HANDLERS.get(cfg.VCS) + assert handlers, "unrecognized VCS '%s'" % cfg.VCS + verbose = verbose or cfg.verbose + assert cfg.versionfile_source is not None, \ + "please set versioneer.versionfile_source" + assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" + + versionfile_abs = os.path.join(root, cfg.versionfile_source) + + # extract version from first of: _version.py, VCS command (e.g. 'git + # describe'), parentdir. This is meant to work for developers using a + # source checkout, for users of a tarball created by 'setup.py sdist', + # and for users of a tarball/zipball created by 'git archive' or github's + # download-from-tag feature or the equivalent in other VCSes. 
+ + get_keywords_f = handlers.get("get_keywords") + from_keywords_f = handlers.get("keywords") + if get_keywords_f and from_keywords_f: + try: + keywords = get_keywords_f(versionfile_abs) + ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) + if verbose: + print("got version from expanded keyword %s" % ver) + return ver + except NotThisMethod: + pass + + try: + ver = versions_from_file(versionfile_abs) + if verbose: + print("got version from file %s %s" % (versionfile_abs, ver)) + return ver + except NotThisMethod: + pass + + from_vcs_f = handlers.get("pieces_from_vcs") + if from_vcs_f: + try: + pieces = from_vcs_f(cfg.tag_prefix, root, verbose) + ver = render(pieces, cfg.style) + if verbose: + print("got version from VCS %s" % ver) + return ver + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + if verbose: + print("got version from parentdir %s" % ver) + return ver + except NotThisMethod: + pass + + if verbose: + print("unable to compute version") + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, "error": "unable to compute version", + "date": None} + + +def get_version(): + """Get the short version string for this project.""" + return get_versions()["version"] + + +def get_cmdclass(): + """Get the custom setuptools/distutils subclasses used by Versioneer.""" + if "versioneer" in sys.modules: + del sys.modules["versioneer"] + # this fixes the "python setup.py develop" case (also 'install' and + # 'easy_install .'), in which subdependencies of the main project are + # built (using setup.py bdist_egg) in the same python process. Assume + # a main project A and a dependency B, which use different versions + # of Versioneer. A's setup.py imports A's Versioneer, leaving it in + # sys.modules by the time B's setup.py is executed, causing B to run + # with the wrong versioneer. Setuptools wraps the sub-dep builds in a + # sandbox that restores sys.modules to it's pre-build state, so the + # parent is protected against the child's "import versioneer". By + # removing ourselves from sys.modules here, before the child build + # happens, we protect the child from the parent's versioneer too. + # Also see https://github.com/warner/python-versioneer/issues/52 + + cmds = {} + + # we add "version" to both distutils and setuptools + from distutils.core import Command + + class cmd_version(Command): + description = "report generated version string" + user_options = [] + boolean_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + vers = get_versions(verbose=True) + print("Version: %s" % vers["version"]) + print(" full-revisionid: %s" % vers.get("full-revisionid")) + print(" dirty: %s" % vers.get("dirty")) + print(" date: %s" % vers.get("date")) + if vers["error"]: + print(" error: %s" % vers["error"]) + cmds["version"] = cmd_version + + # we override "build_py" in both distutils and setuptools + # + # most invocation pathways end up running build_py: + # distutils/build -> build_py + # distutils/install -> distutils/build ->.. + # setuptools/bdist_wheel -> distutils/install ->.. + # setuptools/bdist_egg -> distutils/install_lib -> build_py + # setuptools/install -> bdist_egg ->.. + # setuptools/develop -> ? 
+ # pip install: + # copies source tree to a tempdir before running egg_info/etc + # if .git isn't copied too, 'git describe' will fail + # then does setup.py bdist_wheel, or sometimes setup.py install + # setup.py egg_info -> ? + + # we override different "build_py" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.build_py import build_py as _build_py + else: + from distutils.command.build_py import build_py as _build_py + + class cmd_build_py(_build_py): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_py.run(self) + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if cfg.versionfile_build: + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_py"] = cmd_build_py + + if "cx_Freeze" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe + # nczeczulin reports that py2exe won't like the pep440-style string + # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. + # setup(console=[{ + # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION + # "product_version": versioneer.get_version(), + # ... + + class cmd_build_exe(_build_exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _build_exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["build_exe"] = cmd_build_exe + del cmds["build_py"] + + if 'py2exe' in sys.modules: # py2exe enabled? 
+ try: + from py2exe.distutils_buildexe import py2exe as _py2exe # py3 + except ImportError: + from py2exe.build_exe import py2exe as _py2exe # py2 + + class cmd_py2exe(_py2exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _py2exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["py2exe"] = cmd_py2exe + + # we override different "sdist" commands for both environments + if "setuptools" in sys.modules: + from setuptools.command.sdist import sdist as _sdist + else: + from distutils.command.sdist import sdist as _sdist + + class cmd_sdist(_sdist): + def run(self): + versions = get_versions() + self._versioneer_generated_versions = versions + # unless we update this, the command will keep using the old + # version + self.distribution.metadata.version = versions["version"] + return _sdist.run(self) + + def make_release_tree(self, base_dir, files): + root = get_root() + cfg = get_config_from_root(root) + _sdist.make_release_tree(self, base_dir, files) + # now locate _version.py in the new base_dir directory + # (remembering that it may be a hardlink) and replace it with an + # updated value + target_versionfile = os.path.join(base_dir, cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, + self._versioneer_generated_versions) + cmds["sdist"] = cmd_sdist + + return cmds + + +CONFIG_ERROR = """ +setup.cfg is missing the necessary Versioneer configuration. You need +a section like: + + [versioneer] + VCS = git + style = pep440 + versionfile_source = src/myproject/_version.py + versionfile_build = myproject/_version.py + tag_prefix = + parentdir_prefix = myproject- + +You will also need to edit your setup.py to use the results: + + import versioneer + setup(version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), ...) + +Please read the docstring in ./versioneer.py for configuration instructions, +edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. +""" + +SAMPLE_CONFIG = """ +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. 
+ +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + +""" + +INIT_PY_SNIPPET = """ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions +""" + + +def do_setup(): + """Main VCS-independent setup function for installing Versioneer.""" + root = get_root() + try: + cfg = get_config_from_root(root) + except (EnvironmentError, configparser.NoSectionError, + configparser.NoOptionError) as e: + if isinstance(e, (EnvironmentError, configparser.NoSectionError)): + print("Adding sample versioneer config to setup.cfg", + file=sys.stderr) + with open(os.path.join(root, "setup.cfg"), "a") as f: + f.write(SAMPLE_CONFIG) + print(CONFIG_ERROR, file=sys.stderr) + return 1 + + print(" creating %s" % cfg.versionfile_source) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), + "__init__.py") + if os.path.exists(ipy): + try: + with open(ipy, "r") as f: + old = f.read() + except EnvironmentError: + old = "" + if INIT_PY_SNIPPET not in old: + print(" appending to %s" % ipy) + with open(ipy, "a") as f: + f.write(INIT_PY_SNIPPET) + else: + print(" %s unmodified" % ipy) + else: + print(" %s doesn't exist, ok" % ipy) + ipy = None + + # Make sure both the top-level "versioneer.py" and versionfile_source + # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so + # they'll be copied into source distributions. Pip won't be able to + # install the package without this. + manifest_in = os.path.join(root, "MANIFEST.in") + simple_includes = set() + try: + with open(manifest_in, "r") as f: + for line in f: + if line.startswith("include "): + for include in line.split()[1:]: + simple_includes.add(include) + except EnvironmentError: + pass + # That doesn't cover everything MANIFEST.in can do + # (http://docs.python.org/2/distutils/sourcedist.html#commands), so + # it might give some false negatives. Appending redundant 'include' + # lines is safe, though. + if "versioneer.py" not in simple_includes: + print(" appending 'versioneer.py' to MANIFEST.in") + with open(manifest_in, "a") as f: + f.write("include versioneer.py\n") + else: + print(" 'versioneer.py' already in MANIFEST.in") + if cfg.versionfile_source not in simple_includes: + print(" appending versionfile_source ('%s') to MANIFEST.in" % + cfg.versionfile_source) + with open(manifest_in, "a") as f: + f.write("include %s\n" % cfg.versionfile_source) + else: + print(" versionfile_source already in MANIFEST.in") + + # Make VCS-specific changes. For git, this means creating/changing + # .gitattributes to mark _version.py for export-subst keyword + # substitution. 
+ do_vcs_install(manifest_in, cfg.versionfile_source, ipy) + return 0 + + +def scan_setup_py(): + """Validate the contents of setup.py against Versioneer's expectations.""" + found = set() + setters = False + errors = 0 + with open("setup.py", "r") as f: + for line in f.readlines(): + if "import versioneer" in line: + found.add("import") + if "versioneer.get_cmdclass()" in line: + found.add("cmdclass") + if "versioneer.get_version()" in line: + found.add("get_version") + if "versioneer.VCS" in line: + setters = True + if "versioneer.versionfile_source" in line: + setters = True + if len(found) != 3: + print("") + print("Your setup.py appears to be missing some important items") + print("(but I might be wrong). Please make sure it has something") + print("roughly like the following:") + print("") + print(" import versioneer") + print(" setup( version=versioneer.get_version(),") + print(" cmdclass=versioneer.get_cmdclass(), ...)") + print("") + errors += 1 + if setters: + print("You should remove lines like 'versioneer.VCS = ' and") + print("'versioneer.versionfile_source = ' . This configuration") + print("now lives in setup.cfg, and should be removed from setup.py") + print("") + errors += 1 + return errors + + +if __name__ == "__main__": + cmd = sys.argv[1] + if cmd == "setup": + errors = do_setup() + errors += scan_setup_py() + if errors: + sys.exit(1)
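How the vendored versioneer.py above gets wired into a project, restated as a sketch: the values below simply mirror the placeholder sample from CONFIG_ERROR / SAMPLE_CONFIG ("myproject" is not this repository's package name), and the real paths live in the project's own setup.cfg.

    # setup.cfg (placeholder values, as in CONFIG_ERROR above)
    [versioneer]
    VCS = git
    style = pep440
    versionfile_source = src/myproject/_version.py
    versionfile_build = myproject/_version.py
    tag_prefix =
    parentdir_prefix = myproject-

    # setup.py
    import versioneer
    from setuptools import setup

    setup(
        version=versioneer.get_version(),
        cmdclass=versioneer.get_cmdclass(),
        # ... the rest of the project's setup() arguments
    )

With that in place, 'python versioneer.py setup' (the __main__ block above) runs do_setup() and scan_setup_py(): it generates _version.py from LONG_VERSION_PY, appends INIT_PY_SNIPPET to the package __init__.py, adds the necessary 'include' lines to MANIFEST.in, and marks the version file with export-subst in .gitattributes. Afterwards 'python setup.py version' reports the computed version through the cmd_version command defined in get_cmdclass().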
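The render_*() helpers are easiest to follow against a concrete input. Below is a hypothetical "pieces" dict of the shape git_pieces_from_vcs() produces, for an imagined checkout two commits past tag "0.9" (tag_prefix already stripped) with uncommitted changes; all values are invented for illustration.

    pieces = {
        "closest-tag": "0.9",    # nearest reachable tag, prefix stripped
        "distance": 2,           # commits since that tag
        "short": "abc1234",      # short hex of HEAD
        "long": "abc1234d5e6f7890123456789abcdef012345678",
        "dirty": True,           # working tree has local modifications
        "error": None,
        "date": "2019-01-01T12:00:00+0100",
    }

    # render(pieces, style)["version"] for each style:
    #   "pep440"            -> "0.9+2.gabc1234.dirty"
    #   "pep440-pre"        -> "0.9.post.dev2"            (dirty is ignored)
    #   "pep440-post"       -> "0.9.post2.dev0+gabc1234"
    #   "pep440-old"        -> "0.9.post2.dev0"
    #   "git-describe"      -> "0.9-2-gabc1234-dirty"
    #   "git-describe-long" -> "0.9-2-gabc1234-dirty"

For a clean checkout sitting exactly on the tag (distance 0, dirty False), the default "pep440" style collapses to plain "0.9".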
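The keywords path handles tarballs produced by 'git archive' (or GitHub's download-from-tag), where git has already expanded the $Format$ placeholders because of the export-subst attribute. A hypothetical expansion and the resulting lookup, again with invented values (including the tag_prefix):

    # what the keyword lines in _version.py might hold after 'git archive'
    git_refnames = " (HEAD -> master, tag: v0.9.2)"
    git_full = "abc1234d5e6f7890123456789abcdef012345678"
    git_date = "2019-01-01 12:00:00 +0100"

    # git_versions_from_keywords({"refnames": git_refnames,
    #                             "full": git_full,
    #                             "date": git_date},
    #                            tag_prefix="v", verbose=False)
    # -> {"version": "0.9.2",
    #     "full-revisionid": "abc1234d5e6f7890123456789abcdef012345678",
    #     "dirty": False, "error": None,
    #     "date": "2019-01-01T12:00:00+0100"}

If the refnames keyword still starts with the literal "$Format" (a plain checkout rather than an archive), the function raises NotThisMethod and get_versions() moves on to the later sources: the rewritten _version.py file, 'git describe', and finally the parent-directory name.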
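Finally, the sdist and build_py hooks in get_cmdclass() call write_to_version_file(), which replaces _version.py in the release tree with the SHORT_VERSION_PY form, so built distributions carry a frozen JSON blob instead of shelling out to git at import time. Continuing the invented values above (a clean checkout on the v0.9.2 tag), the rewritten file would look roughly like this (json.dumps with sort_keys=True and indent=1, as in write_to_version_file(); header comment abbreviated):

    # This file was generated by 'versioneer.py' (0.18) from
    # revision-control system data, or from the parent directory name of an
    # unpacked source archive.

    import json

    version_json = '''
    {
     "date": "2019-01-01T12:00:00+0100",
     "dirty": false,
     "error": null,
     "full-revisionid": "abc1234d5e6f7890123456789abcdef012345678",
     "version": "0.9.2"
    }
    ''' # END VERSION_JSON


    def get_versions():
        return json.loads(version_json)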