diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..2441bb3 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,10 @@ +[run] +branch = True +source = sync2jira/ + +[report] +omit = + sync2jira/mailer.py + continuous-deployment/continuous_deployment.py + +fail_under = 70 \ No newline at end of file diff --git a/.github/workflows/tox.yml b/.github/workflows/tox.yml new file mode 100644 index 0000000..9bc11f6 --- /dev/null +++ b/.github/workflows/tox.yml @@ -0,0 +1,33 @@ +--- +name: Run Tox + +# yamllint disable rule:truthy +on: + - push + - pull_request + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + + - name: Run Tox tests + id: test + uses: fedora-python/tox-github-action@master + with: + tox_env: py39,lint + # Same list of packages in Dockerfile + dnf_install: >- + --repo fedora --repo updates + python3-requests + python3-jira + python3-fedmsg-core + python3-pygithub + python3-jinja2 + python3-pypandoc + python3-requests-kerberos + + - name: Coveralls + uses: AndreMiras/coveralls-python-action@develop diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..33b01b9 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,48 @@ +FROM fedora:latest +LABEL \ + name="sync2jira" \ + description="sync2jira application" \ + vendor="sync2jira developers" \ + license="GPLv2+" \ + build-date="" + +# Installing sync2jira dependencies +RUN dnf -y install \ + git \ + python3-pip \ + python3-requests \ + python3-jira \ + python3-fedmsg-core \ + python3-pygithub \ + python3-jinja2 \ + python3-pypandoc \ + python3-requests-kerberos \ + && dnf -y clean all + +ARG SYNC2JIRA_GIT_REPO=https://github.com/release-engineering/Sync2Jira.git +ARG SYNC2JIRA_GIT_REF=master +ARG SYNC2JIRA_CACERT_URL= +ARG SYNC2JIRA_VERSION= +ENV SYNC2JIRA_VERSION=$SYNC2JIRA_VERSION +ENV SYNC2JIRA_CACERT_URL=$SYNC2JIRA_CACERT_URL + +USER root + +# Create Sync2Jira folder +RUN mkdir -p /usr/local/src/sync2jira + +# Copy over our repo +COPY . 
/usr/local/src/sync2jira + +# Install Sync2Jira +RUN pip3 install --no-deps -v /usr/local/src/sync2jira + +# To deal with JIRA issues (i.e. SSL errors) +RUN chmod g+w /etc/pki/tls/certs/ca-bundle.crt +RUN chmod 777 /usr/local/src/sync2jira/openshift/docker-entrypoint.sh + +USER 1001 + +ENTRYPOINT ["/usr/local/src/sync2jira/openshift/docker-entrypoint.sh"] + +CMD ["/usr/local/bin/sync2jira"] diff --git a/Dockerfile.build b/Dockerfile.build new file mode 100644 index 0000000..a76875d --- /dev/null +++ b/Dockerfile.build @@ -0,0 +1,62 @@ +FROM fedora:latest +LABEL \ + name="sync2jira" \ + description="sync2jira application" \ + vendor="sync2jira developers" \ + license="GPLv2+" \ + build-date="" + +# Installing sync2jira dependencies +RUN dnf -y install \ + git \ + python3-pip \ + python3-requests \ + python3-jira \ + python3-fedmsg-core \ + python3-pygithub \ + python3-jinja2 \ + python3-pypandoc \ + python3-requests-kerberos \ + && dnf -y clean all + +ARG SYNC2JIRA_GIT_REPO=https://github.com/release-engineering/Sync2Jira.git +ARG SYNC2JIRA_GIT_REF=master +ARG SYNC2JIRA_CACERT_URL= +ARG SYNC2JIRA_VERSION= +ENV SYNC2JIRA_VERSION=$SYNC2JIRA_VERSION +ENV SYNC2JIRA_CACERT_URL=$SYNC2JIRA_CACERT_URL + +USER root + +# Create Sync2Jira folder +RUN mkdir -p /usr/local/src/sync2jira + +# Copy over our repo +COPY . /usr/local/src/sync2jira + +# To deal with JIRA issues (i.e. 
SSL errors) +RUN chmod g+w /etc/pki/tls/certs/ca-bundle.crt +RUN chmod 777 /usr/local/src/sync2jira/openshift/docker-entrypoint.sh + + +USER 1001 + +ENTRYPOINT ["/usr/local/src/sync2jira/openshift/docker-entrypoint.sh"] + +# OpenShift-Build Related +USER root +RUN chmod 777 /usr/local/src/sync2jira/openshift-build +RUN dnf -y install nodejs +RUN npm --prefix /usr/local/src/sync2jira/openshift-build/ install + +# Install Sync2Jira once +RUN mkdir -p /usr/local/src/sync2jira/openshift-build/temp \ + && git clone ${SYNC2JIRA_GIT_REPO} /usr/local/src/sync2jira/openshift-build/temp \ + && cd usr/local/src/sync2jira/openshift-build/temp \ + && python3 setup.py develop \ + && mkdir -p /usr/local/src/sync2jira/openshift-build/temp \ + && chgrp -R 0 /usr/local/src/sync2jira/openshift-build/temp \ + && chmod -R 777 /usr/local/src/sync2jira/openshift-build/temp + +USER 1001 +CMD ["npm", "--prefix", "/usr/local/src/sync2jira/openshift-build/", "start"] diff --git a/Dockerfile.deploy b/Dockerfile.deploy new file mode 100644 index 0000000..4379f66 --- /dev/null +++ b/Dockerfile.deploy @@ -0,0 +1,53 @@ +FROM fedora:latest +LABEL \ + name="sync2jira" \ + description="sync2jira application" \ + vendor="sync2jira developers" \ + license="GPLv2+" \ + build-date="" + +# Installing sync2jira dependencies +RUN dnf -y install \ + git \ + python3-pip \ + python3-requests \ + python3-jira \ + python3-fedmsg-core \ + python3-pygithub \ + python3-jinja2 \ + python3-pypandoc \ + && dnf -y clean all + +ARG SYNC2JIRA_GIT_REPO=https://github.com/release-engineering/Sync2Jira.git +ARG SYNC2JIRA_GIT_REF=master +ARG SYNC2JIRA_CACERT_URL= +ARG SYNC2JIRA_VERSION= +ENV SYNC2JIRA_VERSION=$SYNC2JIRA_VERSION +ENV SYNC2JIRA_CACERT_URL=$SYNC2JIRA_CACERT_URL + +USER root + +# Create Sync2Jira folder +RUN mkdir -p /usr/local/src/sync2jira + +# Copy over our repo +COPY . 
/usr/local/src/sync2jira + +# We also need to install requests_kerberos +RUN dnf -y install python3-requests-kerberos + +# Install Sync2Jira +RUN pip3 install --no-deps -v /usr/local/src/sync2jira + +# To deal with JIRA issues (i.e. SSL errors) +RUN chmod g+w /etc/pki/tls/certs/ca-bundle.crt +RUN chmod 777 /usr/local/src/sync2jira/openshift/docker-entrypoint.sh + +# Continuous Deployment Info +ARG RCM_TOOLS_REPO +RUN chmod 777 /usr/local/src/sync2jira/continuous-deployment/install_rhmsg.sh +RUN ./usr/local/src/sync2jira/continuous-deployment/install_rhmsg.sh $RCM_TOOLS_REPO + +USER 1001 +ENTRYPOINT ["/usr/local/src/sync2jira/openshift/docker-entrypoint.sh"] +CMD ["python3", "/usr/local/src/sync2jira/continuous-deployment/continuous_deployment.py"] \ No newline at end of file diff --git a/Dockerfile.sync-page b/Dockerfile.sync-page new file mode 100644 index 0000000..e707114 --- /dev/null +++ b/Dockerfile.sync-page @@ -0,0 +1,49 @@ +FROM fedora:latest +LABEL \ + name="sync2jira" \ + description="sync2jira application" \ + vendor="sync2jira developers" \ + license="GPLv2+" \ + build-date="" + +# Installing sync2jira dependencies +RUN dnf -y install \ + git \ + python3-pip \ + python3-requests \ + python3-jira \ + python3-fedmsg-core \ + python3-pygithub \ + python3-jinja2 \ + python3-pypandoc \ + python3-requests-kerberos \ + python3-flask \ + && dnf -y clean all + +ARG SYNC2JIRA_GIT_REPO=https://github.com/release-engineering/Sync2Jira.git +ARG SYNC2JIRA_GIT_REF=master +ARG SYNC2JIRA_CACERT_URL= +ARG SYNC2JIRA_VERSION= +ENV SYNC2JIRA_VERSION=$SYNC2JIRA_VERSION +ENV SYNC2JIRA_CACERT_URL=$SYNC2JIRA_CACERT_URL + +USER root + +# Create Sync2Jira folder +RUN mkdir -p /usr/local/src/sync2jira + +# Copy over our repo +COPY . /usr/local/src/sync2jira + +# Install Sync2Jira +RUN pip3 install --no-deps -v /usr/local/src/sync2jira + +# To deal with JIRA issues (i.e. 
SSL errors) +RUN chmod g+w /etc/pki/tls/certs/ca-bundle.crt +RUN chmod 777 /usr/local/src/sync2jira/openshift/docker-entrypoint.sh + +USER 1001 + +ENTRYPOINT ["/usr/local/src/sync2jira/openshift/docker-entrypoint.sh"] + +CMD ["python3", "/usr/local/src/sync2jira/sync-page/event-handler.py"] diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..12456bb --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,5 @@ +include LICENSE +include README.md +include fedmsg.d/sync2jira.py +include requirements.txt +include test-requirements.txt \ No newline at end of file diff --git a/README.md b/README.md index 6f43641..19a6617 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,36 @@ -# Webhook Sync2Jira +# Sync2Jira -This is a fork of [this](https://github.com/release-engineering/Sync2Jira/tree/master/sync2jira) version of Sync2Jira that aims to use GitHub webhook instead of relying on the Fed Message Bus. This was created as the original Sync2Jira did not support private repos, which was something that we needed for our application. +[![Documentation Status](https://readthedocs.org/projects/sync2jira/badge/?version=master)](https://sync2jira.readthedocs.io/en/master/?badge=master) +[![Docker Repository on Quay](https://quay.io/repository/redhat-aqe/sync2jira/status "Docker Repository on Quay")](https://quay.io/repository/redhat-aqe/sync2jira) +[![Build Status](https://travis-ci.org/release-engineering/Sync2Jira.svg?branch=master)](https://travis-ci.org/release-engineering/Sync2Jira) +[![Coverage Status](https://coveralls.io/repos/github/release-engineering/Sync2Jira/badge.svg?branch=master)](https://coveralls.io/github/release-engineering/Sync2Jira?branch=master) +![Python 3.7](https://img.shields.io/badge/python-3.7-blue.svg) +## What is Sync2Jira? +This is a process that listens to activity on upstream repos on pagure and +github via fedmsg, and syncs new issues there to a Jira instance elsewhere. 
-Similar documentation can be found [here](https://sync2jira.readthedocs.io/en/master/quickstart.html). + +## Documentation +Documentation is hosted on ReadTheDocs.io and can be found [here](https://sync2jira.readthedocs.io/en/latest/) + +## Configuration + +We have set up a quick-start [here](https://sync2jira.readthedocs.io/en/master/quickstart.html) + +Configuration is in `fedmsg.d/`. + +You can maintain a mapping there that allows you to match one upstream repo (say, 'pungi' on pagure) to a downstream project/component pair in Jira (say, 'COMPOSE', and 'Pungi'). + +On startup, if the initialize option is set to True in the fedmsg.d/ config, then all open issues from all upstream repos will be scraped and added to Jira if they are absent. + +If the testing option is set to True, then the script will perform a "dry run" and not actually add any new issues to Jira. + +Please look at our documentation [here](https://sync2jira.readthedocs.io/en/master/config-file.html) for a full list of what can be synced and how to set it up. + +## Branches + +We will maintain three branches + +1. `master` - This will be where our main code and most up to date code lives +1. `stage` - This will be our staging configuration. PR's will merge through stage to master +1. 
`openshift-build` - This branch will maintain OpenShift-Build related information \ No newline at end of file diff --git a/config/config.json b/config/config.json deleted file mode 100644 index d176db8..0000000 --- a/config/config.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "sync2jira": { - "admins": [{"admin_username": "admin_email@demo.com"}], - - "mailing-list": "some_email@demo.com", - - "listen": true, - - "testing": true, - - "develop": false, - - "github_token": "YOUR_TOKEN", - - "confluence_statistics": false, - - "legacy_matching": false, - - "default_jira_instance": "example", - "jira": { - "example": { - "options": { - "server": "https://some_jira_server_somewhere.com", - "verify": true - }, - "basic_auth": ["YOU_USERNAME", "YOUR_PASSWORD"] - } - }, - - "map": { - "github": { - "GITHUB_USERNAME/Demo_project": {"project": "FACTORY", "component": "gitbz", - "updates": [], "sync": ["pullrequest", "issue"]} - } - } - } -} \ No newline at end of file diff --git a/continuous-deployment/continuous-deployment.md b/continuous-deployment/continuous-deployment.md new file mode 100644 index 0000000..c97b766 --- /dev/null +++ b/continuous-deployment/continuous-deployment.md @@ -0,0 +1,8 @@ +# Continuous Deployment + +## TLDR +We use a message bus configured to listen to a topic pushed by [RepoTracker](https://github.com/release-engineering/repotracker). Once we find a message that indicates a change in any of the branches we're watching, we tag the new image in OpenShift which triggers a new deployment. + + +## Where can I learn more? +You can check our documentation [here](https://sync2jira.readthedocs.io/en/latest/continuous_deployment.html) to learn more about how to configure this feature in your project. 
\ No newline at end of file
diff --git a/continuous-deployment/continuous_deployment.py b/continuous-deployment/continuous_deployment.py
new file mode 100644
index 0000000..658c795
--- /dev/null
+++ b/continuous-deployment/continuous_deployment.py
@@ -0,0 +1,229 @@
+# Built-In Modules
+import os
+import requests
+import json
+import traceback
+import logging
+
+# 3rd Party Modules
+import jinja2
+from rhmsg.activemq.consumer import AMQConsumer
+
+# Local Modules
+from sync2jira.mailer import send_mail
+from sync2jira.main import load_config
+
+# Global Variables
+handlers = [
+    'repotracker.container.tag.updated'
+]
+# Logging
+log = logging.getLogger(__name__)
+# OpenShift Related
+TOKEN = os.environ['TOKEN']
+STAGE_TOKEN = os.environ['STAGE_TOKEN']
+ENDPOINT = os.environ['ENDPOINT']
+# Message Bus Related
+CERT = os.environ['CERT']
+KEY = os.environ['KEY']
+CA_CERTS = os.environ['CA_CERTS']
+ACTIVEMQ_QUERY = os.environ['ACTIVEMQ_QUERY']
+ACTIVEMQ_URL_1 = os.environ['ACTIVEMQ_URL_1']
+ACTIVEMQ_URL_2 = os.environ['ACTIVEMQ_URL_2']
+# Message Bus Query Related
+ACTIVEMQ_REPO_NAME = os.environ['ACTIVEMQ_REPO_NAME']
+# SEND_EMAILS
+SEND_EMAILS = os.environ['SEND_EMAILS']
+
+def main():
+    """
+    Main function to start listening
+    """
+    try:
+
+        # Create our consumer
+        log.info("Connecting to ACTIVEMQ as a consumer...")
+        c = AMQConsumer(
+            urls=(ACTIVEMQ_URL_1, ACTIVEMQ_URL_2),
+            certificate=CERT,
+            private_key=KEY,
+            trusted_certificates=CA_CERTS
+        )
+        # Start listening
+        log.info('Starting up CD service...')
+        c.consume(
+            ACTIVEMQ_QUERY,
+            lambda msg, data: handle_message(msg, data)
+        )
+
+    except Exception as e:
+        log.error(f"Error!\nException {e}\nSending email..")
+        report_email('failure', 'Continuous-Deployment-Main', traceback.format_exc())
+
+
+def handle_message(msg, data):
+    """
+    Handle incoming message
+    :param Dict msg: Incoming message
+    :param Dict data: Incoming data, if any
+    :return:
+    """
+    msg_dict = json.loads(msg.body)
+    log.info(f"Encountered message: {msg_dict}")
+    status = None
+    if msg_dict['repo'] == ACTIVEMQ_REPO_NAME:
+        if msg_dict['tag'] == "master":
+            status, ret = update_tag(master=True)
+        elif msg_dict['tag'] == "stage":
+            status, ret = update_tag(stage=True)
+        elif msg_dict['tag'] == "openshift-build":
+            status, ret = update_tag(openshift_build=True)
+        elif msg_dict['tag'] == "sync-page":
+            status, ret = update_tag(sync_page=True)
+        else:
+            return
+        if status:
+            report_email('success', namespace=msg_dict['tag'])
+        else:
+            report_email('failure', data=msg_dict)
+
+
+def update_tag(master=False, stage=False, openshift_build=False, sync_page=False):
+    """
+    Update OpenShift master image when fedmsg topic comes in.
+
+    :param Bool master: If we are tagging master
+    :param Bool stage: If we are tagging stage
+    :param Bool openshift_build: If we are tagging openshift-build
+    :param Bool sync_page: If we are tagging sync_page
+    :rtype (Bool, response):
+    :return: (Indication if we updated our image on OpenShift, API call response)
+    """
+    # Format the URL
+    # Note: Here we assume that we have a pod for openshift-build running under the pod for stage.
+    if master:
+        umb_url = f"https://{ENDPOINT}/apis/image.openshift.io/v1/namespaces/sync2jira/imagestreamtags/sync2jira:latest"
+        namespace = 'sync2jira'
+        name = 'sync2jira:latest'
+        tag = 'latest'
+    elif sync_page:
+        umb_url = f"https://{ENDPOINT}/apis/image.openshift.io/v1/namespaces/sync2jira/imagestreamtags/sync2jira-sync-page:latest"
+        namespace = 'sync2jira'
+        name = 'sync2jira-sync-page:latest'
+        tag = 'sync-page'
+    elif stage:
+        umb_url = f"https://{ENDPOINT}/apis/image.openshift.io/v1/namespaces/sync2jira-stage/imagestreamtags/sync2jira-stage:latest"
+        namespace = 'sync2jira-stage'
+        name = 'sync2jira-stage:latest'
+        tag = 'stage'
+    elif openshift_build:
+        umb_url = f"https://{ENDPOINT}/apis/image.openshift.io/v1/namespaces/sync2jira-stage/imagestreamtags/openshift-build:latest"
+        namespace = 'sync2jira-stage'
+        name = 'openshift-build:latest'
+        tag = 'openshift-build'
+    else:
+        raise Exception("No type passed")
+
+    # Make our put call
+    try:
+        ret = requests.put(umb_url,
+                           headers=create_header(namespace),
+                           data=json.dumps({
+                               "kind": "ImageStreamTag",
+                               "apiVersion": "image.openshift.io/v1",
+                               "metadata": {
+                                   "name": name,
+                                   "namespace": namespace,
+                                   "creationTimestamp": None
+                               },
+                               "tag": {
+                                   "name": "",
+                                   "annotations": None,
+                                   "from": {
+                                       "kind": "DockerImage",
+                                       "name": f"quay.io/redhat-aqe/sync2jira:{tag}"
+                                   },
+                                   "generation": 0,
+                                   "importPolicy": {},
+                                   "referencePolicy": {
+                                       "type": "Source"
+                                   }
+                               },
+                               "generation": 0,
+                               "lookupPolicy": {
+                                   "local": False
+                               },
+                               "image": {
+                                   "metadata": {
+                                       "creationTimestamp": None
+                                   },
+                                   "dockerImageMetadata": None,
+                                   "dockerImageLayers": None
+                               }
+                           }))
+    except Exception as e:
+        log.error(f"Failure updating image stream tag.\nException: {e}")
+        report_email('failure', namespace, e)
+        # 'ret' was never bound; report the failure instead of raising NameError below
+        return False, None
+    if ret.status_code == 200:
+        log.info(f"Tagged new image for {name}")
+        return True, ret
+    else:
+        log.error(f"Failure updating image stream tag.\nResponse: {ret}")
+        return False, ret
+
+
+def report_email(type, namespace=None, data=None):
+    """
+    Helper function to alert admins in case of failure.
+
+    :param String type: Type to be used
+    :param String namespace: Namespace being used
+    :param String data: Data being used
+    """
+    if SEND_EMAILS == '0':
+        log.info(f"SEND_EMAILS set to 0 not sending email. Type: {type}. Namespace: {namespace}, Data: {data}")
+        return
+    # Load in the Sync2Jira config
+    config = load_config()
+
+    # Email our admins with the traceback
+    templateLoader = jinja2.FileSystemLoader(searchpath='/usr/local/src/sync2jira/continuous-deployment')
+    templateEnv = jinja2.Environment(loader=templateLoader)
+
+    # Load in the type of template
+    if type == 'failure':
+        template = templateEnv.get_template('failure_template.jinja')
+        html_text = template.render(namespace=namespace, response=data)
+    elif type == 'success':
+        template = templateEnv.get_template('success_template.jinja')
+        html_text = template.render(namespace=namespace)
+
+    # Send mail
+    send_mail(recipients=[config['sync2jira']['mailing-list']],
+              cc=None,
+              subject=f"Sync2Jira Build Image Update Status: {type}!",
+              text=html_text)
+
+
+def create_header(namespace):
+    """
+    Helper function to create default header
+    :param string namespace: Namespace to indicate which token to use
+    :rtype Dict:
+    :return: Default header
+    """
+    if namespace in ['sync2jira-stage']:
+        token = STAGE_TOKEN
+    else:
+        token = TOKEN
+    return {
+        'Authorization': f'Bearer {token.strip()}',
+        'Accept': 'application/json',
+        'Content-Type': 'application/json',
+    }
+
+
+if __name__ == '__main__':
+    main()
diff --git a/continuous-deployment/failure_template.jinja b/continuous-deployment/failure_template.jinja
new file mode 100644
index 0000000..4100a3f
--- /dev/null
+++ b/continuous-deployment/failure_template.jinja
@@ -0,0 +1,7 @@
+<html>
+<body>

Looks like updating the Sync2Jira {{ namespace }} on OpenShift failed!

+

Here is some more info:

+ {{ response }} + + \ No newline at end of file diff --git a/continuous-deployment/install_rhmsg.sh b/continuous-deployment/install_rhmsg.sh new file mode 100755 index 0000000..2e04ae5 --- /dev/null +++ b/continuous-deployment/install_rhmsg.sh @@ -0,0 +1,29 @@ +#!/usr/bin/bash + +main() { + if [ -n "$RCM_TOOLS_REPO" ]; then + repo_file=/usr/local/src/sync2jira/continuous-deployment/rcm-tools-fedora.repo + curl -L -o $repo_file $RCM_TOOLS_REPO + # Since we don't trust any internal CAs at this point, we must connect over + # http + sed -i 's/https:/http:/g' $repo_file + + # Install dnf-plugins core to allow for config-manager + yum install dnf-plugins-core -y + echo "Installed dnf-plugins-core" + + # Add our .repo file using config-manager + dnf config-manager --add-repo $repo_file + echo "Added .repo file" + + # Install python3-rhmsg + dnf install -y \ + --setopt=deltarpm=0 \ + --setopt=install_weak_deps=false \ + --setopt=tsflags=nodocs \ + python3-rhmsg + dnf clean all + echo "Installed rhmsg" + fi +} +main diff --git a/continuous-deployment/success_template.jinja b/continuous-deployment/success_template.jinja new file mode 100644 index 0000000..f20e5f8 --- /dev/null +++ b/continuous-deployment/success_template.jinja @@ -0,0 +1,5 @@ + + +

Sync2Jira {{ namespace }} was successfully updated!

+ + \ No newline at end of file diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..69fe55e --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,19 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/docs-requirements.txt b/docs/docs-requirements.txt new file mode 100644 index 0000000..52b04f2 --- /dev/null +++ b/docs/docs-requirements.txt @@ -0,0 +1 @@ +sphinx_rtd_theme \ No newline at end of file diff --git a/docs/source/adding-new-repo-guide.rst b/docs/source/adding-new-repo-guide.rst new file mode 100644 index 0000000..4998af0 --- /dev/null +++ b/docs/source/adding-new-repo-guide.rst @@ -0,0 +1,25 @@ +Adding New Repos Guide +======================= + +Have you ever wanted to add new upstream repos? Well now you can! + +1. First ensure that your upstream repo is on the Fed Message Bus +2. 
Now add two new functions to `upstream.py`
+   * :code:`def handle_REPO-NAME_message(msg, config)`
+      * This function will take in a fedmessage message (msg) and the config dict
+      * This function will return a sync2jira.Intermediary.Issue object
+      * This function will be used when listening to the message bus
+   * :code:`def REPO-NAME_issues(upstream, config)`
+      * This function will take in an upstream repo name and the config dict
+      * This function will return a generator of sync2jira.Intermediary.Issue objects that contain all upstream Issues
+      * This function will be used to initialize and sync upstream/downstream issues
+3. Now modify the `main.py` functions:
+   * :code:`def initialize(config)`
+      * Add another section (like Pagure and GitHub) to utilize the :code:`REPO-NAME_issues` function you just made.
+   * :code:`def listen(config)`
+      * Add another section to the if statement under Pagure and GitHub
+      * :code:`elif 'REPO-NAME' in suffix:`
+      * Now utilize the :code:`handle_REPO-NAME_message` function you just made
+4. If all goes smoothly, your new repo should work with Sync2Jira!
+
+.. note:: If you want to submit a Pull Request, ensure that you add appropriate Unit Tests, Tox is passing, and you have appropriate documentation!
diff --git a/docs/source/conf.py b/docs/source/conf.py
new file mode 100644
index 0000000..b0f62f8
--- /dev/null
+++ b/docs/source/conf.py
@@ -0,0 +1,185 @@
+# -*- coding: utf-8 -*-
+#
+# Configuration file for the Sphinx documentation builder.
+#
+# This file does only contain a selection of the most common options. For a
+# full list see the documentation:
+# http://www.sphinx-doc.org/en/master/config
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+# +import os +import sys +sys.path.insert(0, os.path.abspath('..')) + + +# -- Project information ----------------------------------------------------- + +project = u'Sync2Jira' +copyright = u'2019, Ralph Bean' +author = u'Ralph Bean' + +# The short X.Y version +version = u'2.0' +# The full version, including alpha/beta/rc tags +release = u'2.0' + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.mathjax', + 'sphinx.ext.ifconfig', + 'sphinx.ext.viewcode', + 'sphinx.ext.githubpages', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['ntemplates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = None + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. 
See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['nstatic'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +html_sidebars = { '**': ['globaltoc.html', 'searchbox.html'] } + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'Sync2Jiradoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'Sync2Jira.tex', u'Sync2Jira Documentation', + u'Ralph Bean', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. 
List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'sync2jira', u'Sync2Jira Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'Sync2Jira', u'Sync2Jira Documentation', + author, 'Sync2Jira', 'One line description of project.', + 'Miscellaneous'), +] + + +# -- Options for Epub output ------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] + + +# -- Extension configuration ------------------------------------------------- \ No newline at end of file diff --git a/docs/source/config-file.rst b/docs/source/config-file.rst new file mode 100644 index 0000000..df1b08a --- /dev/null +++ b/docs/source/config-file.rst @@ -0,0 +1,187 @@ +Config File +=========== +The config file is made up of multiple parts + +.. code-block:: python + + 'admins': ['demo_jira_username'] + +* Admins can be users who manage Sync2Jira. They will be cc'd in any emails regarding duplicate issues found. + +.. code-block:: python + + 'mailing-list': 'demo_email@demo.com' + +* Mailing list is used to alert users when there is a failure. A failure email with the traceback will be sent to the email address. + +.. code-block:: python + + 'initialize': 1 + +* Initialization set to 1 will ensure that there is an initial sync done when Sync2Jira starts. + It is recommended to leave this as 1 to ensure that all issues are in sync. + +.. 
code-block:: python + + 'testing': True + +* Testing is a flag that will determine if any changes are actually made downstream (on JIRA tickets). + Set to false if you are developing and don't want any changes to take effect. + +.. code-block:: python + + 'develop': False + +* If the develop flag is set to :code:`False` then Sync2Jira will perform a sentinel query after + getting a JIRA client and failure email will be sent anytime the service fails. + +.. code-block:: python + + 'github_token': 'YOUR_TOKEN', + +* This is where you can enter your GitHub API token. + +.. code-block:: python + + 'default_jira_instance': 'example' + +* This is the default JIRA instance to be used if none is provided in the project. + +.. code-block:: python + + 'confluence_statistics': False + +* Adds the ability to post statistics to confluence. See note below. + +.. note:: Optional: Confluence Statistic Support + + a. What is it? If :code:`confluence_statistics` is set to `True` in the config file (default `False`) you can set up a Confluence page and space to post statistic too (i.e. how many comments synced etc) + + b. Set up the following variables: + 1. :code:`CONFLUENCE_SPACE` :: The Confluence space we're posting too + 2. :code:`CONFLUENCE_PAGE_TITLE` :: The Confluence page we're posting too + 3. :code:`CONFLUENCE_URL` :: The Confluence URL + 4. :code:`CONFLUENCE_USERNAME` :: Confluence username data + 5. :code:`CONFLUENCE_PASSWORD` :: Confluence password data + + c. Create the related confluence page and space. Make sure to add the template (use :code:`sync2jira/confluence_stat.jinja` and replace the JINJA code with 0's + + +.. code-block:: python + + 'jira': { + 'example': { + 'options': { + 'server': 'https://some_jira_server_somewhere.com', + 'verify': True, + }, + 'basic_auth': ('YOU_USERNAME', 'YOUR_PASSWORD'), + }, + }, + +* Here you can configure multiple JIRA instances if you have projects with differing downstream JIRA instances. 
+ Ensure to name them approproialty, in name of the JIRA instance above is `example`. + +.. code-block:: python + + 'map': { + 'pagure': { + 'Demo_project': {'project': 'FACTORY', 'component': 'gitbz', + 'issue_updates': [...], 'pr_updates': [...], 'mapping': [...], 'labels': [...], + 'owner': 'jira_username'}, + # 'koji': { 'project': 'BREW', 'component': None, }, + }, + 'github': { + 'GITHUB_USERNAME/Demo_project': {'project': 'FACTORY', 'component': 'gitbz', + 'issue_updates': [...], 'pr_updates': [...], 'mapping': [...], 'labels': [...], + 'owner': 'jira_username'}, + }, + }, + +* You can add the following to your project configuration: + + * :code:`'project'` + * Downstream project to sync with + * :code:`'component'` + * Downstream component to sync with + * :code:`sync` + * This array contains information on what to sync from upstream repos (i.e. 'issue' and/or 'pullrequest') + * :code:`'owner'` + * Optional (Recommended): Alerts the owner of an issue if there are duplicate issues present + * :code:`'qa-contact'` + * Optional: Automatically add a QA contact field when issues are created + * :code:`'epic-link'` + * Optional: Pass the downstream key to automatically create an epic-link when issues are created + * :code:`'labels': ['tag1'..]` + * Optional: Field to have custom set labels on all downstream issues created. + * :code:`'EXD-Service': {'guild': 'SOME_GUILD', 'value': 'SOME_VALUE'}` + * Sync custom EXD-Service field + + .. note:: + + :pullrequest: After enabling PR syncing, just type "Relates to JIRA: XXXX-1234" in the comment or description of the PR to sync with a JIRA issue. After this, updates such as when it has been merged will automatically be added to the JIRA ticket. + +* You can add your projects here. 
The 'project' field is associated with downstream JIRA projects, and 'component' with downstream components + You can add the following to the :code:`issue_updates` array: + + * :code:`'comments'` + * Sync comments and comment edits + * :code:`{'tags': {'overwrite': True/False}}` + * Sync tags, do/don't overwrite downstream tags + * :code:`{'fixVersion': {'overwrite': True/False}}` + * Sync fixVersion (downstream milestone), do/don't overwrite downstream fixVersion + * :code:`{'assignee': {'overwrite': True/False}}` + * Sync assignee (for Github only the first assignee will sync) do/don't overwrite downstream assignee + * :code:`'description'` + * Sync description + * :code:`'title'` + * Sync title + * :code:`{'transition': True/'CUSTOM_TRANSITION'}` + * Sync status (open/closed), Sync only status/Attempt to transition JIRA ticket to CUSTOM_TRANSITION on upstream closure + * :code:`{'on_close': {'apply_lables': ['label', ...]}}` + * When the upstream issue is closed, apply additional labels on the corresponding Jira ticket. + * :code:`github_markdown` + * If description syncing is turned on, this flag will convert Github markdown to plaintext. This uses the pypandoc module. + * :code:`upstream_id` + * If selected this will add a comment to all newly created JIRA issue in the format 'UPSTREAM_PROJECT-#1' where the number indicates the issue ID. This allows users to search for the issue on JIRA via the issue number. + * :code:`url` + * This flag will add the upstream url to the bottom of the JIRA ticket + + .. note:: + + :Overwrite: Setting this to :code:`True` will ensure that Upstream (GitHub or Pagure) values will overwrite downstream ones (i.e. if it's empty upstream it'll be empty downstream) + :CUSTOM_TRANSITION: Setting this value will get Sync2Jira to automatically transition downstream tickets once their upstream counterparts get closed. Set this to whatever 'closed' means downstream. + +* You can add your projects here. 
The 'project' field is associated with downstream JIRA projects, and 'component' with downstream components + You can add the following to the :code:`pr_updates` array: + + * :code:`{'merge_transition': 'CUSTOM_TRANSITION'}` + * Sync when upstream PR gets merged. Attempts to transition JIRA ticket to CUSTOM_TRANSITION on upstream merge + * :code:`{'link_transition': 'CUSTOM_TRANSITION'}` + * Sync when upstream PR gets linked. Attempts to transition JIRA ticket to CUSTOM_TRANSITION on upstream link + +* You can add the following to the mapping array. This array will map an upstream field to the downstream counterpart with XXX replaced. + + * :code:`{'fixVersion': 'Test XXX'}` + * Maps upstream milestone (suppose it's called 'milestone') to downstream fixVersion with a mapping (for our example it would be 'Test milestone') + +* It is strongly encouraged for teams to use the :code:`owner` field. If configured, owners will be alerted if Sync2Jira finds duplicate downstream issues. + Further the owner will be used as a default in case the program is unable to find a valid assignee. + +.. code-block:: python + + 'filters': { + 'github': { + # Only sync multi-type tickets from bodhi. + 'fedora-infra/bodhi': {'status': 'open', 'milestone': 4, }, + }, + } + +* You can also add filters per-project. The following can be added to the filter dict: + + * :code:`status` + * Open/Closed + * :code:`tags` + * List of tags to look for + * :code:`milestone` + * Upstream milestone status diff --git a/docs/source/confluence_client.rst b/docs/source/confluence_client.rst new file mode 100644 index 0000000..7e9734f --- /dev/null +++ b/docs/source/confluence_client.rst @@ -0,0 +1,8 @@ +Confluence Client +================= + +.. 
automodule:: sync2jira.confluence_client + :members: + :private-members: + + diff --git a/docs/source/continuous_deployment.rst b/docs/source/continuous_deployment.rst new file mode 100644 index 0000000..aa8b064 --- /dev/null +++ b/docs/source/continuous_deployment.rst @@ -0,0 +1,36 @@ +Continuous Deployment +====================== +We utilize OpenShift to deploy our Sync2Jira instance. Here we assume you have configured your quay.io repo to push to some sort of message bus. This will likely have to be done with an external script (something we do not support). + +The following environmental variables will have to be set: + +Related to OpenShift: + 1. :code:`TOKEN` :: OpenShift Token to be used + 2. :code:`STAGE_TOKEN` :: OpenShift Stage namespace Token to be used + 3. :code:`SEND_EMAILS` :: 0/1 if we should/shouldn't send emails + 4. :code:`DEFAULT_FROM` :: Default email address + 5. :code:`DEFAULT_SERVER` :: Email server to use + 6. :code:`RCM_TOOLS_REPO` :: Repo URL to download rhmsg + 7. :code:`CA_URL` :: CA URL + 8. :code:`ENDPOINT` :: OpenShift Endpoint to use + 9. :code:`NAMESPACE` :: OpenShift Namespace to use +Message Bus Related: + 1. :code:`CERT` :: Cert file that should be used (can be in .pem format) + 2. :code:`KEY` :: Key file that should be used (in .key format) + 3. :code:`CA_CERTS` :: CA Certs that should be used + 4. :code:`ACTIVEMQ_QUERY` :: Query that we should be using + 5. :code:`ACTIVEMQ_URL_1` :: Message Bus URL are tuple, this is the first part of that tuple + 6. :code:`ACTIVEMQ_URL_2` :: Message Bus URL are tuple, this is the second part of that tuple + 7. :code:`ACTIVEMQ_REPO_NAME` :: Repo (or topic/category) that we should be listening for + +Once these variables have been set in an OpenShift pod (using `Dockerfile.deploy` image) it will listen for messages that trigger an OpenShift tag (i.e. :code:`oc tag ...`). +The script is set up for 3 different tags: + 1. `master` + 2. `stage` + 3. 
`openshift-build` + +Please make sure these 3 branches exists and are maintained. Also make sure to examine the code and set up your project correctly. We assume that `openshift-build` is running under the stage namespace. + +.. note:: How do we listen to repo builds? + + We use `repotracker ` to listen for repo changes. \ No newline at end of file diff --git a/docs/source/downstream_issue.rst b/docs/source/downstream_issue.rst new file mode 100644 index 0000000..edef51e --- /dev/null +++ b/docs/source/downstream_issue.rst @@ -0,0 +1,8 @@ +Downstream Issue +================ + +.. automodule:: sync2jira.downstream_issue + :members: + :private-members: + + diff --git a/docs/source/downstream_pr.rst b/docs/source/downstream_pr.rst new file mode 100644 index 0000000..77f503e --- /dev/null +++ b/docs/source/downstream_pr.rst @@ -0,0 +1,8 @@ +Downstream PR +============= + +.. automodule:: sync2jira.downstream_pr + :members: + :private-members: + + diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..8976992 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,33 @@ +Sync2Jira documentation +===================================== + + +.. toctree:: + :maxdepth: 2 + :caption: Setup Guide + + quickstart + config-file + continuous_deployment + sync_page + + +.. toctree:: + :maxdepth: 1 + :caption: Adding New Repos + + adding-new-repo-guide + + +.. toctree:: + :maxdepth: 5 + :caption: Code Documentation + + main + upstream_pr + upstream_issue + downstream_pr + downstream_issue + intermediary + mailer + confluence_client \ No newline at end of file diff --git a/docs/source/intermediary.rst b/docs/source/intermediary.rst new file mode 100644 index 0000000..55b5f93 --- /dev/null +++ b/docs/source/intermediary.rst @@ -0,0 +1,8 @@ +Intermediary +============ + +Sync2Jira converts upstream issues/PRs into custom Issue/PR objects. + +.. 
automodule:: sync2jira.intermediary + :members: + :private-members: diff --git a/docs/source/mailer.rst b/docs/source/mailer.rst new file mode 100644 index 0000000..1663355 --- /dev/null +++ b/docs/source/mailer.rst @@ -0,0 +1,7 @@ +Mailer +======= + +.. automodule:: sync2jira.mailer + :members: + :private-members: + diff --git a/docs/source/main.rst b/docs/source/main.rst new file mode 100644 index 0000000..a2720be --- /dev/null +++ b/docs/source/main.rst @@ -0,0 +1,6 @@ +Main +==== + +.. automodule:: sync2jira.main + :members: + :private-members: diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst new file mode 100644 index 0000000..1bb1ec1 --- /dev/null +++ b/docs/source/quickstart.rst @@ -0,0 +1,70 @@ +Quick Start +============ + +Want to quickly get started working with Sync2Jira? Follow these steps: + +1. **First open up** :code:`fedmsg.d/sync2jira.py` + +2. Enter your GitHub token which you can get `here `_ + .. code-block:: python + + 'github_token': 'YOUR_TOKEN', + +3. Enter relevent JIRA information + .. code-block:: python + + 'default_jira_instance': 'example', + 'jira': { + 'example': { + 'options': { + 'server': 'https://some_jira_server_somewhere.com', + 'verify': True, + }, + 'basic_auth': ('YOU_USERNAME', 'YOUR_PASSWORD'), + }, + }, + + .. note:: You might have to set verify to False + +4. Add your upstream repos to the `map` section + .. code-block:: python + + 'map': { + 'pagure': { + 'Demo_project': {'project': 'FACTORY', 'component': 'gitbz', + 'updates': [...], 'sync': [..]}, + # 'koji': { 'project': 'BREW', 'component': None, }, + }, + 'github': { + 'GITHUB_USERNAME/Demo_project': {'project': 'FACTORY', 'component': 'gitbz', + 'updates': [...], 'sync': [..]}, + }, + }, + + .. note:: You can learn more about what can go into the updates list `here `_ + +5. Finally you can tweak the config files optional settings to your liking + .. 
code-block:: python + + # Admins to be cc'd in duplicate emails + 'admins': ['demo_jira_username'], + # Scrape sources at startup + 'initialize': True, + # Don't actually make changes to JIRA... + 'testing': True, + + 'filters': { + 'github': { + # Only sync multi-type tickets from bodhi. + 'fedora-infra/bodhi': {'state': 'open', 'milestone': 4, }, + }, + } +6. Now that you're done with the config file you can install sync2jira and run + .. code-block:: shell + + python setup.py install + >> .... + >> Finished processing dependencies for sync2jira==1.7 + sync2jira + .. note:: You might have to add `config['validate_signatures'] = False`. + You can find out more under the `main `_. diff --git a/docs/source/sync_page.rst b/docs/source/sync_page.rst new file mode 100644 index 0000000..0e17bf8 --- /dev/null +++ b/docs/source/sync_page.rst @@ -0,0 +1,22 @@ +Sync Page +====================== +We noticed that sometimes tickets would be lost and not sync. This would require taking down the entire service in order to re-sync that one ticket. To fix this we created a flask micro-service that provides a UI for users to sync individual repos. + +The following environmental variables will have to be set: + +Related to OpenShift: + 1. :code:`CA_URL` :: CA URL used for sync2jira + 2. :code:`DEFAULT_SERVER` :: Default server to use for mailing + 3. :code:`DEFAULT_FROM` :: Default from to use for mailing + 4. :code:`USER` :: JIRA username + 5. :code:`CONFLUENCE_SPACE` :: Confluence space (should be set to "mock_confluence_space" as we don't want any confluence syncing) + 6. :code:`INITIALIZE` :: True/False Initialize our repos on startup (Should be set to "0") + 7. :code:`IMAGE_URL` :: Image URL:TAG to pull from + 8. :code:`JIRA_PNT_PASS` :: PNT password in base64 + 9. :code:`JIRA_OMEGAPRIME_PASS` :: Omegaprime password in base64 + 10. :code:`GITHUB_TOKEN` :: GitHub token in base64 + 11. 
:code:`PAAS_DOMAIN` :: Domain to use for the service + +You can also use the OpenShift template to quickly deploy this service (it can be found in the repo under :code:`openshift/sync2jira-sync-page-template.yaml`) + +Once deployed, you can go to the url :code:`sync2jira-page-sync.PAAS_DOMAIN` to select and sync individual repos! \ No newline at end of file diff --git a/docs/source/upstream_issue.rst b/docs/source/upstream_issue.rst new file mode 100644 index 0000000..443de82 --- /dev/null +++ b/docs/source/upstream_issue.rst @@ -0,0 +1,5 @@ +Upstream Issue +============== + +.. automodule:: sync2jira.upstream_issue + :members: diff --git a/docs/source/upstream_pr.rst b/docs/source/upstream_pr.rst new file mode 100644 index 0000000..419052b --- /dev/null +++ b/docs/source/upstream_pr.rst @@ -0,0 +1,5 @@ +Upstream PR +============ + +.. automodule:: sync2jira.upstream_pr + :members: diff --git a/fedmsg.d/sync2jira.py b/fedmsg.d/sync2jira.py new file mode 100644 index 0000000..86c1cf2 --- /dev/null +++ b/fedmsg.d/sync2jira.py @@ -0,0 +1,74 @@ +# This file is part of sync2jira. +# Copyright (C) 2016 Red Hat, Inc. +# +# sync2jira is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# sync2jira is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with sync2jira; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +# +# Authors: Ralph Bean + +config = { + 'sync2jira': { + # Admins to be cc'd in duplicate emails + 'admins': [{'admin_username': 'admin_email@demo.com'}], + + # Mailing list email to send failure-email notices too + 'mailing-list': 'some_email@demo.com', + + # Listen on the message bus + 'listen': True, + + # Don't actually make changes to JIRA... + 'testing': True, + + # Set to True when developing to disable sentinel query + 'develop': False, + + # Your Github token + 'github_token': 'YOUR_TOKEN', + + # If we should update a Confluence page for stats + 'confluence_statistics': False, + + 'legacy_matching': False, + + 'default_jira_instance': 'example', + 'jira': { + 'example': { + 'options': { + 'server': 'https://some_jira_server_somewhere.com', + 'verify': True, + }, + 'basic_auth': ('YOU_USERNAME', 'YOUR_PASSWORD'), + }, + }, + + 'map': { + 'pagure': { + 'Demo_project': {'project': 'FACTORY', 'component': 'gitbz', + 'updates': [...], 'sync': ['pullrequest', 'issue']}, + # 'koji': { 'project': 'BREW', 'component': None, }, + }, + 'github': { + 'GITHUB_USERNAME/Demo_project': {'project': 'FACTORY', 'component': 'gitbz', + 'updates': [...], 'sync': ['pullrequest', 'issue']}, + }, + }, + 'filters': { + 'github': { + # Only sync multi-type tickets from bodhi. + 'fedora-infra/bodhi': {'status': 'open', 'milestone': 4, }, + }, + } + }, +} \ No newline at end of file diff --git a/openshift-build/app.yml b/openshift-build/app.yml new file mode 100644 index 0000000..58775e3 --- /dev/null +++ b/openshift-build/app.yml @@ -0,0 +1,139 @@ +# This is a GitHub App Manifest. These settings will be used by default when +# initially configuring your GitHub App. +# +# NOTE: changing this file will not update your GitHub App settings. 
+# You must visit github.com/settings/apps/your-app-name to edit them. +# +# Read more about configuring your GitHub App: +# https://probot.github.io/docs/development/#configuring-a-github-app +# +# Read more about GitHub App Manifests: +# https://developer.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/ + +# The list of events the GitHub App subscribes to. +# Uncomment the event names below to enable them. +default_events: +- check_run +- check_suite +# - commit_comment +# - create +# - delete +# - deployment +# - deployment_status +# - fork +# - gollum +# - issue_comment +# - issues +# - label +# - milestone +# - member +# - membership +# - org_block +# - organization +# - page_build +# - project +# - project_card +# - project_column +# - public +# - pull_request +# - pull_request_review +# - pull_request_review_comment +# - push +# - release +# - repository +# - repository_import +# - status +# - team +# - team_add +# - watch + +# The set of permissions needed by the GitHub App. The format of the object uses +# the permission name for the key (for example, issues) and the access type for +# the value (for example, write). +# Valid values are `read`, `write`, and `none` +default_permissions: + # Repository creation, deletion, settings, teams, and collaborators. + # https://developer.github.com/v3/apps/permissions/#permission-on-administration + # administration: read + + # Checks on code. + # https://developer.github.com/v3/apps/permissions/#permission-on-checks + checks: write + + # Repository contents, commits, branches, downloads, releases, and merges. + # https://developer.github.com/v3/apps/permissions/#permission-on-contents + # contents: read + + # Deployments and deployment statuses. + # https://developer.github.com/v3/apps/permissions/#permission-on-deployments + # deployments: read + + # Issues and related comments, assignees, labels, and milestones. 
+ # https://developer.github.com/v3/apps/permissions/#permission-on-issues + # issues: read + + # Search repositories, list collaborators, and access repository metadata. + # https://developer.github.com/v3/apps/permissions/#metadata-permissions + metadata: read + + # Retrieve Pages statuses, configuration, and builds, as well as create new builds. + # https://developer.github.com/v3/apps/permissions/#permission-on-pages + # pages: read + + # Pull requests and related comments, assignees, labels, milestones, and merges. + # https://developer.github.com/v3/apps/permissions/#permission-on-pull-requests + # pull_requests: read + + # Manage the post-receive hooks for a repository. + # https://developer.github.com/v3/apps/permissions/#permission-on-repository-hooks + # repository_hooks: read + + # Manage repository projects, columns, and cards. + # https://developer.github.com/v3/apps/permissions/#permission-on-repository-projects + # repository_projects: read + + # Retrieve security vulnerability alerts. + # https://developer.github.com/v4/object/repositoryvulnerabilityalert/ + # vulnerability_alerts: read + + # Commit statuses. + # https://developer.github.com/v3/apps/permissions/#permission-on-statuses + # statuses: read + + # Organization members and teams. + # https://developer.github.com/v3/apps/permissions/#permission-on-members + # members: read + + # View and manage users blocked by the organization. + # https://developer.github.com/v3/apps/permissions/#permission-on-organization-user-blocking + # organization_user_blocking: read + + # Manage organization projects, columns, and cards. + # https://developer.github.com/v3/apps/permissions/#permission-on-organization-projects + # organization_projects: read + + # Manage team discussions and related comments. + # https://developer.github.com/v3/apps/permissions/#permission-on-team-discussions + # team_discussions: read + + # Manage the post-receive hooks for an organization. 
+ # https://developer.github.com/v3/apps/permissions/#permission-on-organization-hooks + # organization_hooks: read + + # Get notified of, and update, content references. + # https://developer.github.com/v3/apps/permissions/ + # organization_administration: read + + +# The name of the GitHub App. Defaults to the name specified in package.json +# name: My Probot App + +# The homepage of your GitHub App. +# url: https://example.com/ + +# A description of the GitHub App. +# description: A description of my awesome app + +# Set to true when your GitHub App is available to the public or false when it is only accessible to the owner of the app. +# Default: true +# public: false diff --git a/openshift-build/index.js b/openshift-build/index.js new file mode 100644 index 0000000..1840bc6 --- /dev/null +++ b/openshift-build/index.js @@ -0,0 +1,154 @@ +// Global Variables +APP_NAME=process.env.APP_NAME; +TEST_COMMAND=process.env.TEST_COMMAND; +const fs = require('fs'); +const childProcess = require("child_process"); + +module.exports = app => { + // When a Pull Request is opened or Re-Opened + app.on(['pull_request.opened', 'pull_request.reopened', 'pull_request.synchronize'], pull_request); + async function pull_request (context) { + // Identify start time + const startTime = new Date(); + + // Extract relevant information + const pr = context.payload.pull_request; + const headBranch = pr.head.ref; + const headSha = pr.head.sha; + + // Mark the check as pending + await context.github.checks.create(context.repo({ + name: APP_NAME, + head_branch: headBranch, + head_sha: headSha, + status: 'in_progress', + started_at: startTime, + })); + + try { + console.log("Running tests..."); + await childProcess.exec("/usr/local/src/sync2jira/openshift-build/runTests.sh " + headSha + " \"" + TEST_COMMAND + "\"", function(error, standardOutput, standardError) { + console.log("Ran tests. 
" + standardOutput); + + // Check if failure file exists + let passed = 'failure'; + if(fs.existsSync('/usr/local/src/sync2jira/openshift-build/temp/failure.sync2jira')) { + console.log("The failure file exists."); + childProcess.exec("rm /usr/local/src/sync2jira/openshift-build/temp/failure.sync2jira", function(error, standardOutput, standardError) { + console.log("Deleting sync2jira.failure..."); + console.log(standardOutput); + console.log(standardError); + }); + } else { + console.log('The failure file does not exist.'); + passed = 'success' + } + + console.log("Pushing results of test..."); + return context.github.checks.create(context.repo({ + name: APP_NAME, + head_branch: headBranch, + head_sha: headSha, + status: 'completed', + started_at: startTime, + conclusion: passed, + completed_at: new Date(), + output: { + title: passed, + summary: standardOutput.toString() + } + })) + }); + } + catch { + return await context.github.checks.create(context.repo({ + name: APP_NAME, + head_branch: headBranch, + head_sha: headSha, + status: 'completed', + started_at: startTime, + conclusion: passed, + completed_at: new Date(), + output: { + title: passed, + summary: 'Error when cloning or running tests.' 
+ } + })) + } + } + + // When someone adds a commit to a Pull Request + app.on(['check_suite.requested', 'check_run.rerequested'], check_suite); + async function check_suite (context) { + // Identify start time + const startTime = new Date(); + + // Extract relevant information + let pr = context.payload.check_suite; + if (typeof pr == 'undefined') { + pr = context.payload.check_run + } + const headBranch = pr.head_branch; + const headSha = pr.head_sha; + + // Mark the check as pending + await context.github.checks.create(context.repo({ + name: APP_NAME, + head_branch: headBranch, + head_sha: headSha, + status: 'in_progress', + started_at: startTime, + })); + + try { + console.log("Running Tests..."); + await childProcess.exec("/usr/local/src/sync2jira/openshift-build/runTests.sh " + headSha + " \"" + TEST_COMMAND + "\"", function(error, standardOutput, standardError) { + console.log("Ran tests. " + standardOutput); + + // Check if failure file exists + let passed = 'failure'; + if(fs.existsSync('/usr/local/src/sync2jira/openshift-build/temp/failure.sync2jira')) { + console.log("The failure file exists."); + childProcess.exec("rm /usr/local/src/sync2jira/openshift-build/temp/failure.sync2jira", function(error, standardOutput, standardError) { + console.log("Deleting sync2jira.failure..."); + console.log(standardOutput); + console.log(standardError); + }); + } else { + console.log('The failure file does not exist.'); + passed = 'success' + } + + console.log("Pushing results of test..."); + return context.github.checks.create(context.repo({ + name: APP_NAME, + head_branch: headBranch, + head_sha: headSha, + status: 'completed', + started_at: startTime, + conclusion: passed, + completed_at: new Date(), + output: { + title: passed, + summary: standardOutput.toString() + } + })) + }); + } + catch { + return await context.github.checks.create(context.repo({ + name: APP_NAME, + head_branch: headBranch, + head_sha: headSha, + status: 'completed', + started_at: startTime, + 
conclusion: 'failure', + completed_at: new Date(), + output: { + title: 'failure', + summary: 'Error when cloning or running tests.' + } + })) + } + } +}; diff --git a/openshift-build/package.json b/openshift-build/package.json new file mode 100644 index 0000000..bef54e8 --- /dev/null +++ b/openshift-build/package.json @@ -0,0 +1,53 @@ +{ + "name": "openshift-build", + "version": "1.0.0", + "private": true, + "description": "Build and test GitHub code in OpenShift", + "author": "Sidhartha Premkumar ", + "license": "ISC", + "repository": "https://github.com/sidpremkumar/openshift-build.git", + "homepage": "https://github.com/sidpremkumar/openshift-build", + "bugs": "https://github.com/sidpremkumar/openshift-build/issues", + "keywords": [ + "probot", + "github", + "probot-app" + ], + "scripts": { + "dev": "nodemon", + "start": "probot run ./index.js", + "lint": "standard --fix", + "test": "jest && standard", + "test:watch": "jest --watch --notify --notifyMode=change --coverage" + }, + "dependencies": { + "probot": "^9.5.3", + "rimraf": "^3.0.0", + "shelljs": "^0.8.3" + }, + "devDependencies": { + "jest": "^24.9.0", + "nock": "^11.4.0", + "nodemon": "^2.0.0", + "smee-client": "^1.1.0", + "standard": "^14.3.1" + }, + "engines": { + "node": ">= 8.3.0" + }, + "standard": { + "env": [ + "jest" + ] + }, + "nodemonConfig": { + "exec": "npm start", + "watch": [ + ".env", + "." 
+ ] + }, + "jest": { + "testEnvironment": "node" + } +} diff --git a/openshift-build/runTests.sh b/openshift-build/runTests.sh new file mode 100755 index 0000000..aae9442 --- /dev/null +++ b/openshift-build/runTests.sh @@ -0,0 +1,43 @@ +#!/bin/bash +set -ex + +# Arguments +HEAD_SHA=$1 +TEST_COMMAND=$2 + +# cd into our temp repo +cd /usr/local/src/sync2jira/openshift-build/temp + +run_tests() { + echo "**Using:**" + echo "**HEAD_SHA:** ${HEAD_SHA}" + echo "**TEST_COMMAND:** ${TEST_COMMAND}" + + # Declare our name + git config user.email "sync2jira@redhat.com" + git config user.name "Red User" + + # Checkout to a new branch with our new sha + echo "**Moving to sha ${HEAD_SHA}**" + git fetch --all + git reset --hard ${HEAD_SHA} + + # Run our commands, if failure touch a file + echo "**Running test command...**" + ${TEST_COMMAND} || touch failure.sync2jira + + # Display the results of our test + echo "**Integration log:**" + INTEGRATION_LOG=$(cat integration_test.log) + echo "$INTEGRATION_LOG" + + echo "**Main log:**" + MAIN_LOG=$(cat sync2jira_main.log) + echo "$MAIN_LOG" + + # Delete our logs + rm integration_test.log + rm sync2jira_main.log +} + +run_tests || touch failure.sync2jira diff --git a/openshift/docker-entrypoint.sh b/openshift/docker-entrypoint.sh new file mode 100644 index 0000000..1f316cf --- /dev/null +++ b/openshift/docker-entrypoint.sh @@ -0,0 +1,18 @@ +#!/bin/bash +set -e + +# CA_URL is the URL of a custom root CA certificate to be installed at run-time +: ${CA_URL:=} + +main() { + # installing CA certificate + if [ -n "${CA_URL}" ] && [ ! 
-f "/tmp/.ca-imported" ]; then + # Since update-ca-trust doesn't work as a non-root user, let's just append to the bundle directly + curl --silent --show-error --location "${CA_URL}" >> /etc/pki/tls/certs/ca-bundle.crt + # Create a file so we know not to import it again if the container is restarted + touch /tmp/.ca-imported + fi +} + +main +exec "$@" diff --git a/openshift/openshift-build-template.yaml b/openshift/openshift-build-template.yaml new file mode 100644 index 0000000..e1b69b0 --- /dev/null +++ b/openshift/openshift-build-template.yaml @@ -0,0 +1,165 @@ +apiVersion: v1 +kind: Template +metadata: + name: openshift-build-template + app: openshift-build +labels: + app: openshift-build +parameters: +- name: IMAGE_URL + displayName: Image URL:TAG to pull from + required: true + value: "quay.io/redhat-aqe/sync2jira:openshift-build" +- name: PRIVATE_KEY + displayName: Private Key to use (base64) + requiered: true +- name: LOG_LEVEL + displayName: Log level to use for our server + requiered: true + value: "debug" +- name: WEBHOOK_PROXY_URL + displayName: Our Smee.io proxy URL that our GitHub project is configured to push to + requiered: true +- name: WEBHOOK_SECRET + displayName: Our GitHub webhook secret (base64) + requiered: true +- name: APP_ID + displayName: Our GitHub App ID + requiered: true +- name: APP_NAME + displayName: Our GitHub App Name + requiered: true +- name: TEST_COMMAND + displayName: Test command that should be executed + requiered: true + value: "python3 tests/integration_tests/integration_test.py" +- name: JIRA_STAGE_URL + displayName: JIRA Stage URL to use for testing + requiered: true +- name: JIRA_USER + displayName: JIRA user to use for testing + requiered: true +- name: INITIALIZE + displayName: Flag to initialize sync2jira + value: "1" +- name: DEFAULT_SERVER + displayName: Default server to use for mailing + required: true +- name: DEFAULT_FROM + displayName: Default from to use for mailing + required: true +- name: CA_URL + 
displayName: CA URL to use to download certs + required: true +objects: +- apiVersion: v1 + kind: ImageStream + metadata: + labels: + app: openshift-build + annotations: + openshift.io/display-name: openshift-build + name: openshift-build + spec: + lookupPolicy: + local: false + tags: + - from: + kind: DockerImage + name: ${IMAGE_URL} + name: latest + referencePolicy: + type: Source +- apiVersion: v1 + kind: Secret + metadata: + name: openshift-build-secret + labels: + app: openshift-build + data: + webhook-secret: |- + ${WEBHOOK_SECRET} + private-key: |- + ${PRIVATE_KEY} +- apiVersion: v1 + kind: DeploymentConfig + metadata: + name: openshift-build + labels: + app: openshift-build + spec: + selector: + service: openshift-build + template: + metadata: + labels: + app: openshift-build + service: openshift-build + spec: + containers: + - name: openshift-build-container + image: openshift-build:latest + imagePullPolicy: Always + volumeMounts: + - name: fedmsgd-volume + mountPath: /etc/fedmsg.d + readOnly: true + env: + - name: JIRA_PASS + valueFrom: + secretKeyRef: + key: jira_pnt_pass + name: sync2jira-secret + - name: PRIVATE_KEY + valueFrom: + secretKeyRef: + key: private-key + name: openshift-build-secret + - name: SYNC2JIRA_GITHUB_TOKEN + valueFrom: + secretKeyRef: + key: github_token + name: sync2jira-secret + - name: DEFAULT_FROM + value: ${DEFAULT_FROM} + - name: DEFAULT_SERVER + value: ${DEFAULT_SERVER} + - name: CA_URL + value: ${CA_URL} + - name: LOG_LEVEL + value: ${LOG_LEVEL} + - name: WEBHOOK_PROXY_URL + value: ${WEBHOOK_PROXY_URL} + - name: APP_ID + value: ${APP_ID} + - name: APP_NAME + value: ${APP_NAME} + - name: WEBHOOK_SECRET + valueFrom: + secretKeyRef: + key: webhook-secret + name: openshift-build-secret + - name: TEST_COMMAND + value: ${TEST_COMMAND} + - name: JIRA_STAGE_URL + value: ${JIRA_STAGE_URL} + - name: JIRA_USER + value: ${JIRA_USER} + - name: INITIALIZE + value: ${INITIALIZE} + - name: CONFLUENCE_SPACE + value: mock_confluence_space + 
volumes: + - name: fedmsgd-volume + configMap: + name: fedmsgd + triggers: + - type: ConfigChange + - type: ImageChange + imageChangeParams: + automatic: true + containerNames: + - openshift-build-container + from: + kind: ImageStreamTag + name: openshift-build:latest diff --git a/openshift/openshift.md b/openshift/openshift.md new file mode 100644 index 0000000..0dbda32 --- /dev/null +++ b/openshift/openshift.md @@ -0,0 +1,38 @@ +# OpenShift Deployment +Sync2Jira is designed to be deployed on OpenShift (although it isn't required). We have provided the OpenShift templates +needed to deploy [Sync2Jira](sync2jira-template.yaml) and the [Continuous-Deployment](sync2jira-deploy-template.yaml) +feature of Sync2Jira. + +The templates assumes the following: + +1. You have an ImageStream called 'sync2jira'. It can be created on OpenShift in your project with the following command: + ```shell script + oc create imagestream sync2jira + ``` +2. You have a config map called fedmsgd where you load your config `sync2jira.py` file. + ```shell script + oc create configmap fedmsgd --from-file=sync2jira.py + ``` +3. You deployed `sync2jira-stage-template.yaml` BEFORE `sync2jira-deploy-template.yaml` + +## Continuous Deployment +To use the continuous-deployment feature you have to have service accounts on your stage and namespace. You can create +them and get their token with the following commands: +```shell script +oc create sa sync2jira-deploy +oc policy add-role-to-user edit -z sync2jira-deploy +oc sa get-token sync2jira-deploy +``` +You will then have to set the `INITILIZE` environmental variable in your stage and prod deployment to 0 as you will enable CD + +You will also have to build the image in OpenShift. You will need to pass a URL (RCM_TOOLS_REPO) to a .repo file to +install rhmsg which is what we use to listen for repo changes. 
+ +## OpenShift-Build +Sync2Jira uses [OpenShift-Build](https://github.com/sidpremkumar/OpenShift-Build) to achieve integration tests against +real values. You can use the [openshift-build-template.yml](openshift-build-template.yaml) to deploy an instance of +OpenShift build in your namespace. Make sure to configure your GitHub repo to push checks and pull requests to a +[Smee.io](https://smee.io) URL. See the documentation under [OpenShift-Build](https://github.com/sidpremkumar/OpenShift-Build) +to learn more. + +Note: To deploy OpenShift build you must have the fedmsg.d config map \ No newline at end of file diff --git a/openshift/sync2jira-deploy-template.yaml b/openshift/sync2jira-deploy-template.yaml new file mode 100644 index 0000000..612782c --- /dev/null +++ b/openshift/sync2jira-deploy-template.yaml @@ -0,0 +1,207 @@ +apiVersion: v1 +kind: Template +metadata: + name: sync2jira-deploy-template + app: sync2jira-deploy +labels: + app: sync2jira-deploy +parameters: +- name: CERT + displayName: Cert to be used to access ACTIVEMQ (base64) + required: true +- name: KEY + displayName: Key to be used to access ACTIVEMQ (base64) + required: true +- name: ACTIVEMQ_QUERY + displayName: Query that we should be using + required: true +- name: ACTIVEMQ_URL_1 + displayName: Message Bus URL are a tuple, this is the first part of that tuple + required: true +- name: ACTIVEMQ_URL_2 + displayName: Message Bus URL are a tuple, this is the second part of that tuple + required: true +- name: ACTIVEMQ_REPO_NAME + displayName: Repo (or topic/category) that we should be listening for + required: true +- name: ENDPOINT + displayName: Openshift endpoint + required: true +- name: NAMESPACE + displayName: Openshift namespace to use + required: true +- name: SEND_EMAILS + displayName: Flag to send emails (0/1) + required: true + value: '1' +- name: RCM_TOOLS_REPO + displayName: RCM Tools Repo URL + required: true +- name: CA_URL + displayName: CA_URL that points to ca_certs + required: 
true +- name: GITHUB_URL + displayName: GitHub .git URL to our Sync2Jira repo + required: true +- name: OPENSHIFT_TOKEN + displayName: OpenShift-prod token to be used to tag new images (base64) + required: true +- name: OPENSHIFT_TOKEN_STAGE + displayName: OpenShift-stage token to be used to tag new images (base64) + required: true +- name: DEFAULT_SERVER + displayName: Default server to use for mailing + required: true +- name: DEFAULT_FROM + displayName: Default from to use for mailing + required: true +objects: +- apiVersion: v1 + kind: ImageStream + metadata: + labels: + app: sync2jira-deploy + annotations: + openshift.io/display-name: sync2jira-deploy + name: sync2jira-deploy + namespace: sync2jira-deploy +- apiVersion: v1 + kind: Secret + metadata: + name: sync2jira-deploy-secrets + labels: + app: sync2jira-deploy + service: sync2jira-deploy + data: + deploy.crt: |- + ${CERT} + deploy.key: |- + ${KEY} + openshift-token: |- + ${OPENSHIFT_TOKEN} + openshift-token-stage: |- + ${OPENSHIFT_TOKEN_STAGE} +- apiVersion: v1 + kind: BuildConfig + metadata: + name: sync2jira-deploy + labels: + app: sync2jira-deploy + service: sync2jira-deploy + spec: + source: + git: + uri: ${GITHUB_URL} + ref: master + strategy: + type: Docker + dockerStrategy: + buildArgs: + - name: RCM_TOOLS_REPO + value: ${RCM_TOOLS_REPO} + dockerfilePath: Dockerfile.deploy + output: + to: + kind: ImageStreamTag + name: sync2jira-deploy:latest + runPolicy: Serial +- apiVersion: v1 + kind: DeploymentConfig + metadata: + name: sync2jira-deploy + labels: + service: sync2jira-deploy + app: sync2jira-deploy + spec: + selector: + service: sync2jira-deploy + template: + metadata: + labels: + app: sync2jira-deploy + service: sync2jira-deploy + spec: + containers: + - name: sync2jira-deploy + image: sync2jira-deploy:latest + imagePullPolicy: Always + volumeMounts: + - name: secrets + mountPath: /usr/local/src/sync2jira/docs # Overwrite the doc folder, we don't need it when we're running + readOnly: true + - 
name: fedmsgd-volume + mountPath: /etc/fedmsg.d + readOnly: true + env: + - name: SEND_EMAILS + value: ${SEND_EMAILS} + - name: CERT + value: /usr/local/src/sync2jira/docs/deploy.crt + - name: KEY + value: /usr/local/src/sync2jira/docs/deploy.key + - name: CA_CERTS + value: /etc/pki/tls/certs/ca-bundle.crt # Downloaded during docker-entrypoint.sh + - name: ACTIVEMQ_QUERY + value: ${ACTIVEMQ_QUERY} + - name: ACTIVEMQ_URL_1 + value: ${ACTIVEMQ_URL_1} + - name: ACTIVEMQ_URL_2 + value: ${ACTIVEMQ_URL_2} + - name: ACTIVEMQ_REPO_NAME + value: ${ACTIVEMQ_REPO_NAME} + - name: TOKEN + valueFrom: + secretKeyRef: + name: sync2jira-deploy-secrets + key: openshift-token + - name: STAGE_TOKEN + valueFrom: + secretKeyRef: + name: sync2jira-deploy-secrets + key: openshift-token-stage + - name: ENDPOINT + value: ${ENDPOINT} + - name: NAMESPACE + value: ${NAMESPACE} + - name: DEFAULT_FROM + value: ${DEFAULT_FROM} + - name: DEFAULT_SERVER + value: ${DEFAULT_SERVER} + - name: RCM_TOOLS_REPO + value: ${RCM_TOOLS_REPO} + - name: CONFLUENCE_SPACE + value: mock_confluence_space + - name: CA_URL + value: ${CA_URL} + - name: SYNC2JIRA_PNTJIRA_PASS + valueFrom: + secretKeyRef: + key: jira_pnt_pass + name: sync2jira-secret + - name: SYNC2JIRA_OMEGAPRIME_PASS + valueFrom: + secretKeyRef: + key: jira_omegaprime_pass + name: sync2jira-secret + - name: SYNC2JIRA_GITHUB_TOKEN + valueFrom: + secretKeyRef: + key: github_token + name: sync2jira-secret + volumes: + - name: secrets + secret: + secretName: sync2jira-deploy-secrets + - name: fedmsgd-volume + configMap: + name: fedmsgd + triggers: + - type: ConfigChange + - type: ImageChange + imageChangeParams: + automatic: true + containerNames: + - sync2jira-deploy + from: + kind: ImageStreamTag + name: sync2jira-deploy:latest \ No newline at end of file diff --git a/openshift/sync2jira-stage-template.yaml b/openshift/sync2jira-stage-template.yaml new file mode 100644 index 0000000..6e42332 --- /dev/null +++ b/openshift/sync2jira-stage-template.yaml @@ 
-0,0 +1,162 @@ +apiVersion: v1 +kind: Template +metadata: + name: sync2jira-stage-template + app: sync2jira-stage +labels: + app: sync2jira-stage +parameters: +- name: CA_URL + displayName: CA URL used for sync2jira + required: true +- name: DEFAULT_SERVER + displayName: Default server to use for mailing + required: true +- name: DEFAULT_FROM + displayName: Default from to use for mailing + required: true +- name: USER + displayName: JIRA username + required: true +- name: CONFLUENCE_SPACE + displayName: Confluence space + required: true +- name: CONFLUENCE_PAGE_TITLE + displayName: Confluence page title + required: true +- name: CONFLUENCE_URL + displayName: Confluence URL + required: true +- name: CONFLUENCE_USERNAME + displayName: Confluence username + required: true +- name: INITILIZE + displayName: True/False Initilize our repos on startup + value: "1" +- name: IMAGE_URL + displayName: Image URL:TAG to pull from + value: "quay.io/redhat-aqe/sync2jira:latest" +- name: JIRA_PNT_PASS + displayName: PNT password in base64 + requiered: true +- name: JIRA_OMEGAPRIME_PASS + displayName: Omegaprime password in base64 + requiered: true +- name: SYNC2JIRA_CONFLUENCE_PASS + displayName: Confluence password in base64 + requiered: true +- name: GITHUB_TOKEN + displayName: GitHub token in base64 + requiered: true +objects: +- apiVersion: v1 + kind: ImageStream + metadata: + labels: + app: sync2jira-stage + annotations: + openshift.io/display-name: sync2jira-stage + name: sync2jira-stage + spec: + lookupPolicy: + local: false + tags: + - from: + kind: DockerImage + name: ${IMAGE_URL} + name: latest + referencePolicy: + type: Source +- apiVersion: v1 + kind: Secret + metadata: + name: sync2jira-secret + labels: + app: sync2jira-stage + data: + jira_pnt_pass: |- + ${JIRA_PNT_PASS} + jira_omegaprime_pass: |- + ${JIRA_OMEGAPRIME_PASS} + sync2jira-confluence: |- + ${SYNC2JIRA_CONFLUENCE_PASS} + github_token: |- + ${GITHUB_TOKEN} +- apiVersion: v1 + kind: DeploymentConfig + 
metadata: + name: sync2jira-stage + labels: + app: sync2jira-stage + spec: + selector: + service: sync2jira-stage + template: + metadata: + labels: + app: sync2jira-stage + service: sync2jira-stage + spec: + containers: + - name: sync2jira-stage-container + image: sync2jira-stage:stage + imagePullPolicy: Always + volumeMounts: + - name: fedmsgd-volume + mountPath: /etc/fedmsg.d + readOnly: true + env: + - name: INITIALIZE + value: ${INITILIZE} + - name: CA_URL + value: ${CA_URL} + - name: DEFAULT_SERVER + value: ${DEFAULT_SERVER} + - name: DEFAULT_FROM + value: ${DEFAULT_FROM} + - name: USER + value: ${USER} + - name: SYNC2JIRA_PNTJIRA_PASS + valueFrom: + secretKeyRef: + key: jira_pnt_pass + name: sync2jira-secret + - name: SYNC2JIRA_OMEGAPRIME_PASS + valueFrom: + secretKeyRef: + key: jira_omegaprime_pass + name: sync2jira-secret + - name: SYNC2JIRA_GITHUB_TOKEN + valueFrom: + secretKeyRef: + key: github_token + name: sync2jira-secret + - name: CONFLUENCE_SPACE + value: ${CONFLUENCE_SPACE} + - name: CONFLUENCE_PAGE_TITLE + value: ${CONFLUENCE_PAGE_TITLE} + - name: CONFLUENCE_URL + value: ${CONFLUENCE_URL} + - name: CONFLUENCE_USERNAME + value: sync2jira-confluence + - name: CONFLUENCE_PASSWORD + valueFrom: + secretKeyRef: + key: sync2jira-confluence + name: sync2jira-secret + - name: INITILIZE + value: ${INITILIZE} + volumes: + - name: fedmsgd-volume + configMap: + name: fedmsgd + triggers: + - type: ImageChange + imageChangeParams: + automatic: true + containerNames: + - sync2jira-stage-container + from: + kind: ImageStreamTag + name: sync2jira-stage:latest + - type: ConfigChange diff --git a/openshift/sync2jira-sync-page-template.yaml b/openshift/sync2jira-sync-page-template.yaml new file mode 100644 index 0000000..60b1378 --- /dev/null +++ b/openshift/sync2jira-sync-page-template.yaml @@ -0,0 +1,176 @@ +apiVersion: v1 +kind: Template +metadata: + name: sync2jira-sync-page-template + app: sync2jira-sync-page +labels: + app: sync2jira-sync-page +parameters: +- 
name: CA_URL + displayName: CA URL used for sync2jira + required: true +- name: DEFAULT_SERVER + displayName: Default server to use for mailing + required: true +- name: DEFAULT_FROM + displayName: Default from to use for mailing + required: true +- name: USER + displayName: JIRA username + required: true +- name: CONFLUENCE_SPACE + displayName: Confluence space + required: true + value: "mock_confluence_space" +- name: INITIALIZE + displayName: True/False Initialize our repos on startup + value: "0" +- name: IMAGE_URL + displayName: Image URL:TAG to pull from + value: "quay.io/redhat-aqe/sync2jira:sync-page" +- name: JIRA_PNT_PASS + displayName: PNT password in base64 + required: true +- name: JIRA_OMEGAPRIME_PASS + displayName: Omegaprime password in base64 + required: true +- name: GITHUB_TOKEN + displayName: GitHub token in base64 + required: true +- name: PAAS_DOMAIN + displayName: Domain to use for the service + required: true +objects: +- apiVersion: v1 + kind: ImageStream + metadata: + labels: + app: sync2jira-sync-page + annotations: + openshift.io/display-name: sync2jira-sync-page + name: sync2jira-sync-page + spec: + lookupPolicy: + local: false + tags: + - from: + kind: DockerImage + name: ${IMAGE_URL} + name: latest + referencePolicy: + type: Source +- apiVersion: v1 + kind: Secret + metadata: + name: sync2jira-secret + labels: + app: sync2jira-stage + data: + jira_pnt_pass: |- + ${JIRA_PNT_PASS} + jira_omegaprime_pass: |- + ${JIRA_OMEGAPRIME_PASS} + sync2jira-confluence: |- + ${SYNC2JIRA_CONFLUENCE_PASS} + github_token: |- + ${GITHUB_TOKEN} +- apiVersion: v1 + kind: Route + metadata: + name: sync2jira-sync-page + labels: + app: sync2jira-sync-page + spec: + host: sync2jira-page-sync.${PAAS_DOMAIN} + to: + kind: Service + name: sync2jira-sync-page + port: + targetPort: http +- apiVersion: v1 + kind: Service + metadata: + name: sync2jira-sync-page + labels: + app: sync2jira-sync-page + spec: + selector: + service: sync2jira-sync-page + ports: + - name: 
http + port: 80 + targetPort: http +- apiVersion: v1 + kind: DeploymentConfig + metadata: + name: sync2jira-sync-page + labels: + app: sync2jira-sync-page + spec: + selector: + service: sync2jira-sync-page + template: + metadata: + labels: + app: sync2jira-sync-page + service: sync2jira-sync-page + spec: + containers: + - name: sync2jira-sync-page-container + image: sync2jira-sync-page:latest + imagePullPolicy: Always + ports: + - name: http + containerPort: 5000 + volumeMounts: + - name: fedmsgd-volume + mountPath: /etc/fedmsg.d + readOnly: true + env: + - name: BASE_URL + value: 0.0.0.0 + - name: REDIRECT_URL + value: sync2jira-page-sync.${PAAS_DOMAIN} + - name: INITIALIZE + value: ${INITIALIZE} + - name: CA_URL + value: ${CA_URL} + - name: DEFAULT_SERVER + value: ${DEFAULT_SERVER} + - name: DEFAULT_FROM + value: ${DEFAULT_FROM} + - name: USER + value: ${USER} + - name: SYNC2JIRA_PNTJIRA_PASS + valueFrom: + secretKeyRef: + key: jira_pnt_pass + name: sync2jira-secret + - name: SYNC2JIRA_OMEGAPRIME_PASS + valueFrom: + secretKeyRef: + key: jira_omegaprime_pass + name: sync2jira-secret + - name: SYNC2JIRA_GITHUB_TOKEN + valueFrom: + secretKeyRef: + key: github_token + name: sync2jira-secret + - name: CONFLUENCE_SPACE + value: ${CONFLUENCE_SPACE} + - name: INITILIZE + value: ${INITIALIZE} + volumes: + - name: fedmsgd-volume + configMap: + name: fedmsgd + triggers: + - type: ImageChange + imageChangeParams: + automatic: true + containerNames: + - sync2jira-sync-page-container + from: + kind: ImageStreamTag + name: sync2jira-sync-page:latest + - type: ConfigChange diff --git a/openshift/sync2jira-template.yaml b/openshift/sync2jira-template.yaml new file mode 100644 index 0000000..535678e --- /dev/null +++ b/openshift/sync2jira-template.yaml @@ -0,0 +1,162 @@ +apiVersion: v1 +kind: Template +metadata: + name: sync2jira-template + app: sync2jira +labels: + app: sync2jira +parameters: +- name: CA_URL + displayName: CA URL used for sync2jira + required: true +- name: 
DEFAULT_SERVER + displayName: Default server to use for mailing + required: true +- name: DEFAULT_FROM + displayName: Default from to use for mailing + required: true +- name: USER + displayName: JIRA username + required: true +- name: CONFLUENCE_SPACE + displayName: Confluence space + required: true +- name: CONFLUENCE_PAGE_TITLE + displayName: Confluence page title + required: true +- name: CONFLUENCE_URL + displayName: Confluence URL + required: true +- name: CONFLUENCE_USERNAME + displayName: Confluence username + required: true +- name: INITILIZE + displayName: True/False Initilize our repos on startup + value: "1" +- name: IMAGE_URL + displayName: Image URL:TAG to pull from + value: "quay.io/redhat-aqe/sync2jira:latest" +- name: JIRA_PNT_PASS + displayName: PNT password in base64 + requiered: true +- name: JIRA_OMEGAPRIME_PASS + displayName: Omegaprime password in base64 + requiered: true +- name: SYNC2JIRA_CONFLUENCE_PASS + displayName: Confluence password in base64 + requiered: true +- name: GITHUB_TOKEN + displayName: GitHub token in base64 + requiered: true +objects: +- apiVersion: v1 + kind: ImageStream + metadata: + labels: + app: sync2jira + annotations: + openshift.io/display-name: sync2jira + name: sync2jira + spec: + lookupPolicy: + local: false + tags: + - from: + kind: DockerImage + name: ${IMAGE_URL} + name: latest + referencePolicy: + type: Source +- apiVersion: v1 + kind: Secret + metadata: + name: sync2jira-secret + labels: + app: sync2jira + data: + jira_pnt_pass: |- + ${JIRA_PNT_PASS} + jira_omegaprime_pass: |- + ${JIRA_OMEGAPRIME_PASS} + sync2jira-confluence: |- + ${SYNC2JIRA_CONFLUENCE_PASS} + github_token: |- + ${GITHUB_TOKEN} +- apiVersion: v1 + kind: DeploymentConfig + metadata: + name: sync2jira + labels: + app: sync2jira + spec: + selector: + service: sync2jira + template: + metadata: + labels: + app: sync2jira + service: sync2jira + spec: + containers: + - name: sync2jira-container + image: sync2jira:latest + imagePullPolicy: Always 
+ volumeMounts: + - name: fedmsgd-volume + mountPath: /etc/fedmsg.d + readOnly: true + env: + - name: INITIALIZE + value: ${INITILIZE} + - name: CA_URL + value: ${CA_URL} + - name: DEFAULT_SERVER + value: ${DEFAULT_SERVER} + - name: DEFAULT_FROM + value: ${DEFAULT_FROM} + - name: USER + value: ${USER} + - name: SYNC2JIRA_PNTJIRA_PASS + valueFrom: + secretKeyRef: + key: jira_pnt_pass + name: sync2jira-secret + - name: SYNC2JIRA_OMEGAPRIME_PASS + valueFrom: + secretKeyRef: + key: jira_omegaprime_pass + name: sync2jira-secret + - name: SYNC2JIRA_GITHUB_TOKEN + valueFrom: + secretKeyRef: + key: github_token + name: sync2jira-secret + - name: CONFLUENCE_SPACE + value: ${CONFLUENCE_SPACE} + - name: CONFLUENCE_PAGE_TITLE + value: ${CONFLUENCE_PAGE_TITLE} + - name: CONFLUENCE_URL + value: ${CONFLUENCE_URL} + - name: CONFLUENCE_USERNAME + value: sync2jira-confluence + - name: CONFLUENCE_PASSWORD + valueFrom: + secretKeyRef: + key: sync2jira-confluence + name: sync2jira-secret + - name: INITILIZE + value: ${INITILIZE} + volumes: + - name: fedmsgd-volume + configMap: + name: fedmsgd + triggers: + - type: ImageChange + imageChangeParams: + automatic: true + containerNames: + - sync2jira-container + from: + kind: ImageStreamTag + name: sync2jira:latest + - type: ConfigChange diff --git a/setup.py b/setup.py index a5f854a..c46821e 100644 --- a/setup.py +++ b/setup.py @@ -15,20 +15,25 @@ # License along with sync2jira; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110.15.0 USA # -# Authors: Ralph Bean , Sid Premkumar +# Authors: Ralph Bean from setuptools import setup import os with open('requirements.txt', 'rb') as f: install_requires = f.read().decode('utf-8').split('\n') + if not os.getenv('READTHEDOCS'): + install_requires.append('requests-kerberos') + +with open('test-requirements.txt', 'rb') as f: + test_requires = f.read().decode('utf-8').split('\n') setup( - name='webhook-sync2jira', + name='sync2jira', version=2.0, - 
description="Sync Github issues to JIRA, via Github Webhooks", - author='Sid Premkumar', - author_email='sid@bastionzero.com', - url='https://github.com/cwcrypto/WebHookSync2Jira', + description="Sync pagure and github issues to jira, via fedmsg", + author='Ralph Bean', + author_email='rbean@redhat.com', + url='https://pagure.io/sync-to-jira', license='LGPLv2+', classifiers=[ "Development Status :: 5 - Production/Stable", @@ -39,6 +44,8 @@ "Programming Language :: Python :: 3", ], install_requires=install_requires, + tests_require=test_requires, + test_suite='nose.collector', packages=[ 'sync2jira', ], @@ -47,6 +54,8 @@ entry_points={ 'console_scripts': [ "sync2jira=sync2jira.main:main", + "sync2jira-list-managed-urls=sync2jira.main:list_managed", + "sync2jira-close-duplicates=sync2jira.main:close_duplicates", ], }, ) diff --git a/sync-page/__init__.py b/sync-page/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sync-page/assets/font.css b/sync-page/assets/font.css new file mode 100644 index 0000000..c795562 --- /dev/null +++ b/sync-page/assets/font.css @@ -0,0 +1,162 @@ + + @font-face { + font-family: "overpass"; + font-style: normal; + font-weight: 200; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin.eot"); + /* IE9 Compat Modes */ + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin.ttf") format("truetype"); + /* Safari, Android, iOS */ + } + + @font-face { + font-family: "overpass"; + font-style: italic; + font-weight: 200; + src: 
url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin-italic.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin-italic.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: normal; + font-weight: 300; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: italic; + font-weight: 300; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight-italic.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight-italic.woff2") format("woff2"), 
url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight-italic.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: normal; + font-weight: 400; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: italic; + font-weight: 400; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light-italic.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light-italic.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: normal; + font-weight: 500; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-regular.eot"); + src: 
url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-regular.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-regular.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-regular.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-regular.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: italic; + font-weight: 500; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-italic.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-italic.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: normal; + font-weight: 600; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold.ttf") format("truetype"); + } + + 
@font-face { + font-family: "overpass"; + font-style: italic; + font-weight: 600; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold-italic.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold-italic.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: normal; + font-weight: 700; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: italic; + font-weight: 700; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold-italic.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold-italic.woff2") format("woff2"), 
url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold-italic.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: normal; + font-weight: 800; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: italic; + font-weight: 800; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold-italic.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold-italic.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: normal; + font-weight: 900; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy.eot"); + src: 
url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass"; + font-style: italic; + font-weight: 900; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy-italic.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy-italic.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass-mono"; + font-style: normal; + font-weight: 300; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-light.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-light.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-light.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-light.woff") format("woff"), 
url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-light.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass-mono"; + font-style: normal; + font-weight: 400; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-regular.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-regular.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-regular.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-regular.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-regular.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass-mono"; + font-style: normal; + font-weight: 500; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-semibold.eot"); + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-semibold.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-semibold.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-semibold.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-semibold.ttf") format("truetype"); + } + + @font-face { + font-family: "overpass-mono"; + font-style: normal; + font-weight: 600; + src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-bold.eot"); + src: 
url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-bold.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-bold.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-bold.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-bold.ttf") format("truetype"); + } diff --git a/sync-page/assets/redhat-favicon.ico b/sync-page/assets/redhat-favicon.ico new file mode 100644 index 0000000..9072de7 Binary files /dev/null and b/sync-page/assets/redhat-favicon.ico differ diff --git a/sync-page/event-handler.py b/sync-page/event-handler.py new file mode 100644 index 0000000..43c7003 --- /dev/null +++ b/sync-page/event-handler.py @@ -0,0 +1,76 @@ +# Build-In Modules +import logging +import os + +# 3rd Party Modules +from flask import Flask, render_template, request, redirect + +# Local Modules +from sync2jira.main import load_config, initialize_pr, initialize_issues + +# Global Variables +app = Flask(__name__, static_url_path = "/assets", static_folder = "assets") +BASE_URL = os.environ['BASE_URL'] +REDIRECT_URL = os.environ['REDIRECT_URL'] +config = load_config() + +# Set up our logging +FORMAT = "[%(asctime)s] %(levelname)s: %(message)s" +logging.basicConfig(format=FORMAT, level=logging.INFO) +logging.basicConfig(format=FORMAT, level=logging.DEBUG) +logging.basicConfig(format=FORMAT, level=logging.WARNING) +log = logging.getLogger('sync2jira-sync-page') + + +@app.route('/handle-event', methods=['POST']) +def handle_event(): + """ + Handler for when a user wants to sync a repo + """ + response = request.form + synced_repos = [] + for repo_name, switch in response.items(): + if switch == "on": + # Sync repo_name + log.info(f"Starting sync for repo: {repo_name}") + initialize_issues(config, 
repo_name=repo_name) + initialize_pr(config, repo_name=repo_name) + synced_repos.append(repo_name) + if synced_repos: + return render_template('sync-page-success.jinja', + synced_repos=synced_repos, + url=f"http://{REDIRECT_URL}") + else: + return render_template('sync-page-failure.jinja', + url=f"http://{REDIRECT_URL}") + + +@app.route('/', methods=['GET']) +def index(): + """ + Return relevant redirect + """ + return redirect("/github") + +@app.route('/github', methods=['GET']) +def github(): + """ + Github Sync Page + """ + # Build and return our updated HTML page + return render_template('sync-page-github.jinja', + github=config['sync2jira']['map']['github'], + url=f"http://{REDIRECT_URL}") + +@app.route('/pagure', methods=['GET']) +def pagure(): + """ + Pagure Sync Page + """ + # Build and return our updated HTML page + return render_template('sync-page-pagure.jinja', + pagure=config['sync2jira']['map']['pagure'], + url=f"http://{REDIRECT_URL}") + +if __name__ == '__main__': + app.run(host=BASE_URL) diff --git a/sync-page/templates/sync-page-failure.jinja b/sync-page/templates/sync-page-failure.jinja new file mode 100644 index 0000000..5f64b0d --- /dev/null +++ b/sync-page/templates/sync-page-failure.jinja @@ -0,0 +1,70 @@ + + + + + + + + + + + Sync2Jira - Sync Page + + + +
+ +
+
+ +
+
+
+
+
+

Failed to start syncing

+

Make sure you selected at least one repo!

+
+
+
+
+
+
+ +
+
+
+
+
+
+ + diff --git a/sync-page/templates/sync-page-github.jinja b/sync-page/templates/sync-page-github.jinja new file mode 100644 index 0000000..fd9dbbf --- /dev/null +++ b/sync-page/templates/sync-page-github.jinja @@ -0,0 +1,77 @@ + + + + + + + + + + + Sync2Jira - Sync Page + + +
+ +
+
+ +
+
+
+
+
+

GitHub Sync

+

Select the repos you would like to be re-synced!

+
+
+
+
+ {% for repo_name, info in github.items() %} +
+
+
+ + +
+
+
+ {% endfor %} +
+
+
+ +
+
+
+
+
+
+
+ + diff --git a/sync-page/templates/sync-page-pagure.jinja b/sync-page/templates/sync-page-pagure.jinja new file mode 100644 index 0000000..0038ff4 --- /dev/null +++ b/sync-page/templates/sync-page-pagure.jinja @@ -0,0 +1,77 @@ + + + + + + + + + + + Sync2Jira - Sync Page + + +
+ +
+
+ +
+
+
+
+
+

Pagure Sync

+

Select the repos you would like to be re-synced!

+
+
+
+
+ {% for repo_name, info in pagure.items() %} +
+
+
+ + +
+
+
+ {% endfor %} +
+
+
+ +
+
+
+
+
+
+
+ + diff --git a/sync-page/templates/sync-page-success.jinja b/sync-page/templates/sync-page-success.jinja new file mode 100644 index 0000000..440ee78 --- /dev/null +++ b/sync-page/templates/sync-page-success.jinja @@ -0,0 +1,74 @@ + + + + + + + + + + + Sync2Jira - Sync Page + + + +
+ +
+
+ +
+
+
+
+
+

Successfully started syncing the following repos:

+
    + {% for repo_name in synced_repos %} +
  • {{ repo_name }}
  • + {% endfor %} +
+
+
+
+
+
+
+ +
+
+
+
+
+
+ + diff --git a/sync2jira/confluence_client.py b/sync2jira/confluence_client.py new file mode 100644 index 0000000..51e19be --- /dev/null +++ b/sync2jira/confluence_client.py @@ -0,0 +1,245 @@ +#!/usr/bin/python3 +""" +This script acts as a client to confluence, connects to confluence and create +pages +""" +import logging +import os +import re +import requests +from requests.auth import HTTPBasicAuth +import jinja2 +import datetime + +# Global Variables +log = logging.getLogger('sync2jira') + + +class ConfluenceClient: + + """ A conflence component used to connect to confluence and perform + confluence related tasks + """ + + def __init__( + self, + confluence_space=os.environ.get("CONFLUENCE_SPACE"), + confluence_page_title=os.environ.get("CONFLUENCE_PAGE_TITLE"), + confluence_url=os.environ.get("CONFLUENCE_URL"), + username=os.environ.get("CONFLUENCE_USERNAME"), + password=os.environ.get("CONFLUENCE_PASSWORD"), + auth_type="basic", + ): + """ Returns confluence client object + :param string confluence_space : space to be used in confluence + :param string confluence_page_title : Title of page to be created in + confluence + :param string confluence_url : url to connect confluence + :param string username : optional username for basic auth + :param string password : optional password for basic auth + :param string auth_type : indicate auth scheme (basic/kerberos) + """ + self.confluence_space = confluence_space + self.confluence_page_title = confluence_page_title + self.confluence_url = confluence_url + self.confluence_rest_url = self.confluence_url + "/rest/api/content/" + self.username = username + self.password = password + self.authtype = auth_type + self.update_stat = False + self._req_kwargs = None + + # Find our page ID and save it + resp = self.find_page() + if not resp: + raise ValueError("Invalid page name") + self.page_id = resp + + def update_stat_value(self, new_value): + """ Update the 'update_stat' attribute. 
+ :param Bool new_value: Bool value + """ + self.update_stat = new_value + + @property + def req_kwargs(self): + """ Set the key-word arguments for python-requests depending on the + auth type. This code should run on demand exactly once, which is + why it is a property. + :return dict _req_kwargs: dict with the right options to pass in + """ + if self._req_kwargs is None: + if self.authtype == "basic": + self._req_kwargs = {"auth": self.get_auth_object()} + return self._req_kwargs + + def update_stat_page(self, confluence_data): + """ + Updates the statistic page with more data + :param dict confluence_data: Variable amount of new data + """ + try: + # Get the HTML to update + page_info = self.get_page_info(self.page_id) + page_html = page_info['body']['storage']['value'] + # Maintain and update our final data + confluence_data_update = { + 'Created Issues': 0, + 'Descriptions': 0, + 'Comments': 0, + 'Reporters': 0, + 'Status': 0, + 'Assignees': 0, + 'Transitions': 0, + 'Title': 0, + 'Tags': 0, + 'FixVersion': 0, + 'Misc. Fields': 0, + 'Total': 0 + } + confluence_data_times = { + 'Created Issues': 60, + 'Descriptions': 30, + 'Comments': 30, + 'Reporters': 30, + 'Assignees': 15, + 'Status': 30, + 'Transitions': 30, + 'Title': 15, + 'Tags': 10, + 'FixVersion': 10, + 'Misc. Fields': 15, + } + # Use these HTML patterns to search for previous values + confluence_html_patterns = { + 'Created Issues': "Created Issues", + 'Descriptions': "Descriptions", + 'Comments': "Comments", + 'Reporters': "Reporters", + 'Assignees': "Assignees", + 'Status': "Status", + 'Transitions': "Transitions", + 'Title': "Titles", + 'Tags': "Tags", + 'FixVersion': "Fix Version", + 'Misc. Fields': "Misc. 
Fields", + } + # Update all our data + total = 0 + for topic, html in confluence_html_patterns.items(): + # Search for previous data + try: + ret = re.search(html, page_html) + start_index = ret.span()[1] + new_val = "" + while page_html[start_index] != "<": + new_val += page_html[start_index] + start_index += 1 + confluence_data_update[topic] = int(new_val) + total += int(new_val) + except AttributeError: + log.warning(f"Confluence failed on parsing {topic}") + total += 0 + confluence_data_update[topic] = 0 + + # Now add new data + for topic in confluence_html_patterns.keys(): + if topic in confluence_data: + confluence_data_update[topic] += confluence_data[topic] + total += confluence_data[topic] + confluence_data_update["Total"] = total + + # Calculate Total Time + total_time = 0 + for topic in confluence_data_times.keys(): + total_time += confluence_data_update[topic] * confluence_data_times[topic] + total_time = datetime.timedelta(seconds=total_time) + confluence_data_update["Total Time"] = str(total_time) + " (HR:MIN:SEC)" + + # Build our updated HTML page + templateLoader = jinja2.FileSystemLoader( + searchpath='usr/local/src/sync2jira/sync2jira/') + templateEnv = jinja2.Environment(loader=templateLoader) + template = templateEnv.get_template('confluence_stat.jinja') + html_text = template.render(confluence_data=confluence_data_update) + + # Finally update our page + if html_text.replace(" ", "") != page_html.replace(" ", ""): + self.update_page(self.page_id, html_text) + except: # noqa E722 + log.exception("Something went wrong updating confluence!") + + def find_page(self): + """ finds the page with confluence_page_title in confluence_space + return string page_id : id of the page if found, otherwise None + """ + search_url = ( + self.confluence_url + + "/rest/api/content/search?cql=title='" + + self.confluence_page_title + + "' and " + + "space=" + + self.confluence_space + ) + resp = requests.get(search_url, **self.req_kwargs) + if 
len(resp.json()["results"]) > 0: + return resp.json()["results"][0].get("id", None) + else: + return None + + def get_page_info(self, page_id): + """Gives information like ancestors,version of a page + :param string page_id: id of the confluence page + :return json conf_resp: response from the confluence + """ + conf_rest_url = ( + self.confluence_url + + "/rest/api/content/" + + page_id + + "?expand=ancestors,version,body.storage" + ) + resp = requests.get(conf_rest_url, **self.req_kwargs) + return resp.json() + + def update_page(self, page_id, html_str): + """ + Updates the page with id page_id + :param string page_id: id of the page + :param string html_str : html_str content of the page + :return json conf_resp: response from the confluence + """ + rest_url = self.confluence_rest_url + page_id + info = self.get_page_info(page_id) + updated_page_version = int(info["version"]["number"] + 1) + + data = { + "id": str(page_id), + "type": "page", + "title": info["title"], + "version": {"number": updated_page_version}, + "body": {"storage": {"representation": "storage", "value": html_str}}, + } + resp = requests.put(rest_url, json=data, **self.req_kwargs) + if not resp.ok: + log.error("Error updating confluence page!\nConfluence response: %s\n", resp.json()) + + return resp.json() + + def get_auth_object(self): + """Returns Auth object based on auth type + :return : Auth Object + """ + if self.authtype == "basic": + return HTTPBasicAuth(self.username, self.password) + + +if os.environ.get('CONFLUENCE_SPACE') != 'mock_confluence_space': + confluence_client = ConfluenceClient() +else: + # Else we are testing, and create a mock_client + class mock_confluence_client(object): + mock_data = False + update_stat = False + def update_stat_value(self, **kwargs): return + def update_stat_page(self, **kwargs): return + confluence_client = mock_confluence_client() diff --git a/sync2jira/confluence_stat.jinja b/sync2jira/confluence_stat.jinja new file mode 100644 index 
0000000..9c9d55b --- /dev/null +++ b/sync2jira/confluence_stat.jinja @@ -0,0 +1,55 @@ +

+
+

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Type of SyncNumber of SyncsAvg. Time (Seconds)
Created Issues{{ confluence_data['Created Issues'] }}60
Descriptions{{ confluence_data['Descriptions'] }}30
Comments{{ confluence_data['Comments'] }}30
Reporters{{ confluence_data['Reporters'] }}30
Assignees{{ confluence_data['Assignees'] }}15
Status{{ confluence_data['Status'] }}30
Transitions{{ confluence_data['Transitions'] }}30
Titles{{ confluence_data['Title'] }}15
Tags{{ confluence_data['Tags'] }}5
Fix Version{{ confluence_data['FixVersion'] }}5
Misc. Fields{{ confluence_data['Misc. Fields'] }}15
Total{{ confluence_data['Total'] }}{{ confluence_data['Total Time'] }}
+

+
+

\ No newline at end of file diff --git a/sync2jira/downstream_issue.py b/sync2jira/downstream_issue.py index b05e783..133066e 100644 --- a/sync2jira/downstream_issue.py +++ b/sync2jira/downstream_issue.py @@ -33,6 +33,8 @@ # Local Modules from sync2jira.intermediary import Issue, PR +from sync2jira.mailer import send_mail +from sync2jira.confluence_client import confluence_client # The date the service was upgraded # This is used to ensure legacy comments are not touched @@ -40,10 +42,29 @@ log = logging.getLogger('sync2jira') +remote_link_title = "Upstream issue" duplicate_issues_subject = 'FYI: Duplicate Sync2jira Issues' jira_cache = {} + +def check_jira_status(client): + """ + Function tests the status of the JIRA server. + + + :param jira.client.JIRA client: JIRA client + :return: True/False if the server is up + :rtype: Bool + """ + # Search for any issue remote title + ret = client.search_issues("issueFunction in linkedIssuesOfRemote('*')") + if len(ret) < 1: + # If we did not find anything return false + return False + return True + + def _comment_format(comment): """ Function to format JIRA comments. @@ -57,6 +78,20 @@ def _comment_format(comment): comment['id'], comment['author'], pretty_date, comment['body']) +def _comment_format_legacy(comment): + """ + Legacy function to format JIRA comments. + This is still used to match comments so no + duplicates are created. + + :param dict comment: Upstream comment + :returns: Comments formatted + :rtype: String + """ + return "Upstream, %s wrote:\n\n{quote}\n%s\n{quote}" % ( + comment['name'], comment['body']) + + def get_jira_client(issue, config): """ Function to match and create JIRA client. 
@@ -84,7 +119,7 @@ def get_jira_client(issue, config): if not jira_instance: log.error("No jira_instance for issue and there is no default in the config") raise Exception - + client = jira.client.JIRA(**config['sync2jira']['jira'][jira_instance]) return client @@ -100,10 +135,12 @@ def _matching_jira_issue_query(client, issue, config, free=False): :returns: results: Returns a list of matching JIRA issues if any are found :rtype: List """ - # Searches for any remote link to the issue.url\ - issue_title = issue.title.replace('[', '').replace(']', '') - query = f'summary ~ "{issue_title}"' - + # Searches for any remote link to the issue.url + query = 'issueFunction in linkedIssuesOfRemote("%s") and ' \ + 'issueFunction in linkedIssuesOfRemote("%s")' % ( + remote_link_title, issue.url) + if free: + query += ' and statusCategory != Done' # Query the JIRA client and store the results results_of_query = client.search_issues(query) if len(results_of_query) > 1: @@ -150,10 +187,122 @@ def _matching_jira_issue_query(client, issue, config, free=False): # Return the final_results log.debug("Found %i results for query %r", len(final_results), query) + # Alert the owner + if issue.downstream.get('owner'): + alert_user_of_duplicate_issues(issue, final_results, + results_of_query, + config, client) return final_results else: return results_of_query + +def alert_user_of_duplicate_issues(issue, final_result, results_of_query, + config, client): + """ + Alerts owner of duplicate downstream issues. 
+ + :param sync2jira.intermediate.Issue issue: Upstream Issue object + :param List final_result: Issue selected by matching algorithm + :param List results_of_query: Result of JQL query + :param Dict config: Config dict + :param jira.client.JIRA client: JIRA client + :returns: Nothing + """ + # First remove final_result from results_of_query + results_of_query.remove(final_result[0]) + + # Check that all duplicate issues are closed + updated_results = [] + for result in results_of_query: + if result.fields.status.name != 'Closed': + updated_results.append(result) + if not updated_results: + # Nothing to alert the owner of + return + + # Get base URL + jira_instance = issue.downstream.get('jira_instance', False) + if not jira_instance: + jira_instance = config['sync2jira'].get('default_jira_instance', False) + if not jira_instance: + log.error("No jira_instance for issue and there is no default in the config") + raise Exception + base_url = config['sync2jira']['jira'][jira_instance]['options']['server'] + '/browse/' + + # Format the updated results + template_ready = [] + for update in updated_results: + url = base_url + update.key + new_entry = {'url': url, 'title': update.key} + template_ready.append(new_entry) + + # Get owner name and email from Jira + ret = client.search_users(issue.downstream.get('owner')) + if len(ret) > 1: + log.warning('Found multiple users for username %s' % issue.downstream.get('owner')) + found = False + for person in ret: + if person.key == issue.downstream.get('owner'): + ret = [person] + found = True + break + if not found: + log.warning('Could not find JIRA user for username %s' % issue.downstream.get('owner')) + if not ret: + message = 'No owner could be found for username %s' % issue.downstream.get('owner') + log.warning(message.strip()) + return + + user = {'name': ret[0].displayName, 'email': ret[0].emailAddress} + + # Format selected issue + selected_issue = {'url': base_url + final_result[0].key, + 'title': final_result[0].key} 
+ + # Get admin information + admins = [] + admin_template = [] + for admin in config['sync2jira']['admins']: + admin_username = [name for name in admin][0] + ret = client.search_users(admin_username) + if len(ret) > 1: + log.warning('Found multiple users for admin %s' % list(admin.keys())[0]) + found = False + for person in ret: + if person.key == issue.downstream.get('owner'): + ret = [person] + found = True + break + if not found: + log.warning('Could not find JIRA user for admin %s' % list(admin.keys())[0]) + if not ret: + message = 'No admin could be found for username %s' % list(admin.keys())[0] + log.warning(message.strip()) + raise ValueError(message) + admins.append(ret[0].emailAddress) + admin_template.append({'name': ret[0].displayName, 'email': ret[0].emailAddress}) + + # Create and send email + templateLoader = jinja2.FileSystemLoader( + searchpath='usr/local/src/sync2jira/sync2jira/') + templateEnv = jinja2.Environment(loader=templateLoader) + template = templateEnv.get_template('email_template.jinja') + html_text = template.render(user=user, + admins=admin_template, + issue=issue, + selected_issue=selected_issue, + duplicate_issues=template_ready) + + # Send mail + send_mail(recipients=[user['email']], + cc=admins, + subject=duplicate_issues_subject, + text=html_text) + log.info('Alerted %s about %s duplicate issue(s)' % + (user['email'], len(template_ready))) + + def find_username(issue, config): """ Finds JIRA username for an issue object. 
@@ -203,7 +352,12 @@ def _find_comment_in_jira(comment, j_comments): :rtype: jira.resource.Comment/None """ formatted_comment = _comment_format(comment) + legacy_formatted_comment = _comment_format_legacy(comment) for item in j_comments: + if item.raw['body'] == legacy_formatted_comment: + # If the comment is in the legacy comment format + # return the item + return item if str(comment['id']) in item.raw['body']: # The comment id's match, if they dont have the same body, # we need to edit the comment @@ -258,6 +412,28 @@ def _get_existing_jira_issue(client, issue, config): return None +def _get_existing_jira_issue_legacy(client, issue, config): + """ + This is our old way of matching issues: use the special url field. + This will be phased out and removed in a future release. + + """ + + kwargs = dict(issue.downstream.items()) + kwargs["External issue URL"] = "%s" % issue.url + kwargs = sorted(kwargs.items(), key=operator.itemgetter(0)) + + query = " AND ".join([ + "=".join(["'%s'" % k, "'%s'" % v]) for k, v in kwargs + if v is not None + ]) + " AND (resolution is null OR resolution = Duplicate)" + results = client.search_issues(query) + if results: + return results[0] + else: + return None + + def attach_link(client, downstream, remote_link): """ Attaches the upstream link to the JIRA ticket. @@ -269,11 +445,7 @@ def attach_link(client, downstream, remote_link): :rtype: jira.resources.Issue """ log.info("Attaching tracking link %r to %r", remote_link, downstream.key) - if (downstream.fields.description is None): - previous_description = "" - else: - previous_description = downstream.fields.description - modified_desc = previous_description + " " + modified_desc = downstream.fields.description + " " # This is crazy. 
Querying for application links requires admin perms which # we don't have, so duckpunch the client to think it has already made the @@ -287,11 +459,69 @@ def attach_link(client, downstream, remote_link): # gets re-indexed, otherwise our searches won't work. Also, Handle some # weird API changes here... log.debug("Modifying desc of %r to trigger re-index.", downstream.key) - # downstream.update({'description': modified_desc}) + downstream.update({'description': modified_desc}) return downstream +def _upgrade_jira_issue(client, downstream, issue, config): + """ + Given an old legacy-style downstream issue... + ...upgrade it to a new-style issue. + Simply mark it with an external-url field value. + """ + log.info("Upgrading %r %r issue for %r", downstream.key, issue.downstream, issue) + if config['sync2jira']['testing']: + log.info("Testing flag is true. Skipping actual upgrade.") + return + + # Do it! + remote_link = dict(url=issue.url, title=remote_link_title) + attach_link(client, downstream, remote_link) + + +def assign_user(client, issue, downstream, remove_all=False): + """ + Attempts to assigns a JIRA issue to the correct + user based on the issue. + + :param jira.client.JIRA client: JIRA Client + :param sync2jira.intermediary.Issue issue: Issue object + :param jira.resources.Issue downstream: JIRA issue object + :param Bool remove_all: Flag to indicate if we should reset the assignees in the JIRA issue + :returns: Nothing + """ + # If removeAll flag, then we need to reset the assignees + if remove_all: + # Update the issue to have no assignees + downstream.update(assignee={'name': ''}) + # Then we're done! And we can go back ! + return + + # JIRA only supports one assignee + # If we have more than one assignee (i.e. from Github) + # assign the issue to the first user (i.e. 
issue.assignee[0]) + + # First we need to find the user + # Make API call to get a list of users + users = client.search_assignable_users_for_issues( + issue.assignee[0]['fullname'], + project=issue.downstream['project']) + # Loop through the query + for user in users: + if user.displayName == issue.assignee[0]['fullname']: + # Then we can assign the issue to the user + downstream.update({'assignee': {'name': user.key}}) + return + # If there is an owner, assign it to them + if issue.downstream.get('owner'): + client.assign_issue(downstream.id, issue.downstream.get('owner')) + log.warning('Assigned %s to owner: %s' % + (issue.title, issue.downstream.get('owner'))) + return + log.warning('Was not able to assign user %s' % issue.assignee[0]['fullname']) + + def change_status(client, downstream, status, issue): """ Change status of JIRA issue. @@ -312,6 +542,9 @@ def change_status(client, downstream, status, issue): try: client.transition_issue(downstream, id) log.info('Updated downstream to %s status for issue %s' % (status, issue.title)) + if confluence_client.update_stat: + confluence_data = {'Transition': 1} + confluence_client.update_stat_page(confluence_data) except JIRAError: log.error('Updating downstream issue failed for %s: %s' % (status, issue.title)) else: @@ -335,17 +568,14 @@ def _create_jira_issue(client, issue, config): return custom_fields = issue.downstream.get('custom_fields', {}) + default_type = issue.downstream.get('type', "Bug") - # Determine the type of issue based on the tags available - issue_type = 'Bug' - if ('story' in issue.tags): - issue_type = 'Story' - elif ('task' in issue.tags): - issue_type = 'Task' + confluence_data = {'Misc. 
Fields': 0, 'Created Issues': 1} # Build the description of the JIRA issue if 'description' in issue.downstream.get('issue_updates', {}): description = "Upstream description: {quote}%s{quote}" % issue.content + confluence_data['Descriptions'] = 1 else: description = '' @@ -353,6 +583,16 @@ def _create_jira_issue(client, issue, config): # Just add it to the top of the description formatted_status = "Upstream issue status: %s" % issue.status description = formatted_status + '\n' + description + confluence_data['Status'] = 1 + + if issue.reporter: + # Add to the description + description = '[%s] Upstream Reporter: %s \n %s' % ( + issue.id, + issue.reporter['fullname'], + description + ) + confluence_data['Reporters'] = 1 # Add the url if requested if 'url' in issue.downstream.get('issue_updates', {}): @@ -361,9 +601,8 @@ def _create_jira_issue(client, issue, config): kwargs = dict( summary=issue.title, description=description, - issuetype=dict(name=issue_type), + issuetype=dict(name="Story" if "RFE" in issue.title else default_type), ) - if issue.downstream['project']: kwargs['project'] = dict(key=issue.downstream['project']) if issue.downstream.get('component'): @@ -379,23 +618,71 @@ def _create_jira_issue(client, issue, config): # Add labels if needed if 'labels' in issue.downstream.keys(): kwargs['labels'] = issue.downstream['labels'] - - jira_username = get_jira_username_from_github(config, issue.reporter['fullname']) - kwargs['reporter'] = {'id': jira_username} log.info("Creating issue.") downstream = client.create_issue(**kwargs) - remote_link = dict(url=issue.url, title=f"[Issue] {issue.title}") + # Add Epic link, QA, EXD-Service field if present + if issue.downstream.get('epic-link') or \ + issue.downstream.get('qa-contact') or \ + issue.downstream.get('EXD-Service'): + # Fetch all fields + all_fields = client.fields() + # Make a map from field name -> field id + name_map = {field['name']: field['id'] for field in all_fields} + if 
issue.downstream.get('epic-link'): + # Try to get and update the custom field + custom_field = name_map.get('Epic Link', None) + if custom_field: + try: + downstream.update({custom_field: issue.downstream.get('epic-link')}) + except JIRAError: + client.add_comment(downstream, f"Error adding Epic-Link: {issue.downstream.get('epic-link')}") + confluence_data['Misc. Fields'] += 1 + if issue.downstream.get('qa-contact'): + # Try to get and update the custom field + custom_field = name_map.get('QA Contact', None) + if custom_field: + downstream.update({custom_field: issue.downstream.get('qa-contact')}) + confluence_data['Misc. Fields'] += 1 + if issue.downstream.get('EXD-Service'): + # Try to update the custom field + exd_service_info = issue.downstream.get('EXD-Service') + custom_field = name_map.get('EXD-Service', None) + if custom_field: + try: + downstream.update( + {custom_field: {"value": f"{exd_service_info['guild']}", + "child": {"value": f"{exd_service_info['value']}"}}}) + except JIRAError: + client.add_comment(downstream, + f"Error adding EXD-Service field.\n" + f"Project: {exd_service_info['guild']}\n" + f"Value: {exd_service_info['value']}") + confluence_data['Misc. Fields'] += 1 + + # Add upstream issue ID in comment if required + if 'upstream_id' in issue.downstream.get('issue_updates', []): + comment = f"Creating issue for " \ + f"[{issue.upstream}-#{issue.upstream_id}|{issue.url}]" + client.add_comment(downstream, comment) + confluence_data['Misc. Fields'] = 1 + + remote_link = dict(url=issue.url, title=remote_link_title) attach_link(client, downstream, remote_link) default_status = issue.downstream.get('default_status', None) if default_status is not None: change_status(client, downstream, default_status, issue) + confluence_data['Transitions'] = 1 + + # Update Confluence Page + if confluence_client.update_stat: + confluence_client.update_stat_page(confluence_data) # Update relevant information (i.e. tags, assignees etc.) 
if the # User opted in - _update_jira_issue(downstream, issue, client, config) + _update_jira_issue(downstream, issue, client) return downstream @@ -417,7 +704,7 @@ def _label_matching(jira_labels, issue_labels): return updated_labels -def _update_jira_issue(existing, issue, client, config): +def _update_jira_issue(existing, issue, client): """ Updates an existing JIRA issue (i.e. tags, assignee, comments etc). @@ -456,7 +743,7 @@ def _update_jira_issue(existing, issue, client, config): # Only synchronize assignee for listings that op-in if any('assignee' in item for item in updates): log.info("Looking for new assignee(s)") - _update_assignee(client, existing, issue, updates, config) + _update_assignee(client, existing, issue, updates) # Only synchronize descriptions for listings that op-in if 'description' in updates: @@ -508,6 +795,9 @@ def _update_url(existing, issue): data = {'description': new_description} existing.update(data) log.info('Updated description') + if confluence_client.update_stat: + confluence_data = {'Misc. 
Fields': 1} + confluence_client.update_stat_page(confluence_data) def _update_transition(client, existing, issue): @@ -519,6 +809,7 @@ def _update_transition(client, existing, issue): :param sync2jira.intermediary.Issue issue: Upstream issue :returns: Nothing """ + confluence_data = {} # Update the issue status in the JIRA description # Format the status formatted_status = "Upstream issue status: %s" % issue.status @@ -555,6 +846,9 @@ def _update_transition(client, existing, issue): data = {'description': new_description} existing.update(data) log.info('Updated transition') + confluence_data['Status'] = 1 + if confluence_client.update_stat and confluence_data: + confluence_client.update_stat_page(confluence_data) # If the user just inputted True, only update the description # If the user added a custom closed status, attempt to close the @@ -624,6 +918,9 @@ def _update_title(issue, existing): data = {'summary': issue.title} existing.update(data) log.info('Updated title') + if confluence_client.update_stat: + confluence_data = {'Title': 1} + confluence_client.update_stat_page(confluence_data) def _update_comments(client, existing, issue): @@ -646,6 +943,9 @@ def _update_comments(client, existing, issue): client.add_comment(existing, comment_body) if len(comments_d) > 0: log.info("Comments synchronization done on %i comments." % len(comments_d)) + if confluence_client.update_stat: + confluence_data = {'Comments': len(comments_d)} + confluence_client.update_stat_page(confluence_data) def _update_fixVersion(updates, existing, issue, client): @@ -698,13 +998,16 @@ def _update_fixVersion(updates, existing, issue, client): try: existing.update(data) log.info('Updated %s fixVersion(s)' % len(fix_version)) + if confluence_client.update_stat: + confluence_data = {'FixVersion': len(fix_version)} + confluence_client.update_stat_page(confluence_data) except JIRAError: log.warning('Error updating the fixVersion. %s is an invalid fixVersion.' 
% issue.fixVersion) # Add a comment to indicate there was an issue client.add_comment(existing, f"Error updating fixVersion: {issue.fixVersion}") -def _update_assignee(client, existing, issue, updates, config): +def _update_assignee(client, existing, issue, updates): """ Helper function update existing JIRA assignee from downstream issue. @@ -712,12 +1015,8 @@ def _update_assignee(client, existing, issue, updates, config): :param jira.resource.Issue existing: Existing JIRA issue :param sync2jira.intermediary.Issue issue: Upstream issue :param List updates: Downstream updates requested by the user - :param dict config: Config dict :returns: Nothing """ - if not issue.assignee: - return - # First check if overwrite is set to True try: # For python 3 > @@ -726,18 +1025,11 @@ def _update_assignee(client, existing, issue, updates, config): # for python 2.7 overwrite = bool((filter(lambda d: "assignee" in d, updates))[0]['assignee']['overwrite']) - # First find our mapped user in JIRA if they exist, else just quit - mapped_jira_id = config['mapping'][issue.assignee[0].name]['jira'] - - if not mapped_jira_id: - log.warn('Could not update assignee') - return - # First check if the issue is already assigned to the same person update = False if issue.assignee and issue.assignee[0]: try: - update = mapped_jira_id != existing.fields.assignee.key + update = issue.assignee[0]['fullname'] != existing.fields.assignee.displayName except AttributeError: update = True @@ -746,20 +1038,30 @@ def _update_assignee(client, existing, issue, updates, config): # And the issue has an assignee if not existing.fields.assignee and issue.assignee: if issue.assignee[0] and update: - existing.update({'assignee': {'id': mapped_jira_id}}) + # Update the assignee + assign_user(client, issue, existing) log.info('Updated assignee') + if confluence_client.update_stat: + confluence_data = {'Assignee': 1} + confluence_client.update_stat_page(confluence_data) return else: # Update the assignee if we have 
someone to assignee it too if update: - existing.update({'assignee': {'id': mapped_jira_id}}) + assign_user(client, issue, existing) log.info('Updated assignee') + if confluence_client.update_stat: + confluence_data = {'Assignee': 1} + confluence_client.update_stat_page(confluence_data) else: if existing.fields.assignee and not issue.assignee: # Else we should remove all assignees # Set removeAll flag to true - existing.update({'assignee': {'name': ''}}) + assign_user(client, issue, existing, remove_all=True) log.info('Updated assignee') + if confluence_client.update_stat: + confluence_data = {'Assignee': 1} + confluence_client.update_stat_page(confluence_data) def _update_jira_labels(issue, labels): @@ -778,6 +1080,9 @@ def _update_jira_labels(issue, labels): data = {'labels': _labels} issue.update(data) log.info('Updated %s tag(s)' % len(_labels)) + if confluence_client.update_stat: + confluence_data = {'Tags': len(_labels)} + confluence_client.update_stat_page(confluence_data) def _update_tags(updates, existing, issue): @@ -826,11 +1131,43 @@ def _update_description(existing, issue): r"Upstream description:(\r\n*|\r*|\n*|.*){quote}((?s).*){quote}", r"Upstream description: {quote}%s{quote}" % issue.content, new_description) + elif '] Upstream issue status:' in new_description and '] Upstream Reporter:' in new_description: + # We need to add a description field + today = datetime.today() + new_description = re.sub(r'\[[\w\W]*\] Upstream issue status: %s\n\[%s\] Upstream Reporter: %s' % ( + issue.status, issue.id, issue.reporter['fullname']), + r'[%s] Upstream issue status: %s\n[%s] Upstream Reporter: %s\n' + r'Upstream description: {quote}%s{quote}' % ( + today.strftime("%a %b %y - %H:%M"), issue.status, issue.id, + issue.reporter['fullname'], + issue.content), + new_description) + + elif '] Upstream issue status:' in new_description and '] Upstream Reporter:' not in new_description: + # We need to add a upstream reporter and description field + today = 
datetime.today() + new_description = re.sub(r'\[[\w\W]*\] Upstream issue status: %s' % issue.status, + r'[%s] Upstream issue status: %s\n' + r'[%s] Upstream Reporter: %s\n' + r'Upstream description: {quote}%s{quote}' % + (today.strftime("%a %b %y - %H:%M"), issue.status, issue.id, + issue.reporter['fullname'], issue.content), + new_description) + elif '] Upstream issue status:' not in new_description and '] Upstream Reporter:' in new_description: + # We need to just add the description field + new_description = re.sub( + r'\[%s\] Upstream Reporter: %s [\w\W]*' % (issue.id, issue.reporter['fullname']), + r'[%s] Upstream Reporter: %s \nUpstream description: {quote} %s {quote}' % + (issue.id, issue.reporter['fullname'], issue.content), new_description) else: - # Just add description to the top - upstream_description = "Upstream description: " \ + # Just add reporter and description to the top + upstream_reporter = '[%s] Upstream Reporter: %s' % ( + issue.id, + issue.reporter['fullname'] + ) + upstream_description = "%s \nUpstream description: " \ "{quote}%s{quote}" % \ - (issue.content) + (upstream_reporter, issue.content) new_description = '%s \n %s' % \ (upstream_description, new_description) # Now that we've updated the description (i.e. 
added @@ -844,9 +1181,21 @@ def _update_description(existing, issue): # Now we can update the JIRA issue if we need to if new_description != existing.fields.description: + # This logging is temporary and will be used to debug an + # issue regarding phantom updates + # Get the diff between new_description and existing + diff = difflib.unified_diff(existing.fields.description, new_description) + log.info(f"DEBUG: Issue {issue.title}") + log.info(f"DEBUG: Diff: {''.join(diff)}") + log.info(f"DEBUG: Old: {existing.fields.description}") + log.info(f"DEBUG: New: {new_description}") + data = {'description': new_description} existing.update(data) log.info('Updated description') + if confluence_client.update_stat: + confluence_data = {'Description': 1} + confluence_client.update_stat_page(confluence_data) def _update_on_close(existing, issue, updates): @@ -923,6 +1272,11 @@ def sync_with_jira(issue, config): # Create a client connection for this issue client = get_jira_client(issue, config) + # Check the status of the JIRA client + if not config['sync2jira']['develop'] and not check_jira_status(client): + log.warning('The JIRA server looks like its down. Shutting down...') + raise JIRAError + if issue.downstream.get('issue_updates', None): if issue.source == 'github' and issue.content and \ 'github_markdown' in issue.downstream['issue_updates']: @@ -939,7 +1293,7 @@ def sync_with_jira(issue, config): log.info("Testing flag is true. Skipping actual update.") return # Update relevant metadata (i.e. 
tags, assignee, etc) - _update_jira_issue(existing, issue, client, config) + _update_jira_issue(existing, issue, client) return # If we're *not* configured to do legacy matching (upgrade mode) then there @@ -960,8 +1314,83 @@ def sync_with_jira(issue, config): else: _upgrade_jira_issue(client, match, issue, config) -def get_jira_username_from_github(config, github_login): - """ Helper function to get JIRA username from Github login """ - for name, data in config['mapping'].items(): - if name == github_login: - return data['jira'] \ No newline at end of file + +def _close_as_duplicate(client, duplicate, keeper, config): + """ + Helper function to close an issue as a duplicate. + + :param jira.client client: JIRA Client + :param jira.resources.Issue duplicate: Duplicate JIRA Issue + :param jira.resources.Issue keeper: JIRA issue to keep + :param Dict config: Config dict + :returns: Nothing + """ + log.info("Closing %s as duplicate of %s", duplicate.permalink(), keeper.permalink()) + if config['sync2jira']['testing']: + log.info("Testing flag is true. Skipping actual delete.") + return + + # Find the id of some dropped or done state. + transitions = client.transitions(duplicate) + transitions = dict([(t['name'], t['id']) for t in transitions]) + closed = None + preferences = ['Dropped', 'Reject', 'Done', 'Closed', 'Closed (2)', ] + for preference in preferences: + if preference in transitions: + closed = transitions[preference] + break + + text = 'Marking as duplicate of %s' % keeper.key + if any([text in comment.body for comment in client.comments(duplicate)]): + log.info("Skipping comment. Already present.") + else: + client.add_comment(duplicate, text) + + text = '%s is a duplicate.' % duplicate.key + if any([text in comment.body for comment in client.comments(keeper)]): + log.info("Skipping comment. 
Already present.") + else: + client.add_comment(keeper, text) + + if closed: + try: + client.transition_issue(duplicate, closed, resolution={'name': 'Duplicate'}) + except Exception as e: + if "Field 'resolution' cannot be set" in e.response.text: + # Try closing without a specific resolution. + try: + client.transition_issue(duplicate, closed) + except Exception: + log.exception("Failed to close %r", duplicate.permalink()) + else: + log.exception("Failed to close %r", duplicate.permalink()) + else: + log.warning("Unable to find close transition for %r" % duplicate.key) + + +def close_duplicates(issue, config): + """ + Function to close duplicate JIRA issues. + + :param sync2jira.intermediary.Issue issue: Upstream Issue + :param Dict config: Config dict + :returns: Nothing + """ + # Create a client connection for this issue + client = get_jira_client(issue, config) + + # Check the status of the JIRA client + if not config['sync2jira']['develop'] and not check_jira_status(client): + log.warning('The JIRA server looks like its down. 
Shutting down...') + raise JIRAError + + log.info("Looking for dupes of upstream %s, %s", issue.url, issue.title) + results = _matching_jira_issue_query(client, issue, config, free=True) + if len(results) <= 1: + log.info("No duplicates found.") + return + + results = sorted(results, key=lambda x: arrow.get(x.fields.created)) + keeper, duplicates = results[0], results[1:] + for duplicate in duplicates: + _close_as_duplicate(client, duplicate, keeper, config) diff --git a/sync2jira/downstream_pr.py b/sync2jira/downstream_pr.py index 25da5f4..e01ecec 100644 --- a/sync2jira/downstream_pr.py +++ b/sync2jira/downstream_pr.py @@ -25,12 +25,13 @@ # Local Modules import sync2jira.downstream_issue as d_issue from sync2jira.intermediary import Issue, matcher +from sync2jira.confluence_client import confluence_client log = logging.getLogger('sync2jira') -def format_comment(pr, pr_suffix, client, config): +def format_comment(pr, pr_suffix, client): """ Formats comment to link PR. :param sync2jira.intermediary.PR pr: Upstream issue we're pulling data from @@ -40,21 +41,25 @@ def format_comment(pr, pr_suffix, client, config): :rtype: String """ # Find the pr.reporters JIRA username - ret = get_jira_username_from_github(config, pr.reporter) - if ret: - reporter = f"[~accountid:{ret}]" + ret = client.search_users(pr.reporter) + if len(ret) > 0: + # Loop through ret till we find an match + for user in ret: + if user.displayName == pr.reporter: + reporter = f"[~{user.key}]" + break else: reporter = pr.reporter if 'closed' in pr_suffix: - comment = f"Merge request [{pr.title.replace(']', '').replace('[', '')}|{pr.url}] was closed." + comment = f"Merge request [{pr.title}| {pr.url}] was closed." elif 'reopened' in pr_suffix: - comment = f"Merge request [{pr.title.replace(']', '').replace('[', '')}|{pr.url}] was reopened." + comment = f"Merge request [{pr.title}| {pr.url}] was reopened." 
elif 'merged' in pr_suffix: - comment = f"Merge request [{pr.title.replace(']', '').replace('[', '')}|{pr.url}] was merged!" + comment = f"Merge request [{pr.title}| {pr.url}] was merged!" else: comment = f"{reporter} mentioned this issue in " \ - f"merge request [{pr.title.replace(']', '').replace('[', '')}| {pr.url}]." + f"merge request [{pr.title}| {pr.url}]." return comment @@ -92,33 +97,34 @@ def comment_exists(client, existing, new_comment): return False -def update_jira_issue(existing, pr, client, config): +def update_jira_issue(existing, pr, client): """ Updates an existing JIRA issue (i.e. tags, assignee, comments etc). :param jira.resources.Issue existing: Existing JIRA issue that was found :param sync2jira.intermediary.PR pr: Upstream issue we're pulling data from :param jira.client.JIRA client: JIRA Client - :param dict config: Config dict :returns: Nothing """ # Get our updates array updates = pr.downstream.get('pr_updates', {}) # Format and add comment to indicate PR has been linked - new_comment = format_comment(pr, pr.suffix, client, config) - + new_comment = format_comment(pr, pr.suffix, client) # See if the issue_link and comment exists exists = issue_link_exists(client, existing, pr) comment_exist = comment_exists(client, existing, new_comment) # Check if the comment if already there if not exists: + if not comment_exist: + log.info(f"Added comment for PR {pr.title} on JIRA {pr.jira_key}") + client.add_comment(existing, new_comment) # Attach remote link remote_link = dict(url=pr.url, title=f"[PR] {pr.title}") d_issue.attach_link(client, existing, remote_link) - if not comment_exist: - log.info(f"Added comment for PR {pr.title} on JIRA {pr.jira_key}") - client.add_comment(existing, new_comment) + if confluence_client.update_stat: + confluence_data = {'Comments': 1} + confluence_client.update_stat_page(confluence_data) # Only synchronize link_transition for listings that op-in if any('merge_transition' in item for item in updates) and 'merged' in 
pr.suffix: @@ -144,14 +150,12 @@ def update_transition(client, existing, pr, transition_type): :returns: Nothing """ # Get our closed status - link_status = [transition for transition in pr.downstream.get('pr_updates', []) if transition_type in transition] - if link_status: - closed_status = link_status[0][transition_type] + closed_status = list(filter(lambda d: transition_type in d, pr.downstream.get('pr_updates', {})))[0][transition_type] - # Update the state - d_issue.change_status(client, existing, closed_status, pr) + # Update the state + d_issue.change_status(client, existing, closed_status, pr) - log.info(f"Updated {transition_type} for issue {pr.title}") + log.info(f"Updated {transition_type} for issue {pr.title}") def sync_with_jira(pr, config): @@ -177,6 +181,11 @@ def sync_with_jira(pr, config): # Create a client connection for this issue client = d_issue.get_jira_client(pr, config) + # Check the status of the JIRA client + if not config['sync2jira']['develop'] and not d_issue.check_jira_status(client): + log.warning('The JIRA server looks like its down. Shutting down...') + raise JIRAError + # Find our JIRA issue if one exists if isinstance(pr, Issue): pr.jira_key = matcher(pr.content, pr.comments) @@ -198,11 +207,5 @@ def sync_with_jira(pr, config): # Else start syncing relevant information log.info(f"Syncing PR {pr.title}") - update_jira_issue(existing, pr, client, config) + update_jira_issue(existing, pr, client) log.info(f"Done syncing PR {pr.title}") - -def get_jira_username_from_github(config, github_login): - """ Helper function to get JIRA username from Github login """ - for name, data in config['mapping'].items(): - if name == github_login: - return data['jira'] \ No newline at end of file diff --git a/sync2jira/email_template.jinja b/sync2jira/email_template.jinja new file mode 100644 index 0000000..afac25b --- /dev/null +++ b/sync2jira/email_template.jinja @@ -0,0 +1,29 @@ + + +

Hello {{ user['name'] }},
It looks like you have some duplicate issues for + upstream issue {{ issue._title }}

+

This issue was selected:

+ +

But these issues were also found:

+ +

Make sure to mark these duplicate issues as 'Closed' to avoid these emails!

+ {% if admins|length > 0 %} +

Questions? Get in contact with one of the admins: + {% for admin in admins %} + {{ admin.name }} + {{ "," if not loop.last }} + {% endfor %} +

+ {% endif %} + + \ No newline at end of file diff --git a/sync2jira/failure_template.jinja b/sync2jira/failure_template.jinja new file mode 100644 index 0000000..3b6ac1f --- /dev/null +++ b/sync2jira/failure_template.jinja @@ -0,0 +1,7 @@ + + +

Looks like Sync2Jira has failed!

+

Here is the full traceback:

+ {{ traceback }} + + \ No newline at end of file diff --git a/sync2jira/intermediary.py b/sync2jira/intermediary.py index b9865b4..e3a36cd 100644 --- a/sync2jira/intermediary.py +++ b/sync2jira/intermediary.py @@ -60,6 +60,53 @@ def title(self): def upstream_title(self): return self._title + @classmethod + def from_pagure(cls, upstream, issue, config): + """Helper function to create intermediary object.""" + base = config['sync2jira'].get('pagure_url', 'https://pagure.io') + upstream_source = 'pagure' + comments = [] + for comment in issue['comments']: + # Only add comments that are not Metadata updates + if '**Metadata Update' in comment['comment']: + continue + # Else add the comment + # Convert the date to datetime + comment['date_created'] = datetime.fromtimestamp(float(comment['date_created'])) + comments.append({ + 'author': comment['user']['name'], + 'body': comment['comment'], + 'name': comment['user']['name'], + 'id': comment['id'], + 'date_created': comment['date_created'], + 'changed': None + }) + + # Perform any mapping + mapping = config['sync2jira']['map'][upstream_source][upstream].get('mapping', []) + + # Check for fixVersion + if any('fixVersion' in item for item in mapping): + map_fixVersion(mapping, issue) + + return Issue( + source=upstream_source, + title=issue['title'], + url=base + '/%s/issue/%i' % (upstream, issue['id']), + upstream=upstream, + config=config, + comments=comments, + tags=issue['tags'], + fixVersion=[issue['milestone']], + priority=issue['priority'], + content=issue['content'], + reporter=issue['user'], + assignee=issue['assignee'], + status=issue['status'], + id=issue['date_created'], + upstream_id=issue['id'] + ) + @classmethod def from_github(cls, upstream, issue, config): """Helper function to create intermediary object.""" @@ -124,6 +171,8 @@ def __init__(self, source, jira_key, title, url, upstream, config, self.url = url self.upstream = upstream self.comments = comments + # self.tags = tags + # self.fixVersion = 
fixVersion self.priority = priority # JIRA treats utf-8 characters in ways we don't totally understand, so scrub content down to @@ -142,6 +191,7 @@ def __init__(self, source, jira_key, title, url, upstream, config, self.id = str(id) self.suffix = suffix self.match = match + # self.upstream_id = upstream_id if not downstream: self.downstream = config['sync2jira']['map'][self.source][upstream] @@ -153,6 +203,59 @@ def __init__(self, source, jira_key, title, url, upstream, config, def title(self): return u'[%s] %s' % (self.upstream, self._title) + @classmethod + def from_pagure(self, upstream, pr, suffix, config): + """Helper function to create intermediary object.""" + # Set our upstream source + upstream_source = 'pagure' + + # Format our comments + comments = [] + for comment in pr['comments']: + # Only add comments that are not Metadata updates + if '**Metadata Update' in comment['comment']: + continue + # Else add the comment + # Convert the date to datetime + comment['date_created'] = datetime.fromtimestamp( + float(comment['date_created'])) + comments.append({ + 'author': comment['user']['name'], + 'body': comment['comment'], + 'name': comment['user']['name'], + 'id': comment['id'], + 'date_created': comment['date_created'], + 'changed': None + }) + + # Build our URL + url = f"https://pagure.io/{pr['project']['name']}/pull-request/{pr['id']}" + + # Match a JIRA + match = matcher(pr.get('initial_comment'), comments) + + # Return our PR object + return PR( + source=upstream_source, + jira_key=match, + title=pr['title'], + url=url, + upstream=upstream, + config=config, + comments=comments, + # tags=issue['labels'], + # fixVersion=[issue['milestone']], + priority=None, + content=pr['initial_comment'], + reporter=pr['user']['fullname'], + assignee=pr['assignee'], + status=pr['status'], + id=pr['id'], + suffix=suffix, + match=match, + # upstream_id=issue['number'] + ) + @classmethod def from_github(self, upstream, pr, suffix, config): """Helper function to create 
intermediary object.""" @@ -177,6 +280,16 @@ def from_github(self, upstream, pr, suffix, config): # Match to a JIRA match = matcher(pr.get("body"), comments) + # Figure out what state we're transitioning too + if 'reopened' in suffix: + suffix = 'reopened' + elif 'closed' in suffix: + # Check if we're merging or closing + if pr['merged']: + suffix = 'merged' + else: + suffix = 'closed' + # Return our PR object return PR( source=upstream_source, @@ -186,13 +299,16 @@ def from_github(self, upstream, pr, suffix, config): upstream=upstream, config=config, comments=comments, + # tags=issue['labels'], + # fixVersion=[issue['milestone']], priority=None, content=pr.get('body'), reporter=pr['user']['fullname'], - assignee=pr['assignees'], + assignee=pr['assignee'], # GitHub PRs do not have status status=None, id=pr['number'], + # upstream_id=issue['number'], suffix=suffix, match=match, ) diff --git a/sync2jira/mailer.py b/sync2jira/mailer.py new file mode 100644 index 0000000..35a9324 --- /dev/null +++ b/sync2jira/mailer.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 +""" +This script is used to send emails +""" + +import smtplib +import os +from email.mime.text import MIMEText +from email.mime.multipart import MIMEMultipart + +DEFAULT_FROM = os.environ.get('DEFAULT_FROM') +DEFAULT_SERVER = os.environ.get('DEFAULT_SERVER') + + +def send_mail(recipients, subject, text, cc): + """ + Sends email to recipients. 
+ + :param List recipients: recipients of email + :param String subject: subject of the email + :param String text: HTML text + :param List cc: cc list of the email + :raises smtplib.SMTPException: if sending fails + :returns: Nothing + """ + _cfg = {} + _cfg.setdefault("server", DEFAULT_SERVER) + _cfg.setdefault("from", DEFAULT_FROM) + sender = _cfg["from"] + msg = MIMEMultipart('related') + msg["Subject"] = subject + msg["From"] = sender + msg["To"] = ", ".join(recipients) + if cc: + msg['Cc'] = ", ".join(cc) + server = smtplib.SMTP(_cfg["server"]) + part = MIMEText(text, 'html', 'utf-8') + msg.attach(part) + server.sendmail(sender, recipients, msg.as_string()) + server.quit() diff --git a/sync2jira/main.py b/sync2jira/main.py index f0f003c..198afe2 100644 --- a/sync2jira/main.py +++ b/sync2jira/main.py @@ -16,7 +16,7 @@ # License along with sync2jira; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110.15.0 USA # -# Authors: Ralph Bean , Sid Premkumar +# Authors: Ralph Bean """ Sync github and pagure issues to a jira instance, via fedmsg. Run with systemd, please. 
@@ -29,9 +29,10 @@ import requests from copy import deepcopy import os -import json # 3rd Party Modules +import fedmsg +import fedmsg.config import jinja2 from requests_kerberos import HTTPKerberosAuth, OPTIONAL @@ -40,7 +41,9 @@ import sync2jira.upstream_pr as u_pr import sync2jira.downstream_issue as d_issue import sync2jira.downstream_pr as d_pr +from sync2jira.mailer import send_mail from sync2jira.intermediary import matcher +from sync2jira.confluence_client import confluence_client # Set up our logging FORMAT = "[%(asctime)s] %(levelname)s: %(message)s" @@ -48,11 +51,66 @@ logging.basicConfig(format=FORMAT, level=logging.DEBUG) logging.basicConfig(format=FORMAT, level=logging.WARNING) log = logging.getLogger('sync2jira') - +if os.environ.get('CONFLUENCE_SPACE') == 'mock_confluence_space': + # If we are debugging save log output + try: + hdlr = logging.FileHandler('sync2jira_main.log') + log.addHandler(hdlr) + log.setLevel(logging.DEBUG) + except: # noqa: E722 + log.error("Unable to create log file!") + +# Only allow fedmsg logs that are critical +fedmsg_log = logging.getLogger('fedmsg.crypto.utils') +fedmsg_log.setLevel(50) + +remote_link_title = "Upstream issue" +failure_email_subject = "Sync2Jira Has Failed!" 
+ +# Issue related handlers +issue_handlers = { + # GitHub + 'github.issue.opened': u_issue.handle_github_message, + 'github.issue.reopened': u_issue.handle_github_message, + 'github.issue.labeled': u_issue.handle_github_message, + 'github.issue.assigned': u_issue.handle_github_message, + 'github.issue.unassigned': u_issue.handle_github_message, + 'github.issue.closed': u_issue.handle_github_message, + 'github.issue.comment': u_issue.handle_github_message, + 'github.issue.unlabeled': u_issue.handle_github_message, + 'github.issue.milestoned': u_issue.handle_github_message, + 'github.issue.demilestoned': u_issue.handle_github_message, + 'github.issue.edited': u_issue.handle_github_message, + # Pagure + 'pagure.issue.new': u_issue.handle_pagure_message, + 'pagure.issue.tag.added': u_issue.handle_pagure_message, + 'pagure.issue.comment.added': u_issue.handle_pagure_message, + 'pagure.issue.comment.edited': u_issue.handle_pagure_message, + 'pagure.issue.assigned.added': u_issue.handle_pagure_message, + 'pagure.issue.assigned.reset': u_issue.handle_pagure_message, + 'pagure.issue.edit': u_issue.handle_pagure_message, + 'pagure.issue.drop': u_issue.handle_pagure_message, + 'pagure.issue.tag.removed': u_issue.handle_pagure_message, +} + +# PR related handlers +pr_handlers = { + # GitHub + 'github.pull_request.opened': u_pr.handle_github_message, + 'github.pull_request.edited': u_pr.handle_github_message, + 'github.issue.comment': u_pr.handle_github_message, + 'github.pull_request.reopened': u_pr.handle_github_message, + 'github.pull_request.closed': u_pr.handle_github_message, + # Pagure + 'pagure.pull-request.new': u_pr.handle_pagure_message, + 'pagure.pull-request.comment.added': u_pr.handle_pagure_message, + 'pagure.pull-request.initial_comment.edited': u_pr.handle_pagure_message, +} +DATAGREPPER_URL = "http://apps.fedoraproject.org/datagrepper/raw" INITIALIZE = os.getenv('INITIALIZE', '0') -def load_config(config=os.environ['SYNC2JIRA_CONFIG']): +def 
load_config(loader=fedmsg.config.load_config): """ Generates and validates the config file \ that will be used by fedmsg and JIRA client. @@ -61,8 +119,10 @@ def load_config(config=os.environ['SYNC2JIRA_CONFIG']): :returns: The config dict to be used later in the program :rtype: Dict """ - with open(config, 'r') as jsonFile: - config = json.loads(jsonFile.read()) + config = loader() + + # Force some vars that we like + config['mute'] = True # Validate it if 'sync2jira' not in config: @@ -71,7 +131,7 @@ def load_config(config=os.environ['SYNC2JIRA_CONFIG']): if 'map' not in config['sync2jira']: raise ValueError("No sync2jira.map section found in fedmsg.d/ config") - possible = set(['github']) + possible = set(['pagure', 'github']) specified = set(config['sync2jira']['map'].keys()) if not specified.issubset(possible): message = "Specified handlers: %s, must be a subset of %s." @@ -81,14 +141,7 @@ def load_config(config=os.environ['SYNC2JIRA_CONFIG']): )) if 'jira' not in config['sync2jira']: - raise ValueError("No sync2jira.jira section found in config") - - # Update config based on env vars - config['sync2jira']['github_token'] = os.environ['SYNC2JIRA_GITHUB_TOKEN'] - config['sync2jira']['jira'][config['sync2jira']['default_jira_instance']]['basic_auth'] = ( - os.environ['SYNC2JIRA_JIRA_USERNAME'], - os.environ['SYNC2JIRA_JIRA_PASSWORD'] - ) + raise ValueError("No sync2jira.jira section found in fedmsg.d/ config") # Provide some default values defaults = { @@ -100,40 +153,32 @@ def load_config(config=os.environ['SYNC2JIRA_CONFIG']): return config -def listen(config, event_emitter): +def listen(config): """ Listens to activity on upstream repos on pagure and github \ via fedmsg, and syncs new issues there to the JIRA instance \ defined in 'fedmsg.d/sync2jira.py' :param Dict config: Config dict - :param rxObject event_emitter: Event emitter to wait for :returns: Nothing """ if not config['sync2jira'].get('listen'): log.info("`listen` is disabled. 
Exiting.") return - log.info("Waiting for a relevant webhook message to arrive...") - event_emitter.subscribe( - lambda x: handle_message(config, x) - ) + log.info("Waiting for a relevant fedmsg message to arrive...") + for _, _, topic, msg in fedmsg.tail_messages(**config): + idx = msg['msg_id'] + suffix = ".".join(topic.split('.')[3:]) + log.debug("Encountered %r %r %r", suffix, topic, idx) + + if suffix not in issue_handlers and suffix not in pr_handlers: + continue + + log.debug("Handling %r %r %r", suffix, topic, idx) + + handle_msg(msg, suffix, config) - while True: - # Constantly refresh the config file - config = load_config() - sleep(10) - -def handle_message(config, incoming_json): - if ('pull_request' in incoming_json.keys()): - pr = u_pr.handle_github_message(config, incoming_json) - if pr: - d_pr.sync_with_jira(pr, config) - elif ('issue' in incoming_json.keys()): - issue = u_issue.handle_github_message(config, incoming_json) - if issue: - d_issue.sync_with_jira(issue, config) - def initialize_issues(config, testing=False, repo_name=None): """ @@ -150,6 +195,18 @@ def initialize_issues(config, testing=False, repo_name=None): log.info("Running initialization to sync all issues from upstream to jira") log.info("Testing flag is %r", config['sync2jira']['testing']) mapping = config['sync2jira']['map'] + for upstream in mapping.get('pagure', {}).keys(): + if 'issue' not in mapping.get('pagure', {}).get(upstream, {}).get('sync', []): + continue + if repo_name is not None and upstream != repo_name: + continue + for issue in u_issue.pagure_issues(upstream, config): + try: + d_issue.sync_with_jira(issue, config) + except Exception as e: + log.error(f"Failed on {issue}\nException: {e}") + raise + log.info("Done with pagure issue initialization.") for upstream in mapping.get('github', {}).keys(): if 'issue' not in mapping.get('github', {}).get(upstream, {}).get('sync', []): @@ -176,6 +233,7 @@ def initialize_issues(config, testing=False, repo_name=None): else: if 
not config['sync2jira']['develop']: # Only send the failure email if we are not developing + report_failure(config) raise log.info("Done with github issue initialization.") @@ -195,6 +253,15 @@ def initialize_pr(config, testing=False, repo_name=None): log.info("Running initialization to sync all PRs from upstream to jira") log.info("Testing flag is %r", config['sync2jira']['testing']) mapping = config['sync2jira']['map'] + for upstream in mapping.get('pagure', {}).keys(): + if 'pullrequest' not in mapping.get('pagure', {}).get(upstream, {}).get('sync', []): + continue + if repo_name is not None and upstream != repo_name: + continue + for pr in u_pr.pagure_prs(upstream, config): + if pr: + d_pr.sync_with_jira(pr, config) + log.info("Done with pagure PR initialization.") for upstream in mapping.get('github', {}).keys(): if 'pullrequest' not in mapping.get('github', {}).get(upstream, {}).get('sync', []): @@ -222,16 +289,144 @@ def initialize_pr(config, testing=False, repo_name=None): else: if not config['sync2jira']['develop']: # Only send the failure email if we are not developing + report_failure(config) raise log.info("Done with github PR initialization.") -def main(event_emitter): + +def initialize_recent(config): + """ + Initializes based on the recent history of datagrepper + + :param Dict config: Config dict + :return: Nothing + """ + # Query datagrepper + ret = query(category=['github', 'pagure'], delta=int(600), rows_per_page=100) + + # Loop and sync + for entry in ret: + # Extract our topic + suffix = ".".join(entry['topic'].split('.')[3:]) + log.debug("Encountered %r %r", suffix, entry['topic']) + + # Disregard if it's invalid + if suffix not in issue_handlers and suffix not in pr_handlers: + continue + + # Deal with the message + log.debug("Handling %r %r", suffix, entry['topic']) + msg = entry['msg'] + handle_msg({'msg': msg}, suffix, config) + + +def handle_msg(msg, suffix, config): + """ + Function to handle incomming message from datagrepper + :param 
Dict msg: Incoming message + :param String suffix: Incoming suffix + :param Dict config: Config dict + """ + issue = None + pr = None + # Github '.issue.' is used for both PR and Issue + # Check for that edge case + if suffix == 'github.issue.comment': + if 'pull_request' in msg['msg']['issue'] and msg['msg']['action'] != 'deleted': + # pr_filter turns on/off the filtering of PRs + pr = issue_handlers[suffix](msg, config, pr_filter=False) + if not pr: + return + # Issues do not have suffix and reporter needs to be reformatted + pr.suffix = suffix + pr.reporter = pr.reporter.get('fullname') + setattr(pr, 'match', matcher(pr.content, pr.comments)) + else: + issue = issue_handlers[suffix](msg, config) + elif suffix in issue_handlers: + issue = issue_handlers[suffix](msg, config) + elif suffix in pr_handlers: + pr = pr_handlers[suffix](msg, config, suffix) + + if not issue and not pr: + return + if issue: + d_issue.sync_with_jira(issue, config) + elif pr: + d_pr.sync_with_jira(pr, config) + + +def query(limit=None, **kwargs): + """ + Run query on Datagrepper + + Args: + limit: the max number of messages to fetch at a time + kwargs: keyword arguments to build request parameters + """ + # Pack up the kwargs into a parameter list for request + params = deepcopy(kwargs) + + # Set up for paging requests + all_results = [] + page = params.get('page', 1) + + # Important to set ASC order when paging to avoid duplicates + params['order'] = 'asc' + + results = get(params=params) + + # Collect the messages + all_results.extend(results['raw_messages']) + + # Set up for loop + fetched = results['count'] + total = limit or results['total'] + + # Fetch results until no more are left + while fetched < total: + page += 1 + params['page'] = page + + results = get(params=params) + count = results['count'] + fetched += count + + # if we missed the condition and haven't fetched any + if count == 0: + break + + all_results.extend(results['raw_messages']) + + return all_results + + +def 
get(params): + url = DATAGREPPER_URL + headers = {'Accept': 'application/json', } + + response = requests.get(url=url, params=params, headers=headers, + auth=HTTPKerberosAuth(mutual_authentication=OPTIONAL)) + return response.json() + + +def main(runtime_test=False, runtime_config=None): """ Main function to check for initial sync - and listen. + and listen for fedmgs. + + :param Bool runtime_test: Flag to indicate if we are performing a runtime test. Default false + :param Dict runtime_config: Config file to be used if it is a runtime test. runtime_test must be true + :return: Nothing """ - # Load config - config = load_config() + # Load config and disable warnings + if not runtime_test or not runtime_config: + config = load_config() + else: + config = runtime_config + + if config['sync2jira']['confluence_statistics']: + confluence_client.update_stat_value(True) logging.basicConfig(level=logging.INFO) warnings.simplefilter("ignore") @@ -245,17 +440,93 @@ def main(event_emitter): initialize_issues(config) log.info("Initializing PRs...") initialize_pr(config) + if runtime_test: + return else: # Pool datagrepper from the last 10 mins - log.info("Initialization False...") + log.info("Initialization False. Pulling data from datagrepper...") + initialize_recent(config) try: - listen(config, event_emitter) + listen(config) except KeyboardInterrupt: pass except: # noqa: E722 if not config['sync2jira']['develop']: # Only send the failure email if we are not developing + report_failure(config) raise + +def report_failure(config): + """ + Helper function to alert admins in case of failure. 
+ + + :param Dict config: Config dict for JIRA + """ + # Email our admins with the traceback + templateLoader = jinja2.FileSystemLoader( + searchpath='usr/local/src/sync2jira/sync2jira/') + templateEnv = jinja2.Environment(loader=templateLoader) + template = templateEnv.get_template('failure_template.jinja') + html_text = template.render(traceback=traceback.format_exc()) + + # Send mail + send_mail(recipients=[config['sync2jira']['mailing-list']], + cc=None, + subject=failure_email_subject, + text=html_text) + + +def list_managed(): + """ + Function to list URL for issues under map in config. + + :return: Nothing + """ + config = load_config() + mapping = config['sync2jira']['map'] + warnings.simplefilter("ignore") + + for upstream in mapping.get('pagure', {}).keys(): + for issue in u_issue.pagure_issues(upstream, config): + print(issue.url) + + for upstream in mapping.get('github', {}).keys(): + for issue in u_issue.github_issues(upstream, config): + print(issue.url) + + +def close_duplicates(): + """ + Function to close duplicate functions. Uses downstream:close_duplicates. 
+ + :return: Nothing + """ + config = load_config() + logging.basicConfig(level=logging.INFO) + log.info("Testing flag is %r", config['sync2jira']['testing']) + mapping = config['sync2jira']['map'] + warnings.simplefilter("ignore") + + for upstream in mapping.get('pagure', {}).keys(): + for issue in u_issue.pagure_issues(upstream, config): + try: + d_issue.close_duplicates(issue, config) + except Exception: + log.error("Failed on %r", issue) + raise + log.info("Done with pagure duplicates.") + + for upstream in mapping.get('github', {}).keys(): + for issue in u_issue.github_issues(upstream, config): + try: + d_issue.close_duplicates(issue, config) + except Exception: + log.error("Failed on %r", issue) + raise + log.info("Done with github duplicates.") + + if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/sync2jira/upstream_issue.py b/sync2jira/upstream_issue.py index 4ee3b75..834453c 100644 --- a/sync2jira/upstream_issue.py +++ b/sync2jira/upstream_issue.py @@ -36,37 +36,68 @@ log = logging.getLogger('sync2jira') -def handle_github_message(config, msg): +def handle_github_message(msg, config, pr_filter=True): """ - Handle GitHub message from webhook. + Handle GitHub message from FedMsg. 
- :param Dict msg: webhook Message + :param Dict msg: FedMsg Message :param Dict config: Config File + :param Bool pr_filter: Switch to ignore pull_requests :returns: Issue object :rtype: sync2jira.intermediary.Issue """ - owner = msg['repository']['owner']['login'] - repo = msg['repository']['name'] - + owner = msg['msg']['repository']['owner']['login'] + repo = msg['msg']['repository']['name'] upstream = '{owner}/{repo}'.format(owner=owner, repo=repo) mapped_repos = config['sync2jira']['map']['github'] if upstream not in mapped_repos: log.debug("%r not in Github map: %r", upstream, mapped_repos.keys()) return None + elif 'issue' not in mapped_repos[upstream]['sync'] and pr_filter is True: + log.debug("%r not in Github Issue map: %r", upstream, mapped_repos.keys()) + return None + elif 'pullrequest' not in mapped_repos[upstream]['sync'] and pr_filter is False: + log.debug("%r not in Github PR map: %r", upstream, mapped_repos.keys()) + return None + + _filter = config['sync2jira']\ + .get('filters', {})\ + .get('github', {})\ + .get(upstream, {}) + + for key, expected in _filter.items(): + # special handling for label: we look for it in the list of msg labels + if key == 'labels': + actual = [label['name'] for label in msg['msg']['issue']['labels']] + if expected not in actual: + log.debug("Label %s not set on issue: %s", expected, upstream) + return None + else: + # direct comparison + actual = msg['msg']['issue'].get(key) + if actual != expected: + log.debug("Actual %r %r != expected %r on issue %s", + key, actual, expected, upstream) + return None + + if pr_filter and 'pull_request' in msg['msg']['issue']: + if not msg['msg']['issue'].get('closed_at', None): + log.debug("%r is a pull request. 
Ignoring.", msg['msg']['issue'].get('html_url')) + return None # Initialize Github object so we can get their full name (instead of their username) # And get comments if needed github_client = Github(config['sync2jira']['github_token'], retry=5) # If there are no comments just make an empty array - if msg['issue']['comments'] == 0: - msg['issue']['comments'] = [] + if msg['msg']['issue']['comments'] == 0: + msg['msg']['issue']['comments'] = [] else: # We have multiple comments and need to make api call to get them repo = github_client.get_repo(upstream) comments = [] - github_issue = repo.get_issue(number=msg['issue']['number']) + github_issue = repo.get_issue(number=msg['msg']['issue']['number']) for comment in github_issue.get_comments(): # First make API call to get the users name comments.append({ @@ -78,40 +109,160 @@ def handle_github_message(config, msg): 'changed': None }) # Assign the message with the newly formatted comments :) - msg['issue']['comments'] = comments + msg['msg']['issue']['comments'] = comments # Search for the user - reporter = github_client.get_user(msg['issue']['user']['login']) + reporter = github_client.get_user(msg['msg']['issue']['user']['login']) # Update the reporter field in the message (to match Pagure format) if reporter.name: - msg['issue']['user']['fullname'] = reporter.name + msg['msg']['issue']['user']['fullname'] = reporter.name else: - msg['issue']['user']['fullname'] = \ - msg['issue']['user']['login'] + msg['msg']['issue']['user']['fullname'] = \ + msg['msg']['issue']['user']['login'] # Now do the same thing for the assignees assignees = [] - for person in msg['issue']['assignees']: + for person in msg['msg']['issue']['assignees']: assignee = github_client.get_user(person['login']) assignees.append({'fullname': assignee.name}) # Update the assignee field in the message (to match Pagure format) - msg['issue']['assignees'] = assignees + msg['msg']['issue']['assignees'] = assignees # Update the label field in the message 
(to match Pagure format) - if msg['issue']['labels']: + if msg['msg']['issue']['labels']: # loop through all the labels on Github and add them # to the new label list and then reassign the message new_label = [] - for label in msg['issue']['labels']: + for label in msg['msg']['issue']['labels']: new_label.append(label['name']) - msg['issue']['labels'] = new_label + msg['msg']['issue']['labels'] = new_label # Update the milestone field in the message (to match Pagure format) - if msg['issue']['milestone']: - msg['issue']['milestone'] = msg['issue']['milestone']['title'] + if msg['msg']['issue']['milestone']: + msg['msg']['issue']['milestone'] = msg['msg']['issue']['milestone']['title'] + + return i.Issue.from_github(upstream, msg['msg']['issue'], config) - return i.Issue.from_github(upstream, msg['issue'], config) + +def handle_pagure_message(msg, config): + """ + Handle Pagure message from FedMsg. + + :param Dict msg: FedMsg Message + :param Dict config: Config File + :returns: Issue object + :rtype: sync2jira.intermediary.Issue + """ + upstream = msg['msg']['project']['name'] + ns = msg['msg']['project'].get('namespace') or None + if ns: + upstream = '{ns}/{upstream}'.format(ns=ns, upstream=upstream) + mapped_repos = config['sync2jira']['map']['pagure'] + + if upstream not in mapped_repos: + log.debug("%r not in Pagure map: %r", upstream, mapped_repos.keys()) + return None + elif 'issue' not in mapped_repos[upstream]['sync']: + log.debug("%r not in Pagure issue map: %r", upstream, mapped_repos.keys()) + return None + + _filter = config['sync2jira']\ + .get('filters', {})\ + .get('pagure', {}) \ + .get(upstream, {}) + + if _filter: + for key, expected in _filter.items(): + # special handling for tag: we look for it in the list of msg tags + if key == 'tags': + actual = msg['msg']['issue'].get('tags', []) + msg['msg'].get('tags', []) + + # Some messages send tags as strings, others as dicts. Handle both. 
+ actual = \ + [tag['name'] for tag in actual if isinstance(tag, dict)] + \ + [tag for tag in actual if isinstance(tag, string_type)] + + intersection = set(actual) & set(expected) + if not intersection: + log.debug("None of %r in %r on issue: %s", + expected, actual, upstream) + return None + else: + # direct comparison + actual = msg['msg']['issue'].get(key) + if actual != expected: + log.debug("Actual %r %r != expected %r on issue: %s", + key, actual, expected, upstream) + return None + + # If this is a dropped issue upstream + try: + if msg['topic'] == 'io.pagure.prod.pagure.issue.drop': + msg['msg']['issue']['status'] = 'Dropped' + except KeyError: + # Otherwise do nothing + pass + + # If this is a tag edit upstream + try: + # Add all updated tags to the tags on the issue + for tag in msg['msg']['tags']: + msg['msg']['issue']['tags'].append(tag) + except KeyError: + # Otherwise do nothing + pass + + # If this is a comment edit + try: + # Add it to the comments on the issue + msg['msg']['issue']['comments'].append(msg['msg']['comment']) + except KeyError: + # Otherwise do nothing + pass + + # Format the assignee field to match github (i.e. in a list) + msg['msg']['issue']['assignee'] = [msg['msg']['issue']['assignee']] + + return i.Issue.from_pagure(upstream, msg['msg']['issue'], config) + + +def pagure_issues(upstream, config): + """ + Creates a Generator for all Pagure issues in upstream repo. 
+ + :param String upstream: Upstream Repo + :param Dict config: Config Dict + :returns: Pagure Issue object generator + :rtype: sync2jira.intermediary.Issue + """ + base = config['sync2jira'].get('pagure_url', 'https://pagure.io') + url = base + '/api/0/' + upstream + '/issues' + + params = config['sync2jira']\ + .get('filters', {})\ + .get('pagure', {}) \ + .get(upstream, {}) + + response = requests.get(url, params=params) + if not bool(response): + try: + reason = response.json() + except Exception: + reason = response.text + raise IOError("response: %r %r %r" % (response, reason, response.request.url)) + data = response.json()['issues'] + + # Reformat the assignee value so that it is enclosed within an array + # We do this because Github supports multiple assignees, but JIRA doesn't :( + # Hopefully in the future it will support multiple assignees, thus enclosing + # the assignees in a list prepares for that support + for issue in data: + issue['assignee'] = [issue['assignee']] + + issues = (i.Issue.from_pagure(upstream, issue, config) for issue in data) + for issue in issues: + yield issue def github_issues(upstream, config): @@ -124,26 +275,39 @@ def github_issues(upstream, config): :rtype: sync2jira.intermediary.Issue """ token = config['sync2jira'].get('github_token') - + if not token: + headers = {} + log.warning('No github_token found. We will be rate-limited...') + else: + headers = {'Authorization': 'token ' + token} + + _filter = config['sync2jira']\ + .get('filters', {})\ + .get('github', {})\ + .get(upstream, {}) + + url = 'https://api.github.com/repos/%s/issues' % upstream + if _filter: + url += '?' 
+ urlencode(_filter) + + issues = get_all_github_data(url, headers) + # Initialize Github object so we can get their full name (instead of their username) # And get comments if needed github_client = Github(config['sync2jira']['github_token'], retry=5) - issues = get_all_github_data(upstream, github_client) # We need to format everything to a standard to we can create an issue object final_issues = [] for issue in issues: - final_issue = {} - # Update comments: # If there are no comments just make an empty array - if issue.comments == 0: - final_issue['comments'] = [] + if issue['comments'] == 0: + issue['comments'] = [] else: # We have multiple comments and need to make api call to get them repo = github_client.get_repo(upstream) comments = [] - github_issue = repo.get_issue(number=issue.number) + github_issue = repo.get_issue(number=issue['number']) for comment in github_issue.get_comments(): # First make API call to get the users name comments.append({ @@ -155,68 +319,58 @@ def github_issues(upstream, config): 'changed': None }) # Assign the message with the newly formatted comments :) - final_issue['comments'] = comments + issue['comments'] = comments # Update reporter: # Search for the user - reporter = github_client.get_user(issue.user.login) - final_issue['user'] = {} + reporter = github_client.get_user(issue['user']['login']) + # Update the reporter field in the message (to match Pagure format) if reporter.name: - final_issue['user']['fullname'] = reporter.name + issue['user']['fullname'] = reporter.name else: - final_issue['user']['fullname'] = issue.user.login + issue['user']['fullname'] = issue['user']['login'] # Update assignee(s): assignees = [] - for person in issue.assignees: - assignee = github_client.get_user(person.login) + for person in issue['assignees']: + assignee = github_client.get_user(person['login']) assignees.append({'fullname': assignee.name}) # Update the assignee field in the message (to match Pagure format) - final_issue['assignees'] 
= assignees + issue['assignees'] = assignees # Update label(s): - if issue.labels: + if issue['labels']: # loop through all the labels on Github and add them # to the new label list and then reassign the message new_label = [] - for label in issue.labels: - new_label.append(label.name) - final_issue['labels'] = new_label - else: - final_issue['labels'] = [] + for label in issue['labels']: + new_label.append(label['name']) + issue['labels'] = new_label # Update milestone: - if issue.milestone: - final_issue['milestone'] = issue.milestone.title - else: - final_issue['milestone'] = None - - # Finish up creating any other mappings - final_issue['state'] = issue.state - final_issue['title'] = issue.title - final_issue['html_url'] = issue.html_url - final_issue['body'] = issue.body - final_issue['assignees'] = issue.assignees - final_issue['state'] = issue.state - final_issue['id'] = issue.id - final_issue['number'] = issue.number - - final_issues.append(final_issue) + if issue.get('milestone', None): + issue['milestone'] = issue['milestone']['title'] + + final_issues.append(issue) final_issues = list(( i.Issue.from_github(upstream, issue, config) for issue in final_issues + if 'pull_request' not in issue # We don't want to copy these around )) - for issue in final_issues: yield issue -def get_all_github_data(upstream, github_client): - """ Helper function to get all issues for a upstream repo """ - repo = github_client.get_repo(upstream) - for issue in repo.get_issues(): - if (not issue.pull_request): +def get_all_github_data(url, headers): + """ Pagination utility. Obnoxious. 
""" + link = dict(next=url) + while 'next' in link: + response = _fetch_github_data(link['next'], headers) + for issue in response.json(): + comments = _fetch_github_data(issue['comments_url'], headers) + issue['comments'] = comments.json() yield issue + link = _github_link_field_to_dict(response.headers.get('link', None)) def _github_link_field_to_dict(field): diff --git a/sync2jira/upstream_pr.py b/sync2jira/upstream_pr.py index b049582..b1aa37d 100644 --- a/sync2jira/upstream_pr.py +++ b/sync2jira/upstream_pr.py @@ -37,18 +37,60 @@ log = logging.getLogger('sync2jira') -def handle_github_message(config, msg): +def handle_pagure_message(msg, config, suffix): + """ + Handle Pagure message from FedMsg. + + :param Dict msg: FedMsg Message + :param Dict config: Config File + :returns: Issue object + :rtype: sync2jira.intermediary.PR + """ + # Extract our upstream name + upstream = msg['msg']['pullrequest']['project']['name'] + ns = msg['msg']['pullrequest']['project'].get('namespace') or None + if ns: + upstream = '{ns}/{upstream}'.format(ns=ns, upstream=upstream) + mapped_repos = config['sync2jira']['map']['pagure'] + + # Check if we should sync this PR + if upstream not in mapped_repos: + log.debug("%r not in Pagure map: %r", upstream, mapped_repos.keys()) + return None + elif 'pullrequest' not in mapped_repos[upstream]['sync']: + log.debug("%r not in Pagure PR map: %r", upstream, mapped_repos.keys()) + return None + + # Format the assignee field to match github (i.e. 
in a list) + msg['msg']['pullrequest']['assignee'] = [msg['msg']['pullrequest']['assignee']] + + # Update suffix, Pagure suffix only register as comments + if msg['msg']['pullrequest']['status'] == 'Closed': + suffix = 'closed' + elif msg['msg']['pullrequest']['status'] == 'Merged': + suffix = 'merged' + elif msg['msg']['pullrequest'].get('closed_by') and \ + msg['msg']['pullrequest']['status'] == 'Open': + suffix = 'reopened' + elif msg['msg']['pullrequest']['status'] == 'Open': + suffix = 'open' + + return i.PR.from_pagure(upstream, msg['msg']['pullrequest'], suffix, config) + + +def handle_github_message(msg, config, suffix): """ Handle GitHub message from FedMsg. - :param Dict msg: Webhook Message + :param Dict msg: FedMsg Message :param Dict config: Config File + :param String suffix: FedMsg suffix :returns: Issue object :rtype: sync2jira.intermediary.PR """ # Create our title (i.e. owner/repo) - owner = msg['repository']['owner']['login'] - repo = msg['repository']['name'] + owner = msg['msg']['repository']['owner']['login'] + repo = msg['msg']['repository']['name'] upstream = '{owner}/{repo}'.format(owner=owner, repo=repo) # Check if upstream is in mapped repos @@ -65,13 +107,13 @@ def handle_github_message(config, msg): github_client = Github(config['sync2jira']['github_token']) # If there are no comments just make an empty array - if msg['pull_request']['comments'] == 0: - msg['pull_request']['comments'] = [] + if msg['msg']['pull_request']['comments'] == 0: + msg['msg']['pull_request']['comments'] = [] else: # We have multiple comments and need to make api call to get them repo = github_client.get_repo(upstream) comments = [] - github_pr = repo.get_pull(number=msg['pull_request']['number']) + github_pr = repo.get_pull(number=msg['msg']['pull_request']['number']) for comment in github_pr.get_issue_comments(): # First make API call to get the users name comments.append({ @@ -83,47 +125,83 @@ def handle_github_message(config, msg): 'changed': None }) # 
Assign the message with the newly formatted comments :) - msg['pull_request']['comments'] = comments + msg['msg']['pull_request']['comments'] = comments # Search for the user - reporter = github_client.get_user(msg['pull_request']['user']['login']) + reporter = github_client.get_user(msg['msg']['pull_request']['user']['login']) # Update the reporter field in the message (to match Pagure format) if reporter.name: - msg['pull_request']['user']['fullname'] = reporter.name + msg['msg']['pull_request']['user']['fullname'] = reporter.name else: - msg['pull_request']['user']['fullname'] = \ - msg['pull_request']['user']['login'] + msg['msg']['pull_request']['user']['fullname'] = \ + msg['msg']['pull_request']['user']['login'] # Now do the same thing for the assignees assignees = [] - for person in msg['pull_request']['assignees']: + for person in msg['msg']['pull_request']['assignees']: assignee = github_client.get_user(person['login']) assignees.append({'fullname': assignee.name}) # Update the assignee field in the message (to match Pagure format) - msg['pull_request']['assignees'] = assignees + msg['msg']['pull_request']['assignees'] = assignees # Update the label field in the message (to match Pagure format) - if msg['pull_request']['labels']: + if msg['msg']['pull_request']['labels']: # loop through all the labels on Github and add them # to the new label list and then reassign the message new_label = [] - for label in msg['pull_request']['labels']: + for label in msg['msg']['pull_request']['labels']: new_label.append(label['name']) - msg['pull_request']['labels'] = new_label + msg['msg']['pull_request']['labels'] = new_label # Update the milestone field in the message (to match Pagure format) - if msg['pull_request']['milestone']: - msg['pull_request']['milestone'] = msg['pull_request']['milestone']['title'] + if msg['msg']['pull_request']['milestone']: + msg['msg']['pull_request']['milestone'] = msg['msg']['pull_request']['milestone']['title'] + + return 
i.PR.from_github(upstream, msg['msg']['pull_request'], suffix, config) + + +def pagure_prs(upstream, config): + """ + Creates a Generator for all Pagure PRs in upstream repo. - # Determin the suffix - suffix = msg['action'] - if (suffix == 'closed'): - # Check if this PR has been merged - if (msg['pull_request']['merged_at'] is not None): - suffix = 'merged' + :param String upstream: Upstream Repo + :param Dict config: Config Dict + :returns: Pagure Issue object generator + :rtype: sync2jira.intermediary.PR + """ + # Build our our URL + base = config['sync2jira'].get('pagure_url', 'https://pagure.io') + url = base + '/api/0/' + upstream + '/pull-requests' + + # Get our filters + params = config['sync2jira']\ + .get('filters', {})\ + .get('pagure', {}) \ + .get(upstream, {}) + + # Make a GET call to Pagure.io + response = requests.get(url, params=params) + + # Catch if we have an error + if not bool(response): + try: + reason = response.json() + except Exception: + reason = response.text + raise IOError("response: %r %r %r" % (response, reason, response.request.url)) + + # Extract and format our data + data = response.json()['requests'] + + # Reformat Assignee + for pr in data: + pr['assignee'] = [pr['assignee']] - return i.PR.from_github(upstream, msg['pull_request'], suffix, config) + # Build our final list of data and yield + prs = (i.PR.from_pagure(upstream, pr, 'open', config) for pr in data) + for pr in prs: + yield pr def github_prs(upstream, config): @@ -138,24 +216,43 @@ def github_prs(upstream, config): # Get our GitHub token token = config['sync2jira'].get('github_token') - github_client = Github(config['sync2jira']['github_token']) + # Throw warning if we don't have a token set up + if not token: + headers = {} + log.warning('No github_token found. 
We will be rate-limited...') + else: + headers = {'Authorization': 'token ' + token} + + # Get our filters + _filter = config['sync2jira'] \ + .get('filters', {}) \ + .get('github', {}) \ + .get(upstream, {}) + + # Build our URL + url = 'https://api.github.com/repos/%s/pulls' % upstream + if _filter: + url += '?' + urlencode(_filter) # Get our issues using helper functions - prs = get_all_github_prs(upstream, github_client) + prs = u_issue.get_all_github_data(url, headers) + + # Initialize Github object so we can get their full name (instead of their username) + # And get comments if needed + github_client = Github(config['sync2jira']['github_token']) # Build our final list of prs final_prs = [] for pr in prs: - final_pr = {} # Update comments: # If there are no comments just make an empty array - if pr.comments == 0: - final_pr['comments'] = [] + if len(pr['comments']) == 0: + pr['comments'] = [] else: # We have multiple comments and need to make api call to get them repo = github_client.get_repo(upstream) comments = [] - github_pr = repo.get_pull(number=pr.number) + github_pr = repo.get_pull(number=pr['number']) for comment in github_pr.get_issue_comments(): # First make API call to get the users name comments.append({ @@ -167,58 +264,43 @@ def github_prs(upstream, config): 'changed': None }) # Assign the message with the newly formatted comments :) - final_pr['comments'] = comments + pr['comments'] = comments # Update reporter: # Search for the user - reporter = github_client.get_user(pr.user.login) + reporter = github_client.get_user(pr['user']['login']) # Update the reporter field in the message (to match Pagure format) - final_pr['user'] = {} if reporter.name: - final_pr['user']['fullname'] = reporter.name + pr['user']['fullname'] = reporter.name else: - final_pr['user']['fullname'] = pr.user.login + pr['user']['fullname'] = pr['user']['login'] # Update assignee(s): assignees = [] - for person in pr.assignees: - assignee = github_client.get_user(person.login) 
+ for person in pr.get('assignees', []): + assignee = github_client.get_user(person['login']) assignees.append({'fullname': assignee.name}) # Update the assignee field in the message (to match Pagure format) - final_pr['assignees'] = assignees + pr['assignees'] = assignees # Update label(s): - if pr.labels: + if pr['labels']: # loop through all the labels on Github and add them # to the new label list and then reassign the message new_label = [] - for label in pr.labels: + for label in pr['labels']: new_label.append(label['name']) - final_pr['labels'] = new_label + pr['labels'] = new_label # Update milestone: - if pr.milestone: - final_pr['milestone'] = pr.milestone.title - - # Finish up creating any other mappings - final_pr['html_url'] = pr.html_url - final_pr['title'] = pr.title - final_pr['body'] = pr.body - final_pr['number'] = pr.number - - final_prs.append(final_pr) + if pr.get('milestone', []): + pr['milestone'] = pr['milestone']['title'] + + final_prs.append(pr) # Build our final list of data and yield final_prs = list(( i.PR.from_github(upstream, pr, 'open', config) for pr in final_prs )) for issue in final_prs: yield issue - - -def get_all_github_prs(upstream, github_client): - """ Helper function to get all Prs for an upstream repo """ - repo = github_client.get_repo(upstream) - for issue in repo.get_issues(): - if (issue.pull_request): - yield issue \ No newline at end of file diff --git a/test-requirements.txt b/test-requirements.txt new file mode 100644 index 0000000..fbd9fc9 --- /dev/null +++ b/test-requirements.txt @@ -0,0 +1,6 @@ +python-coveralls +coverage +nose +pytest +pytest-cov +mock diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration_tests/integration_test.py b/tests/integration_tests/integration_test.py new file mode 100644 index 0000000..2f54d7e --- /dev/null +++ b/tests/integration_tests/integration_test.py @@ -0,0 +1,152 @@ +""" +This is a helper program to listen 
for UMB trigger. Test and then deploy Sync2Jira +""" +# Built-In Modules +import os +import logging +import sys +import types + +# Local Modules +from sync2jira.main import main as m +from jira_values import PAGURE, GITHUB +from runtime_config import runtime_config + +# 3rd Party Modules +import jira.client + +# Global Variables +URL = os.environ['JIRA_STAGE_URL'] +USERNAME = os.environ['JIRA_USER'] +PASSWORD = os.environ['JIRA_PASS'] +log = logging.getLogger(__name__) +hdlr = logging.FileHandler('integration_test.log') +formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s') +hdlr.setFormatter(formatter) +log.addHandler(hdlr) +log.setLevel(logging.DEBUG) + + +def main(): + """ + Main message to listen and react to messages. + """ + log.info("[OS-BUILD] Running sync2jira.main...") + + # Make our JIRA client + client = get_jira_client() + + # First init with what we have + m(runtime_test=True, runtime_config=runtime_config) + + # Now we need to make sure that Sync2Jira didn't update anything, + failed = False + + # Compare to our old values + log.info("[OS-BUILD] Comparing values with Pagure...") + try: + compare_data(client, PAGURE) + except Exception as e: + failed = True + log.info(f"[OS-BUILD] When comparing Pagure something went wrong.\nException {e}") + + log.info("[OS-BUILD] Comparing values with GitHub...") + try: + compare_data(client, GITHUB) + except Exception as e: + failed = True + log.info(f"[OS-BUILD] When comparing GitHub something went wrong.\nException {e}") + + if failed: + log.info("[OS-BUILD] Tests have failed :(") + raise Exception() + else: + log.info("[OS-BUILD] Tests have passed :)") + + +def compare_data(client, data): + """ + Helper function to loop over values and compare to ensure they are the same + :param jira.client.JIRA client: JIRA client + :param Dict data: Data used to compare against + :return: True/False if we + """ + # First get our existing JIRA issue + jira_ticket = data['JIRA'] + existing = 
client.search_issues(f"Key = {jira_ticket}") + + # Throw an error if too many issues were found + if len(existing) > 1: + raise Exception(f"Too many issues were found with ticket {jira_ticket}") + + existing = existing[0] + log.info("TEST - "+existing.fields.summary) + # Check Tags + if data['tags'] != existing.fields.labels: + raise Exception(f"Error when comparing tags for {jira_ticket}\n" + f"Expected: {data['tags']}\n" + f"Actual: {existing.fields.labels}") + + # Check FixVersion + formatted_fixVersion = format_fixVersion(existing.fields.fixVersions) + + if data['fixVersions'] != formatted_fixVersion: + raise Exception(f"Error when comparing fixVersions for {jira_ticket}\n" + f"Expected: {data['fixVersions']}\n" + f"Actual: {formatted_fixVersion}") + + # Check Assignee + if not existing.fields.assignee: + raise Exception(f"Error when comparing assignee for {jira_ticket}\n" + f"Expected: {data['assignee']}\n" + f"Actual: {existing.fields.assignee}") + + elif data['assignee'] != existing.fields.assignee.name: + raise Exception(f"Error when comparing assignee for {jira_ticket}\n" + f"Expected: {data['assignee']}\n" + f"Actual: {existing.fields.assignee.name}") + + # Check Title + if data['title'] != existing.fields.summary: + raise Exception(f"Error when comparing title for {jira_ticket}\n" + f"Expected: {data['title']}\n" + f"Actual: {existing.fields.summary}") + + # Check Descriptions + if data['description'].replace("\n", "").replace(" ", "").replace("\r", "") != existing.fields.description.replace("\n", "").replace(" ", "").replace("\r", ""): + raise Exception(f"Error when comparing descriptions for {jira_ticket}\n" + f"Expected: {data['description']}\n" + f"Actual: {existing.fields.description}") + + +def format_fixVersion(existing): + """ + Helper function to format fixVersions + :param jira.version existing: Existing fixVersions + :return: Formatted fixVersions + :rtype: List + """ + new_list = [] + for version in existing: + new_list.append(version.name) + 
return new_list + + +def get_jira_client(): + """ + Helper function to get JIRA client + :return: JIRA Client + :rtype: jira.client.JIRA + """ + return jira.client.JIRA(**{ + 'options': { + 'server': URL, + 'verify': False, + }, + 'basic_auth': (USERNAME, PASSWORD), + }) + + +if __name__ == '__main__': + # Call our main method after parsing out message + main() \ No newline at end of file diff --git a/tests/integration_tests/jira_values.py b/tests/integration_tests/jira_values.py new file mode 100644 index 0000000..aabf702 --- /dev/null +++ b/tests/integration_tests/jira_values.py @@ -0,0 +1,16 @@ +PAGURE = { + 'JIRA': 'FACTORY-6185', + 'title': '[Demo_project] Test Issue DO NOT TOUCH', + 'description': '[1580140473] Upstream Reporter: Sid Premkumar \n Upstream issue status: Open\n Upstream description: {quote}Some Description{quote} \n Upstream URL: https://pagure.io/Demo_project/issue/34', + 'fixVersions': ['FY19 Q2'], + 'assignee': 'sid', + 'tags': ['tag1', 'tag2', 'tag3'] +} +GITHUB = { + 'JIRA': 'FACTORY-6186', + 'title': '[sidpremkumar/Demo_repo] Test Issue DO NOT TOUCH', + 'description': '[555670302] Upstream Reporter: Sid Premkumar \n Upstream issue status: Open\nUpstream description: {quote}Some Description{quote} \nUpstream URL: https://github.com/sidpremkumar/Demo_repo/issues/30', + 'fixVersions': ['FY19 Q1'], + 'assignee': 'sid', + 'tags': ['bug'], +} diff --git a/tests/integration_tests/runtime_config.py b/tests/integration_tests/runtime_config.py new file mode 100644 index 0000000..cc19d56 --- /dev/null +++ b/tests/integration_tests/runtime_config.py @@ -0,0 +1,63 @@ +import os + +runtime_config = { + 'sync2jira': { + 'jira': { + 'pnt-jira': { + 'options': { + 'server': os.environ['JIRA_STAGE_URL'], + 'verify': True, + }, + 'basic_auth': ( + os.environ['JIRA_USER'], + os.environ['JIRA_PASS'], + ), + }, + }, + 'github_token': os.environ['SYNC2JIRA_GITHUB_TOKEN'], + 'admins': [{'spremkum', 'spremkum@redhat.com'}, {'rbean', 'rbean@redhat.com'}], + 
'initialize': True, + 'testing': False, + 'develop': True, + 'confluence_statistics': False, + + # We don't need legacy mode anymore. Not for a long time. Let's + # remove it soon. + 'legacy_matching': False, + + # Set the default jira to be pnt-jira + 'default_jira_instance': 'pnt-jira', + + 'filters': { + 'pagure': { + }, + 'github': { + }, + }, + 'map': { + 'pagure': { + 'Demo_project': {'project': 'FACTORY', 'component': 'gitbz', + 'issue_updates': [{'transition': True}, + 'description', + 'title', + {'tags': {'overwrite': True}}, + {'fixVersion': {'overwrite': True}}, + {'assignee': {'overwrite': True}}, + 'url'], + 'sync': ['issue']}, + }, + 'github': { + 'sidpremkumar/Demo_repo': {'project': 'FACTORY', 'component': 'gitbz', + 'issue_updates': [{'transition': True}, + 'description', + 'title', + {'tags': {'overwrite': True}}, + {'fixVersion': {'overwrite': True}}, + {'assignee': {'overwrite': True}}, + 'url'], + 'sync': ['issue']} + + }, + }, + } +} diff --git a/tests/test_confluence_client.py b/tests/test_confluence_client.py new file mode 100644 index 0000000..a979555 --- /dev/null +++ b/tests/test_confluence_client.py @@ -0,0 +1,306 @@ +import unittest + +import mock + +try: + # Python 3.3 > + from unittest.mock import MagicMock # noqa: F401 +except ImportError: + from mock import MagicMock # noqa: F401 + +PATH = 'sync2jira.confluence_client.' 
+ +from sync2jira.confluence_client import ConfluenceClient + + +class TestConfluenceClient(unittest.TestCase): + """ + This class tests the confluence_client.py file + """ + + @mock.patch(PATH + 'ConfluenceClient.find_page') + def setUp(self, + mock_find_page): + mock_find_page.return_value = "mock_page_id" + self.confluence_client = ConfluenceClient() + + self.mock_resp_bad = MagicMock() + self.mock_resp_bad.ok = False + + def test_update_state_value(self): + """ + This function tests the 'update_stat_value' function + """ + # Call the function + self.confluence_client.update_stat_value(True) + + # Assert Everything was called correctly + self.assertEqual(self.confluence_client.update_stat, True) + + @mock.patch(PATH + 'ConfluenceClient.get_auth_object') + @mock.patch(PATH + 'requests') + def test_req_kwargs_basic(self, + mock_requests, + mock_get_auth_object): + """ + This function tests 'req_kwargs' property with a basic client + """ + # Set up return values + mock_get_auth_object.return_value = 'mock_auth_object' + + # Call the function + response = self.confluence_client.req_kwargs + + # Assert everything was called correctly + mock_requests.get.assert_not_called() + mock_get_auth_object.assert_called() + self.assertEqual(response, {'auth': 'mock_auth_object'}) + + @mock.patch(PATH + 'requests') + @mock.patch(PATH + 'ConfluenceClient.req_kwargs') + def test_find_page_found(self, + mock_req_kwargs, + mock_requests): + """ + This function tests the 'find_page' function where we find a page + """ + # Set up return values + mock_resp = MagicMock() + mock_resp.json.return_value = {'results': [{'id': 'mock_id'}]} + mock_requests.get.return_value = mock_resp + + # Call the function + response = self.confluence_client.find_page() + + # Assert everything was called correctly + mock_requests.get.assert_called_with( + "http://mock_confluence_url/rest/api/content/search?cql=title='mock_confluence_page_title' and space=mock_confluence_space") + 
mock_resp.json.assert_called() + self.assertEqual(response, 'mock_id') + + @mock.patch(PATH + 'requests') + @mock.patch(PATH + 'ConfluenceClient.req_kwargs') + def test_find_page_not_found(self, + mock_req_kwargs, + mock_requests): + """ + This function tests the 'find_page' function where we don't find a page + """ + # Set up return values + mock_resp = MagicMock() + mock_resp.json.return_value = {'results': []} + mock_requests.get.return_value = mock_resp + + # Call the function + response = self.confluence_client.find_page() + + # Assert everything was called correctly + mock_requests.get.assert_called_with( + "http://mock_confluence_url/rest/api/content/search?cql=title='mock_confluence_page_title' and space=mock_confluence_space") + mock_resp.json.assert_called() + self.assertEqual(response, None) + + @mock.patch(PATH + 'requests') + @mock.patch(PATH + 'ConfluenceClient.req_kwargs') + def test_find_page_error(self, + mock_req_kwargs, + mock_requests): + """ + This function tests the 'find_page' function where we get an Error + """ + # Set up return values + mock_resp = MagicMock() + mock_resp.json.return_value = {'results': []} + mock_requests.get.return_value = self.mock_resp_bad + + # Call the function + self.confluence_client.find_page() + + # Assert everything was called correctly + mock_requests.get.assert_called_with( + "http://mock_confluence_url/rest/api/content/search?cql=title='mock_confluence_page_title' and space=mock_confluence_space") + + @mock.patch(PATH + 'requests') + @mock.patch(PATH + 'ConfluenceClient.req_kwargs') + def test_get_page_info(self, + mock_req_kwargs, + mock_requests): + """ + This function tests the 'get_page_info' function where we have no Errors + """ + # Set up return values + mock_resp = MagicMock() + mock_resp.json.return_value = 'mock_json' + mock_requests.get.return_value = mock_resp + + # Call the function + response = self.confluence_client.get_page_info('mock_page_id') + + # Assert everything was called correctly + 
mock_requests.get.assert_called_with( + 'http://mock_confluence_url/rest/api/content/mock_page_id?expand=ancestors,version,body.storage') + self.assertEqual(response, 'mock_json') + + @mock.patch(PATH + 'requests') + @mock.patch(PATH + 'ConfluenceClient.req_kwargs') + def test_get_page_info_error(self, + mock_req_kwargs, + mock_requests): + """ + This function tests the 'get_page_info' function where we have Errors + """ + # Set up return values + mock_resp = MagicMock() + mock_resp.json.return_value = 'mock_json' + mock_requests.get.return_value = self.mock_resp_bad + + # Call the function + self.confluence_client.get_page_info('mock_page_id') + + # Assert everything was called correctly + mock_requests.get.assert_called_with( + 'http://mock_confluence_url/rest/api/content/mock_page_id?expand=ancestors,version,body.storage') + + @mock.patch(PATH + 'ConfluenceClient.get_page_info') + @mock.patch(PATH + 'requests') + @mock.patch(PATH + 'ConfluenceClient.req_kwargs') + def test_update_page(self, + mock_req_kwargs, + mock_requests, + mock_get_page_info): + """ + This function tests the 'update_page' function where we have no Errors + """ + # Set up return values + mock_get_page_info.return_value = { + 'version': {'number': 1}, + 'title': 'mock_title'} + mock_resp = MagicMock() + mock_resp.ok = True + mock_resp.json.return_value = 'mock_json' + mock_requests.put.return_value = mock_resp + + # Call the function + response = self.confluence_client.update_page( + page_id='mock_page_id', + html_str='mock_html_str', + ) + + # Assert everything was called correctly + mock_requests.put.assert_called_with( + 'http://mock_confluence_url/rest/api/content/mock_page_id', + json={'id': 'mock_page_id', 'type': 'page', + 'title': 'mock_title', 'version': {'number': 2}, + 'body': {'storage': + {'representation': 'storage', 'value': 'mock_html_str'}}}) + self.assertEqual(response, 'mock_json') + + @mock.patch(PATH + 'ConfluenceClient.get_page_info') + @mock.patch(PATH + 'requests') + 
@mock.patch(PATH + 'ConfluenceClient.req_kwargs') + def test_update_page_error(self, + mock_req_kwargs, + mock_requests, + mock_get_page_info): + """ + This function tests the 'update_page' function where we have Errors + """ + # Set up return values + mock_get_page_info.return_value = { + 'version': {'number': 1}, + 'title': 'mock_title'} + mock_requests.put.return_value = self.mock_resp_bad + + # Call the function + self.confluence_client.update_page( + page_id='mock_page_id', + html_str='mock_html_str', + ) + + # Assert everything was called correctly + mock_requests.put.assert_called_with( + 'http://mock_confluence_url/rest/api/content/mock_page_id', + json={ + 'id': 'mock_page_id', + 'type': 'page', + 'title': 'mock_title', + 'version': {'number': 2}, + 'body': + {'storage': {'representation': 'storage', 'value': 'mock_html_str'}}}) + + @mock.patch(PATH + 'HTTPBasicAuth') + def test_get_auth_object_basic(self, + mock_basic,): + """ + This function tests 'get_auth_object' with basic auth + """ + # Set up return values + mock_basic.return_value = 'mock_basic_auth' + + # Call the function + response = self.confluence_client.get_auth_object() + + # Assert everything was called correctly + self.assertEqual(response, 'mock_basic_auth') + mock_basic.assert_called_with('mock_confluence_username', 'mock_confluence_password') + + @mock.patch(PATH + 'ConfluenceClient.update_page') + @mock.patch(PATH + 'jinja2') + @mock.patch(PATH + 'ConfluenceClient.get_page_info') + def test_update_stat_page(self, + mock_get_page_info, + mock_jinja2, + mock_update_page): + """ + This function tests 'update_stat_page' function + """ + # Set up return values + mock_html = """ + Created Issues1< + Descriptions1< + Comments1< + Reporters1< + Assignees1< + Status1< + Transitions1< + Titles1< + Tags1< + Fix Version1< + Misc. 
Fields1< + Total1< + """ + mock_get_page_info.return_value = {'body': {'storage': {'value': mock_html}}} + mock_confluence_data = { + 'Created Issues': 10, + 'Descriptions': 10, + 'Comments': 10, + 'Reporters': 10, + 'Status': 10, + 'Assignees': 10, + 'Transitions': 10, + 'Title': 10, + 'Tags': 10, + 'FixVersion': 10, + 'Misc. Fields': 10, + } + mock_templateLoader = MagicMock() + mock_templateEnv = MagicMock() + mock_template = MagicMock() + mock_template.render.return_value = 'mock_render' + mock_templateEnv.get_template.return_value = mock_template + mock_jinja2.FileSystemLoader.return_value = mock_templateLoader + mock_jinja2.Environment.return_value = mock_templateEnv + + # Call the function + self.confluence_client.update_stat_page(mock_confluence_data) + + # Assert Everything was called correctly + mock_jinja2.FileSystemLoader.assert_called_with(searchpath='usr/local/src/sync2jira/sync2jira/') + mock_jinja2.Environment.assert_called_with(loader=mock_templateLoader) + mock_templateEnv.get_template.assert_called_with('confluence_stat.jinja') + mock_template.render.assert_called_with(confluence_data={ + 'Created Issues': 11, 'Descriptions': 11, 'Comments': 11, + 'Reporters': 11, 'Status': 11, 'Assignees': 11, 'Transitions': 11, + 'Title': 11, 'Tags': 11, 'FixVersion': 11, 'Misc. 
Fields': 11, + 'Total': 121, 'Total Time': '0:50:25 (HR:MIN:SEC)'}) + mock_update_page.assert_called_with('mock_page_id', 'mock_render') diff --git a/tests/test_downstream_issue.py b/tests/test_downstream_issue.py new file mode 100644 index 0000000..5eceb2e --- /dev/null +++ b/tests/test_downstream_issue.py @@ -0,0 +1,1778 @@ +import mock +import unittest +try: + # Python 3.3 > + from unittest.mock import MagicMock # noqa: F401 +except ImportError: + from mock import MagicMock # noqa: F401 +from datetime import datetime + +import sync2jira.downstream_issue as d +from sync2jira.intermediary import Issue + +from nose.tools import eq_ +import jira.client +from jira import JIRAError + +PATH = 'sync2jira.downstream_issue.' + + +class TestDownstreamIssue(unittest.TestCase): + """ + This class tests the downstream_issue.py file under sync2jira + """ + def setUp(self): + """ + Setting up the testing environment + """ + # Mock Config dict + self.mock_config = { + 'sync2jira': { + 'default_jira_instance': 'another_jira_instance', + 'jira': { + 'mock_jira_instance': {'mock_jira': 'mock_jira'}, + 'another_jira_instance': {'basic_auth': ['mock_user'], + 'options': {'server': 'mock_server'}} + }, + 'testing': {}, + 'legacy_matching': False, + 'admins': [{'mock_admin': 'mock_email'}], + 'develop': False + }, + } + + # Mock sync2jira.intermediary.Issue + self.mock_issue = MagicMock() + self.mock_issue.assignee = [{'fullname': 'mock_user'}] + self.mock_issue.downstream = { + 'project': 'mock_project', + 'custom_fields': {'somecustumfield': 'somecustumvalue'}, + 'type': 'Fix', + 'qa-contact': 'dummy@dummy.com', + 'epic-link': 'DUMMY-1234', + 'EXD-Service': {'guild': 'EXD-Project', 'value': 'EXD-Value'}, + 'issue_updates': [ + 'comments', + {'tags': {'overwrite': False}}, + {'fixVersion': {'overwrite': False}}, + {'assignee': {'overwrite': True}}, 'description', 'title', + {'transition': 'CUSTOM TRANSITION'}, + {'on_close': {"apply_labels": ["closed-upstream"]}} + ], + 'owner': 
'mock_owner' + } + self.mock_issue.content = 'mock_content' + self.mock_issue.reporter = {'fullname': 'mock_user'} + self.mock_issue.url = 'mock_url' + self.mock_issue.title = 'mock_title' + self.mock_issue.comments = 'mock_comments' + self.mock_issue.tags = ['tag1', 'tag2'] + self.mock_issue.fixVersion = ['fixVersion3', 'fixVersion4'] + self.mock_issue.fixVersion = ['fixVersion3', 'fixVersion4'] + self.mock_issue.assignee = [{'fullname': 'mock_assignee'}] + self.mock_issue.status = 'Open' + self.mock_issue.id = '1234' + + # Mock issue updates + self.mock_updates = [ + 'comments', + {'tags': {'overwrite': False}}, + {'fixVersion': {'overwrite': False}}, + {'assignee': {'overwrite': True}}, 'description', 'title', + {'transition': 'CUSTOM TRANSITION'}, + {'on_close': {"apply_labels": ["closed-upstream"]}} + ] + + # Mock Jira transition + self.mock_transition = [{ + 'name': 'custom_closed_status', + 'id': 1234 + }] + + # Mock jira.resources.Issue + self.mock_downstream = MagicMock() + self.mock_downstream.id = 1234 + self.mock_downstream.fields.labels = ['tag3', 'tag4'] + mock_version1 = MagicMock() + mock_version1.name = 'fixVersion3' + mock_version2 = MagicMock() + mock_version2.name = 'fixVersion4' + self.mock_downstream.fields.fixVersions = [mock_version1, mock_version2] + self.mock_downstream.update.return_value = True + self.mock_downstream.fields.description = "This is an existing description" + + # Mock datetime.today() + self.mock_today = MagicMock() + self.mock_today.strftime.return_value = 'mock_today' + + @mock.patch('jira.client.JIRA') + def test_get_jira_client_not_issue(self, + mock_client): + """ + This tests 'get_jira_client' function where the passed in + argument is not an Issue instance + """ + # Call the function + with self.assertRaises(Exception): + d.get_jira_client( + issue='string', + config=self.mock_config + ) + + # Assert everything was called correctly + mock_client.assert_not_called() + + @mock.patch('jira.client.JIRA') + def 
test_get_jira_client_not_instance(self, + mock_client): + """ + This tests 'get_jira_client' function there is no JIRA instance + """ + # Set up return values + self.mock_issue.downstream = {} + self.mock_config['sync2jira']['default_jira_instance'] = {} + + # Call the function + with self.assertRaises(Exception): + d.get_jira_client( + issue=self.mock_issue, + config=self.mock_config + ) + + # Assert everything was called correctly + mock_client.assert_not_called() + + @mock.patch('jira.client.JIRA') + def test_get_jira_client(self, + mock_client): + """ + This tests 'get_jira_client' function where everything goes smoothly + """ + # Set up return values + mock_issue = MagicMock(spec=Issue) + mock_issue.downstream = {'jira_instance': 'mock_jira_instance'} + mock_client.return_value = 'Successful call!' + + # Call the function + + response = d.get_jira_client( + issue=mock_issue, + config=self.mock_config + ) + + # Assert everything was called correctly + mock_client.assert_called_with(mock_jira='mock_jira') + self.assertEqual('Successful call!', response) + + @mock.patch('jira.client.JIRA') + def test_get_existing_legacy(self, client): + """ + This tests '_get_existing_jira_issue_legacy' function + """ + class MockIssue(object): + downstream = {'key': 'value'} + url = 'wat' + issue = MockIssue() + config = self.mock_config + # Ensure that we get results back from the jira client. + target1 = "target1" + client.return_value.search_issues = mock.MagicMock(return_value=[target1]) + result = d._get_existing_jira_issue_legacy(jira.client.JIRA(), issue, config) + eq_(result, target1) + + client.return_value.search_issues.assert_called_once_with( + "'External issue URL'='wat' AND 'key'='value' AND " + "(resolution is null OR resolution = Duplicate)", + ) + + @mock.patch('jira.client.JIRA') + def test_get_existing_newstyle(self, client): + config = self.mock_config + + class MockIssue(object): + downstream = {'key': 'value'} + title = 'A title, a title...' 
+ url = 'http://threebean.org' + + + issue = MockIssue() + mock_results_of_query = MagicMock() + mock_results_of_query.fields.summary = 'A title, a title...' + + client.return_value.search_issues.return_value = [mock_results_of_query] + result = d._get_existing_jira_issue(jira.client.JIRA(), issue, config) + # Ensure that we get the mock_result_of_query as a result + self.assertEqual(result, mock_results_of_query) + + client.return_value.search_issues.assert_called_once_with( + 'issueFunction in linkedIssuesOfRemote("Upstream issue") and ' + 'issueFunction in linkedIssuesOfRemote("http://threebean.org")' + ) + + @mock.patch('jira.client.JIRA') + def test_upgrade_oldstyle_jira_issue(self, client): + config = self.mock_config + + class MockIssue(object): + downstream = {'key': 'value'} + title = 'A title, a title...' + url = 'http://threebean.org' + + downstream = mock.MagicMock() + issue = MockIssue() + client_obj = mock.MagicMock() + client.return_value = client_obj + d._upgrade_jira_issue(jira.client.JIRA(), downstream, issue, config) + + remote = { + 'url': 'http://threebean.org', + 'title': 'Upstream issue', + } + client_obj.add_remote_link.assert_called_once_with(downstream.id, remote) + + + @mock.patch('jira.client.JIRA') + def test_assign_user(self, mock_client): + """ + Test 'assign_user' function where remove_all flag is False + """ + # Set up return values + mock_user = MagicMock() + mock_user.displayName = 'mock_assignee' + mock_user.key = 'mock_user_key' + mock_client.search_assignable_users_for_issues.return_value = [mock_user] + mock_client.assign_issue.return_value = True + + # Call the assign user function + d.assign_user( + issue=self.mock_issue, + downstream=self.mock_downstream, + client=mock_client + ) + + # Assert that all calls mocked were called properly + self.mock_downstream.update({'assignee': {'name': 1234}}) + mock_client.search_assignable_users_for_issues.assert_called_with( + 'mock_assignee', + project='mock_project' + ) + + 
@mock.patch('jira.client.JIRA') + def test_assign_user_with_owner(self, mock_client): + """ + Test 'assign_user' function where remove_all flag is False + """ + # Set up return values + mock_user = MagicMock() + mock_user.displayName = 'mock_assignee' + mock_user.key = 'mock_user_key' + mock_client.search_assignable_users_for_issues.return_value = [] + mock_client.assign_issue.return_value = True + + # Call the assign user function + d.assign_user( + issue=self.mock_issue, + downstream=self.mock_downstream, + client=mock_client + ) + + # Assert that all calls mocked were called properly + mock_client.assign_issue.assert_called_with(1234, 'mock_owner') + mock_client.search_assignable_users_for_issues.assert_called_with( + 'mock_assignee', + project='mock_project' + ) + + @mock.patch('jira.client.JIRA') + def test_assign_user_without_owner(self, mock_client): + """ + Test 'assign_user' function where remove_all flag is False + """ + # Set up return values + mock_user = MagicMock() + mock_user.displayName = 'mock_assignee' + mock_user.key = 'mock_user_key' + mock_client.search_assignable_users_for_issues.return_value = [] + mock_client.assign_issue.return_value = True + self.mock_issue.downstream.pop('owner') + + # Call the assign user function + d.assign_user( + issue=self.mock_issue, + downstream=self.mock_downstream, + client=mock_client + ) + + # Assert that all calls mocked were called properly + mock_client.assign_issue.assert_not_called() + mock_client.search_assignable_users_for_issues.assert_called_with( + 'mock_assignee', + project='mock_project' + ) + + @mock.patch('jira.client.JIRA') + def test_assign_user_remove_all(self, mock_client): + """ + Test 'assign_user' function where remove_all flag is True + """ + # Call the assign user function + d.assign_user( + issue=self.mock_issue, + downstream=self.mock_downstream, + client=mock_client, + remove_all=True + ) + + # Assert that all calls mocked were called properly + 
self.mock_downstream.update.assert_called_with(assignee={'name': ''}) + mock_client.assign_issue.assert_not_called() + mock_client.search_assignable_users_for_issues.assert_not_called() + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + '_update_jira_issue') + @mock.patch(PATH + 'attach_link') + @mock.patch('jira.client.JIRA') + def test_create_jira_issue(self, + mock_client, + mock_attach_link, + mock_update_jira_issue, + mock_confluence_client): + """ + Tests '_create_jira_issue' function + """ + # Set up return values + mock_client.create_issue.return_value = self.mock_downstream + mock_client.fields.return_value = [ + {'name': 'Epic Link', 'id': 'customfield_1'}, + {'name': 'QA Contact', 'id': 'customfield_2'}, + {'name': 'EXD-Service', 'id': 'customfield_3'}, + ] + mock_confluence_client.update_stat = True + + # Call the function + response = d._create_jira_issue( + client=mock_client, + issue=self.mock_issue, + config=self.mock_config + ) + + # Assert everything was called correctly + mock_client.create_issue.assert_called_with( + issuetype={'name': 'Fix'}, + project={'key': 'mock_project'}, + somecustumfield='somecustumvalue', + description='[1234] Upstream Reporter: mock_user \n Upstream issue status: Open\nUpstream description: {quote}mock_content{quote}', + summary='mock_title' + ) + mock_attach_link.assert_called_with( + mock_client, + self.mock_downstream, + { + 'url': 'mock_url', + 'title': 'Upstream issue' + } + ) + mock_update_jira_issue.assert_called_with( + self.mock_downstream, + self.mock_issue, + mock_client + ) + self.mock_downstream.update.assert_any_call({'customfield_1': 'DUMMY-1234'}) + self.mock_downstream.update.assert_any_call({'customfield_2': 'dummy@dummy.com'}) + self.mock_downstream.update.assert_any_call( + {"customfield_3": {"value": "EXD-Project", "child": {"value": "EXD-Value"}}}) + self.assertEqual(response, self.mock_downstream) + mock_client.add_comment.assert_not_called() + 
mock_confluence_client.update_stat_page.assert_called_with( + {'Misc. Fields': 3, 'Created Issues': 1, 'Descriptions': 1, 'Status': 1, 'Reporters': 1} + ) + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + '_update_jira_issue') + @mock.patch(PATH + 'attach_link') + @mock.patch('jira.client.JIRA') + def test_create_jira_issue_failed_epic_link(self, + mock_client, + mock_attach_link, + mock_update_jira_issue, + mock_confluence_client): + """ + Tests '_create_jira_issue' function where we fail updating the epic link + """ + # Set up return values + mock_client.create_issue.return_value = self.mock_downstream + mock_client.fields.return_value = [ + {'name': 'Epic Link', 'id': 'customfield_1'}, + {'name': 'QA Contact', 'id': 'customfield_2'}, + {'name': 'EXD-Service', 'id': 'customfield_3'}, + ] + self.mock_downstream.update.side_effect = [JIRAError, 'success', 'success'] + mock_confluence_client.update_stat = True + + # Call the function + response = d._create_jira_issue( + client=mock_client, + issue=self.mock_issue, + config=self.mock_config + ) + + # Assert everything was called correctly + mock_client.create_issue.assert_called_with( + issuetype={'name': 'Fix'}, + project={'key': 'mock_project'}, + somecustumfield='somecustumvalue', + description='[1234] Upstream Reporter: mock_user \n Upstream issue status: Open\nUpstream description: {quote}mock_content{quote}', + summary='mock_title' + ) + mock_attach_link.assert_called_with( + mock_client, + self.mock_downstream, + { + 'url': 'mock_url', + 'title': 'Upstream issue' + } + ) + mock_update_jira_issue.assert_called_with( + self.mock_downstream, + self.mock_issue, + mock_client + ) + self.mock_downstream.update.assert_any_call({'customfield_1': 'DUMMY-1234'}) + self.mock_downstream.update.assert_any_call( + {'customfield_2': 'dummy@dummy.com'}) + self.mock_downstream.update.assert_any_call( + {"customfield_3": {"value": "EXD-Project", "child": {"value": "EXD-Value"}}}) + self.assertEqual(response, 
self.mock_downstream) + mock_client.add_comment.assert_called_with(self.mock_downstream, f"Error adding Epic-Link: DUMMY-1234") + mock_confluence_client.update_stat_page.assert_called_with( + {'Misc. Fields': 3, 'Created Issues': 1, 'Descriptions': 1, 'Status': 1, 'Reporters': 1}) + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + '_update_jira_issue') + @mock.patch(PATH + 'attach_link') + @mock.patch('jira.client.JIRA') + def test_create_jira_issue_failed_exd_service(self, + mock_client, + mock_attach_link, + mock_update_jira_issue, + mock_confluence_client): + """ + Tests '_create_jira_issue' function where we fail updating the EXD-Service field + """ + # Set up return values + mock_client.create_issue.return_value = self.mock_downstream + mock_client.fields.return_value = [ + {'name': 'Epic Link', 'id': 'customfield_1'}, + {'name': 'QA Contact', 'id': 'customfield_2'}, + {'name': 'EXD-Service', 'id': 'customfield_3'}, + ] + self.mock_downstream.update.side_effect = ['success', 'success', JIRAError] + mock_confluence_client.update_stat = True + + # Call the function + response = d._create_jira_issue( + client=mock_client, + issue=self.mock_issue, + config=self.mock_config + ) + + # Assert everything was called correctly + mock_client.create_issue.assert_called_with( + issuetype={'name': 'Fix'}, + project={'key': 'mock_project'}, + somecustumfield='somecustumvalue', + description='[1234] Upstream Reporter: mock_user \n Upstream issue status: Open\nUpstream description: {quote}mock_content{quote}', + summary='mock_title' + ) + mock_attach_link.assert_called_with( + mock_client, + self.mock_downstream, + { + 'url': 'mock_url', + 'title': 'Upstream issue' + } + ) + mock_update_jira_issue.assert_called_with( + self.mock_downstream, + self.mock_issue, + mock_client + ) + self.mock_downstream.update.assert_any_call({'customfield_1': 'DUMMY-1234'}) + self.mock_downstream.update.assert_any_call( + {'customfield_2': 'dummy@dummy.com'}) + 
self.mock_downstream.update.assert_any_call( + {"customfield_3": {"value": "EXD-Project", "child": {"value": "EXD-Value"}}}) + self.assertEqual(response, self.mock_downstream) + mock_client.add_comment.assert_called_with(self.mock_downstream, + f"Error adding EXD-Service field.\n" + f"Project: {self.mock_issue.downstream['EXD-Service']['guild']}\n" + f"Value: {self.mock_issue.downstream['EXD-Service']['value']}") + mock_confluence_client.update_stat_page.assert_called_with( + {'Misc. Fields': 3, 'Created Issues': 1, 'Descriptions': 1, 'Status': 1, 'Reporters': 1}) + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + '_update_jira_issue') + @mock.patch(PATH + 'attach_link') + @mock.patch('jira.client.JIRA') + def test_create_jira_issue_no_updates(self, + mock_client, + mock_attach_link, + mock_update_jira_issue, + mock_confluence_client): + """ + Tests '_create_jira_issue' function where we have + no updates + """ + # Set up return values + mock_client.create_issue.return_value = self.mock_downstream + self.mock_issue.downstream['issue_updates'] = [] + mock_confluence_client.update_stat = True + + # Call the function + response = d._create_jira_issue( + client=mock_client, + issue=self.mock_issue, + config=self.mock_config + ) + + # Assert everything was called correctly + mock_client.create_issue.assert_called_with( + issuetype={'name': 'Fix'}, + project={'key': 'mock_project'}, + somecustumfield='somecustumvalue', + description='[1234] Upstream Reporter: mock_user \n ', + summary='mock_title' + ) + mock_attach_link.assert_called_with( + mock_client, + self.mock_downstream, + { + 'url': 'mock_url', + 'title': 'Upstream issue' + } + ) + mock_update_jira_issue.assert_called_with( + self.mock_downstream, + self.mock_issue, + mock_client + ) + self.assertEqual(response, self.mock_downstream) + mock_client.add_comment.assert_not_called() + mock_confluence_client.update_stat_page.assert_called_with({'Misc. 
Fields': 1, 'Created Issues': 1, 'Reporters': 1}) + + + @mock.patch(PATH + 'get_jira_client') + @mock.patch(PATH + '_get_existing_jira_issue') + @mock.patch(PATH + '_update_jira_issue') + @mock.patch(PATH + '_create_jira_issue') + @mock.patch('jira.client.JIRA') + @mock.patch(PATH + '_get_existing_jira_issue_legacy') + @mock.patch(PATH + 'check_jira_status') + def test_sync_with_jira_matching(self, + mock_check_jira_status, + mock_existing_jira_issue_legacy, + mock_client, + mock_create_jira_issue, + mock_update_jira_issue, + mock_existing_jira_issue, + mock_get_jira_client): + """ + Tests 'sync_with_jira' function where we do find a matching issue + This assumes we're not using the legacy matching anymore + """ + # Set up return values + mock_get_jira_client.return_value = mock_client + mock_existing_jira_issue.return_value = self.mock_downstream + mock_check_jira_status.return_value = True + + # Call the function + d.sync_with_jira( + issue=self.mock_issue, + config=self.mock_config + ) + + # Assert all calls were made correctly + mock_get_jira_client.assert_called_with(self.mock_issue, self.mock_config) + mock_update_jira_issue.assert_called_with(self.mock_downstream, self.mock_issue, mock_client) + mock_create_jira_issue.assert_not_called() + mock_existing_jira_issue_legacy.assert_not_called() + + @mock.patch(PATH + 'get_jira_client') + @mock.patch(PATH + '_get_existing_jira_issue') + @mock.patch(PATH + '_update_jira_issue') + @mock.patch(PATH + '_create_jira_issue') + @mock.patch('jira.client.JIRA') + @mock.patch(PATH + '_get_existing_jira_issue_legacy') + @mock.patch(PATH + 'check_jira_status') + def test_sync_with_jira_down(self, + mock_check_jira_status, + mock_existing_jira_issue_legacy, + mock_client, + mock_create_jira_issue, + mock_update_jira_issue, + mock_existing_jira_issue, + mock_get_jira_client): + """ + Tests 'sync_with_jira' function where the JIRA scriptrunner is down + """ + # Set up return values + mock_get_jira_client.return_value = 
mock_client + mock_existing_jira_issue.return_value = self.mock_downstream + mock_check_jira_status.return_value = False + + # Call the function + with self.assertRaises(JIRAError): + d.sync_with_jira( + issue=self.mock_issue, + config=self.mock_config + ) + + # Assert all calls were made correctly + mock_get_jira_client.assert_called_with(self.mock_issue, self.mock_config) + mock_update_jira_issue.assert_not_called() + mock_create_jira_issue.assert_not_called() + mock_existing_jira_issue_legacy.assert_not_called() + + @mock.patch(PATH + 'get_jira_client') + @mock.patch(PATH + '_get_existing_jira_issue') + @mock.patch(PATH + '_update_jira_issue') + @mock.patch(PATH + '_create_jira_issue') + @mock.patch('jira.client.JIRA') + @mock.patch(PATH + '_get_existing_jira_issue_legacy') + @mock.patch(PATH + 'check_jira_status') + def test_sync_with_jira_no_matching(self, + mock_check_jira_status, + mock_existing_jira_issue_legacy, + mock_client, + mock_create_jira_issue, + mock_update_jira_issue, + mock_existing_jira_issue, + mock_get_jira_client): + """ + Tests 'sync_with_jira' function where we do NOT find a matching issue + This assumes we're not using the legacy matching anymore + """ + # Set up return values + mock_get_jira_client.return_value = mock_client + mock_existing_jira_issue.return_value = None + mock_check_jira_status.return_value = True + + # Call the function + d.sync_with_jira( + issue=self.mock_issue, + config=self.mock_config + ) + + # Assert all calls were made correctly + mock_get_jira_client.assert_called_with(self.mock_issue, self.mock_config) + mock_update_jira_issue.assert_not_called() + mock_create_jira_issue.assert_called_with(mock_client, self.mock_issue, self.mock_config) + mock_existing_jira_issue_legacy.assert_not_called() + + @mock.patch(PATH + '_update_title') + @mock.patch(PATH + '_update_description') + @mock.patch(PATH + '_update_comments') + @mock.patch(PATH + '_update_tags') + @mock.patch(PATH + '_update_fixVersion') + @mock.patch(PATH 
+ '_update_transition') + @mock.patch(PATH + '_update_assignee') + @mock.patch(PATH + '_update_on_close') + @mock.patch('jira.client.JIRA') + def test_update_jira_issue(self, + mock_client, + mock_update_on_close, + mock_update_assignee, + mock_update_transition, + mock_update_fixVersion, + mock_update_tags, + mock_update_comments, + mock_update_description, + mock_update_title): + """ + This tests '_update_jira_issue' function + """ + # Call the function + d._update_jira_issue( + existing=self.mock_downstream, + issue=self.mock_issue, + client=mock_client + ) + + # Assert all calls were made correctly + mock_update_comments.assert_called_with( + mock_client, + self.mock_downstream, + self.mock_issue + ) + mock_update_tags.assert_called_with( + self.mock_updates, + self.mock_downstream, + self.mock_issue + ) + mock_update_fixVersion.assert_called_with( + self.mock_updates, + self.mock_downstream, + self.mock_issue, + mock_client, + ) + mock_update_description.assert_called_with( + self.mock_downstream, + self.mock_issue + ) + mock_update_title.assert_called_with( + self.mock_issue, + self.mock_downstream + ) + mock_update_transition.assert_called_with( + mock_client, + self.mock_downstream, + self.mock_issue + ) + mock_update_on_close.assert_called_once() + + @mock.patch(PATH + 'confluence_client') + @mock.patch('jira.client.JIRA') + def test_update_transition_JIRAError(self, + mock_client, + mock_confluence_client): + """ + This function tests the '_update_transition' function where Upstream issue status + s not in existing.fields.description and transitioning the issue throws an error + """ + # Set up return values + self.mock_issue.status = 'Closed' + self.mock_downstream.fields.description = '' + mock_client.transitions.return_value = [{'name': 'CUSTOM TRANSITION', 'id': '1234'}] + mock_client.transition_issue.side_effect = JIRAError + mock_confluence_client.update_stat = True + + # Call the function + d._update_transition( + client=mock_client, + 
existing=self.mock_downstream, + issue=self.mock_issue + ) + + # Assert all calls were made correctly + self.mock_downstream.update.assert_called_with({'description': 'Upstream issue status: Closed\n'}) + mock_client.transitions.assert_called_with(self.mock_downstream) + mock_client.transition_issue.assert_called_with(self.mock_downstream, 1234) + mock_confluence_client.update_stat_page.assert_called_with({'Status': 1}) + + + @mock.patch(PATH + 'confluence_client') + @mock.patch('jira.client.JIRA') + def test_update_transition_not_found(self, + mock_client, + mock_confluence_client): + """ + This function tests the '_update_transition' function where Upstream issue status + not in existing.fields.description and we can't find the appropriate closed status + """ + # Set up return values + self.mock_issue.status = 'Closed' + self.mock_issue.downstream['transition'] = 'bad_transition' + self.mock_downstream.fields.description = '' + mock_client.transitions.return_value = [{'name': 'CUSTOM TRANSITION', 'id': '1234'}] + mock_confluence_client.update_stat = True + + # Call the function + d._update_transition( + client=mock_client, + existing=self.mock_downstream, + issue=self.mock_issue + ) + + # Assert all calls were made correctly + self.mock_downstream.update.assert_called_with({'description': 'Upstream issue status: Closed\n'}) + mock_client.transitions.assert_called_with(self.mock_downstream) + mock_client.transition_issue.assert_called_with(self.mock_downstream, 1234) + mock_confluence_client.update_stat_page.assert_called_with({'Transition': 1}) + + @mock.patch(PATH + 'confluence_client') + @mock.patch('jira.client.JIRA') + def test_update_transition_successful(self, + mock_client, + mock_confluence_client): + """ + This function tests the '_update_transition' function where everything goes smoothly! 
+ """ + # Set up return values + self.mock_issue.status = 'Closed' + self.mock_downstream.fields.description = '[test] Upstream issue status: Open' + mock_client.transitions.return_value = [{'name': 'CUSTOM TRANSITION', 'id': '1234'}] + mock_confluence_client.update_stat = True + + # Call the function + d._update_transition( + client=mock_client, + existing=self.mock_downstream, + issue=self.mock_issue + ) + + # Assert all calls were made correctly + self.mock_downstream.update.assert_called_with({'description': 'Upstream issue status: Closed'}) + mock_client.transitions.assert_called_with(self.mock_downstream) + mock_client.transition_issue.assert_called_with(self.mock_downstream, 1234) + mock_confluence_client.update_stat_page.assert_called_with({'Transition': 1}) + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + '_comment_format') + @mock.patch(PATH + '_comment_matching') + @mock.patch('jira.client.JIRA') + def test_update_comments(self, + mock_client, + mock_comment_matching, + mock_comment_format, + mock_confluence_client): + """ + This function tests the 'update_comments' function + """ + # Set up return values + mock_client.comments.return_value = 'mock_comments' + mock_comment_matching.return_value = ['mock_comments_d'] + mock_comment_format.return_value = 'mock_comment_body' + mock_confluence_client.update_stat = True + + # Call the function + d._update_comments( + client=mock_client, + existing=self.mock_downstream, + issue=self.mock_issue + ) + + # Assert all calls were made correctly + mock_client.comments.assert_called_with(self.mock_downstream) + mock_comment_matching.assert_called_with(self.mock_issue.comments, 'mock_comments') + mock_comment_format.assert_called_with('mock_comments_d') + mock_client.add_comment.assert_called_with(self.mock_downstream, 'mock_comment_body') + mock_confluence_client.update_stat_page.assert_called_with({'Comments': 1}) + + def test_update_fixVersion_JIRAError(self): + """ + This function tests the 
'update_fixVersion' function where updating the downstream + issue throws an error + """ + # Set up return values + self.mock_downstream.update.side_effect = JIRAError + self.mock_downstream.fields.fixVersions = [] + mock_client = MagicMock() + + # Call the function + d._update_fixVersion( + updates=self.mock_updates, + existing=self.mock_downstream, + issue=self.mock_issue, + client=mock_client, + ) + # Assert all calls were made correctly + self.mock_downstream.update.assert_called_with( + {'fixVersions': [{'name': 'fixVersion3'}, {'name': 'fixVersion4'}]}) + mock_client.add_comment(self.mock_downstream, f"Error updating fixVersion: {self.mock_issue.fixVersion}") + + + def test_update_fixVersion_no_api_call(self): + """ + This function tests the 'update_fixVersion' function existing labels are the same + and thus no API call should be made + """ + # Set up return values + self.mock_downstream.update.side_effect = JIRAError + mock_client = MagicMock() + + # Call the function + d._update_fixVersion( + updates=self.mock_updates, + existing=self.mock_downstream, + issue=self.mock_issue, + client=mock_client, + ) + # Assert all calls were made correctly + self.mock_downstream.update.assert_not_called() + mock_client.add_comment.assert_not_called() + + @mock.patch(PATH + 'confluence_client') + def test_update_fixVersion_successful(self, + mock_confluence_client): + """ + This function tests the 'update_fixVersion' function where everything goes smoothly! 
+ """ + # Set up return values + self.mock_downstream.fields.fixVersions = [] + mock_client = MagicMock() + mock_confluence_client.update_stat = True + + # Call the function + d._update_fixVersion( + updates=self.mock_updates, + existing=self.mock_downstream, + issue=self.mock_issue, + client=mock_client, + ) + # Assert all calls were made correctly + self.mock_downstream.update.assert_called_with( + {'fixVersions': [{'name': 'fixVersion3'}, {'name': 'fixVersion4'}]}) + mock_client.add_comment.assert_not_called() + mock_confluence_client.update_stat_page.assert_called_with({'FixVersion': 2}) + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + 'assign_user') + @mock.patch('jira.client.JIRA') + def test_update_assignee_assignee(self, + mock_client, + mock_assign_user, + mock_confluence_client): + """ + This function tests the 'update_assignee' function where issue.assignee exists + """ + # Call the function + d._update_assignee( + client=mock_client, + existing=self.mock_downstream, + issue=self.mock_issue, + updates=[{'assignee': {'overwrite': True}}] + ) + + # Assert all calls were made correctly + mock_assign_user.assert_called_with( + mock_client, + self.mock_issue, + self.mock_downstream + ) + mock_confluence_client.update_stat_page.assert_called_with({'Assignee': 1}) + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + 'assign_user') + @mock.patch('jira.client.JIRA') + def test_update_assignee_no_assignee(self, + mock_client, + mock_assign_user, + mock_confluence_client): + """ + This function tests the '_update_assignee' function where issue.assignee does not exist + """ + # Set up return values + self.mock_issue.assignee = None + + # Call the function + d._update_assignee( + client=mock_client, + existing=self.mock_downstream, + issue=self.mock_issue, + updates=[{'assignee': {'overwrite': True}}] + ) + + # Assert all calls were made correctly + mock_assign_user.assert_called_with( + mock_client, + self.mock_issue, + 
self.mock_downstream, + remove_all=True + ) + mock_confluence_client.update_stat_page.assert_called_with({'Assignee': 1}) + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + 'assign_user') + @mock.patch('jira.client.JIRA') + def test_update_assignee_no_overwrite(self, + mock_client, + mock_assign_user, + mock_confluence_client): + """ + This function tests the '_update_assignee' function where overwrite is false + """ + # Set up return values + self.mock_downstream.fields.assignee = None + mock_confluence_client.update_stat = True + + # Call the function + d._update_assignee( + client=mock_client, + existing=self.mock_downstream, + issue=self.mock_issue, + updates=[{'assignee': {'overwrite': False}}] + ) + + # Assert all calls were made correctly + mock_assign_user.assert_called_with( + mock_client, + self.mock_issue, + self.mock_downstream + ) + mock_confluence_client.update_stat_page.assert_called_with({'Assignee': 1}) + + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + 'verify_tags') + @mock.patch(PATH + '_label_matching') + def test_update_tags(self, + mock_label_matching, + mock_verify_tags, + mock_confluence_client): + """ + This function tests the '_update_tags' function + """ + # Set up return values + mock_label_matching.return_value = 'mock_updated_labels' + mock_verify_tags.return_value = ['mock_verified_tags'] + mock_confluence_client.update_stat = True + + # Call the function + d._update_tags( + updates=self.mock_updates, + existing=self.mock_downstream, + issue=self.mock_issue + ) + + # Assert all calls were made correctly + mock_label_matching.assert_called_with( + self.mock_issue.tags, + self.mock_downstream.fields.labels + ) + mock_verify_tags.assert_called_with('mock_updated_labels') + self.mock_downstream.update.assert_called_with({'labels': ['mock_verified_tags']}) + mock_confluence_client.update_stat_page.assert_called_with({'Tags': 1}) + + @mock.patch(PATH + 'verify_tags') + @mock.patch(PATH + '_label_matching') + 
def test_update_tags_no_api_call(self, + mock_label_matching, + mock_verify_tags): + """ + This function tests the '_update_tags' function where the existing tags are the same + as the new ones + """ + # Set up return values + mock_label_matching.return_value = 'mock_updated_labels' + mock_verify_tags.return_value = ['tag3', 'tag4'] + + # Call the function + d._update_tags( + updates=self.mock_updates, + existing=self.mock_downstream, + issue=self.mock_issue + ) + + # Assert all calls were made correctly + mock_label_matching.assert_called_with( + self.mock_issue.tags, + self.mock_downstream.fields.labels + ) + mock_verify_tags.assert_called_with('mock_updated_labels') + self.mock_downstream.update.assert_not_called() + + @mock.patch(PATH + 'confluence_client') + def test_update_description_update(self, + mock_confluence_client): + """ + This function tests '_update_description' where we just have to update the contents of the description + """ + # Set up return values + self.mock_downstream.fields.description = 'Upstream description: {quote} test {quote}' + mock_confluence_client.update_stat = True + + # Call the function + d._update_description( + existing=self.mock_downstream, + issue=self.mock_issue + ) + + # Assert all calls were made correctly + self.mock_downstream.update.assert_called_with( + {'description': 'Upstream description: {quote}mock_content{quote}'}) + mock_confluence_client.update_stat_page.assert_called_with({'Description': 1}) + + @mock.patch(PATH + 'confluence_client') + def test_update_description_add_field(self, + mock_confluence_client): + """ + This function tests '_update_description' where we just have to add a description field + """ + # Set up return values + self.mock_downstream.fields.description = '[123] Upstream Reporter: mock_user \n' \ + 'Upstream description: {quote} test {quote}' + + # Call the function + d._update_description( + existing=self.mock_downstream, + issue=self.mock_issue + ) + + # Assert all calls were made 
correctly + self.mock_downstream.update.assert_called_with( + {'description': '[123] Upstream Reporter: mock_user \n' + 'Upstream description: {quote}mock_content{quote}'}) + mock_confluence_client.update_stat_page.assert_called_with({'Description': 1}) + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + 'datetime') + def test_update_description_add_reporter(self, + mock_datetime, + mock_confluence_client): + """ + This function tests '_update_description' where we have to add a description and upstream reporter field + """ + # Set up return values + self.mock_downstream.fields.description = '[123] Upstream issue status: Open\n' + self.mock_issue.status = 'Open' + self.mock_issue.id = '123' + self.mock_issue.reporter = {'fullname': 'mock_user'} + mock_datetime.today.return_value = self.mock_today + mock_confluence_client.update_stat = True + + # Call the function + d._update_description( + existing=self.mock_downstream, + issue=self.mock_issue + ) + # Assert all calls were made correctly + self.mock_downstream.update.assert_called_with( + {'description': '[mock_today] Upstream issue status: Open\n[123]' + ' Upstream Reporter: mock_user\nUpstream description:' + ' {quote}mock_content{quote}\n'}) + mock_confluence_client.update_stat_page.assert_called_with({'Description': 1}) + + @mock.patch(PATH + 'confluence_client') + def test_update_description_add_reporter_no_status(self, + mock_confluence_client): + """ + This function tests '_update_description' where we have to add reporter and description without status + """ + # Set up return values + self.mock_downstream.fields.description = '' + mock_confluence_client.update_stat = True + + # Call the function + d._update_description( + existing=self.mock_downstream, + issue=self.mock_issue + ) + + # Assert all calls were made correctly + self.mock_downstream.update.assert_called_with( + {'description': '[1234] Upstream Reporter: mock_user \n' + 'Upstream description: {quote}mock_content{quote} \n '}) + 
mock_confluence_client.update_stat_page.assert_called_with({'Description': 1}) + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + 'datetime') + def test_update_description_add_description(self, + mock_datetime, + mock_confluence_client): + """ + This function tests '_update_description' where we have a reporter and status already + """ + # Set up return values + self.mock_downstream.fields.description = '[123] Upstream issue status: Open\n' \ + '[123] Upstream Reporter: mock_user\n' + self.mock_issue.status = 'Open' + self.mock_issue.id = '123' + self.mock_issue.reporter = {'fullname': 'mock_user'} + mock_datetime.today.return_value = self.mock_today + + # Call the function + d._update_description( + existing=self.mock_downstream, + issue=self.mock_issue + ) + + # Assert all calls were made correctly + self.mock_downstream.update.assert_called_with( + {'description': '[mock_today] Upstream issue status: Open\n' + '[123] Upstream Reporter: mock_user\n' + 'Upstream description: {quote}mock_content{quote}\n'}) + mock_confluence_client.update_stat_page.assert_called_with({'Description': 1}) + + def test_verify_tags(self): + """ + This function tests 'verify_tags' function + """ + # Call the function + response = d.verify_tags( + tags=['this is a tag'] + ) + + # Assert everything was called correctly + self.assertEqual(response, ['this_is_a_tag']) + + @mock.patch(PATH + 'get_jira_client') + @mock.patch(PATH + '_matching_jira_issue_query') + @mock.patch(PATH + '_close_as_duplicate') + @mock.patch('jira.client.JIRA') + @mock.patch(PATH + 'check_jira_status') + def test_close_duplicates_no_matching(self, + mock_check_jira_status, + mock_client, + mock_close_as_duplicate, + mock_matching_jira_issue_query, + mock_get_jira_client): + """ + This tests 'close_duplicates' function where len(results) <= 1 + """ + # Set up return values + mock_get_jira_client.return_value = mock_client + mock_matching_jira_issue_query.return_value = ['only_one_response'] + 
mock_check_jira_status.return_value = True + + # Call the function + response = d.close_duplicates( + issue=self.mock_issue, + config=self.mock_config + ) + + # Assert everything was called correctly + mock_get_jira_client.assert_called_with(self.mock_issue, self.mock_config) + mock_matching_jira_issue_query.assert_called_with( + mock_client, + self.mock_issue, + self.mock_config, + free=True + ) + mock_close_as_duplicate.assert_not_called() + self.assertEqual(None, response) + + @mock.patch(PATH + 'get_jira_client') + @mock.patch(PATH + '_matching_jira_issue_query') + @mock.patch(PATH + '_close_as_duplicate') + @mock.patch('jira.client.JIRA') + @mock.patch(PATH + 'check_jira_status') + def test_close_duplicates(self, + mock_check_jira_status, + mock_client, + mock_close_as_duplicate, + mock_matching_jira_issue_query, + mock_get_jira_client): + """ + This tests 'close_duplicates' function where len(results) > 1 + """ + # Set up return values + mock_get_jira_client.return_value = mock_client + mock_item = MagicMock() + mock_item.fields.created = 1 + mock_matching_jira_issue_query.return_value = [mock_item, mock_item, mock_item] + mock_check_jira_status.return_value = True + + # Call the function + response = d.close_duplicates( + issue=self.mock_issue, + config=self.mock_config + ) + + # Assert everything was called correctly + mock_get_jira_client.assert_called_with(self.mock_issue, self.mock_config) + mock_matching_jira_issue_query.assert_called_with( + mock_client, + self.mock_issue, + self.mock_config, + free=True + ) + mock_close_as_duplicate.assert_called_with( + mock_client, + mock_item, + mock_item, + self.mock_config + ) + self.assertEqual(None, response) + + @mock.patch('jira.client.JIRA') + def test_close_as_duplicate_errors(self, + mock_client): + """ + This tests '_close_as_duplicate' function where client.transition_issue throws an exception + """ + # Set up return values + class HTTPExceptionHelper(): + text = "Field 'resolution' cannot be set" + + 
class HTTPException(Exception): + response = HTTPExceptionHelper + + mock_duplicate = MagicMock() + mock_duplicate.permalink.return_value = 'mock_url' + mock_duplicate.key = 'mock_key' + mock_keeper = MagicMock() + mock_keeper.key = 'mock_key' + mock_keeper.permalink.return_value = 'mock_url' + mock_client.transitions.return_value = [{'name': 'Dropped', 'id': '1234'}] + mock_client.comments.return_value = [] + mock_client.transition_issue.side_effect = HTTPException + + # Call the function + d._close_as_duplicate( + client=mock_client, + duplicate=mock_duplicate, + keeper=mock_keeper, + config=self.mock_config + ) + + # Assert everything was called correctly + mock_client.comments.assert_any_call(mock_keeper) + mock_client.comments.assert_any_call(mock_duplicate) + mock_client.transitions.assert_called_with(mock_duplicate) + mock_client.add_comment.assert_any_call(mock_duplicate, 'Marking as duplicate of mock_key') + mock_client.add_comment.assert_any_call(mock_keeper, 'mock_key is a duplicate.') + mock_client.transition_issue.assert_any_call( + mock_duplicate, + '1234', + resolution={'name': 'Duplicate'} + ) + mock_client.transition_issue.assert_any_call( + mock_duplicate, + '1234' + ) + + @mock.patch('jira.client.JIRA') + def test_close_as_duplicate(self, + mock_client): + """ + This tests '_close_as_duplicate' function where everything goes smoothly + """ + # Set up return values + mock_duplicate = MagicMock() + mock_duplicate.permalink.return_value = 'mock_url' + mock_duplicate.key = 'mock_key' + mock_keeper = MagicMock() + mock_keeper.key = 'mock_key' + mock_keeper.permalink.return_value = 'mock_url' + mock_client.transitions.return_value = [{'name': 'Dropped', 'id': '1234'}] + mock_client.comments.return_value = [] + + # Call the function + d._close_as_duplicate( + client=mock_client, + duplicate=mock_duplicate, + keeper=mock_keeper, + config=self.mock_config + ) + + # Assert everything was called correctly + mock_client.comments.assert_any_call(mock_keeper) 
+ mock_client.comments.assert_any_call(mock_duplicate) + mock_client.transitions.assert_called_with(mock_duplicate) + mock_client.add_comment.assert_any_call(mock_duplicate, 'Marking as duplicate of mock_key') + mock_client.add_comment.assert_any_call(mock_keeper, 'mock_key is a duplicate.') + mock_client.transition_issue.assert_called_with( + mock_duplicate, + '1234', + resolution={'name': 'Duplicate'} + ) + + @mock.patch(PATH + 'alert_user_of_duplicate_issues') + @mock.patch(PATH + 'find_username') + @mock.patch(PATH + 'check_comments_for_duplicate') + @mock.patch('jira.client.JIRA') + def test_matching_jira_issue_query(self, + mock_client, + mock_check_comments_for_duplicates, + mock_find_username, + mock_alert_user_of_duplicate_issues): + """ + This tests '_matching_jira_query' function + """ + # Set up return values + mock_downstream_issue = MagicMock() + self.mock_issue.upstream_title = 'mock_upstream_title' + mock_downstream_issue.fields.description = self.mock_issue.id + bad_downstream_issue = MagicMock() + bad_downstream_issue.fields.description = 'bad' + bad_downstream_issue.fields.summary = 'bad' + mock_client.search_issues.return_value = [mock_downstream_issue, bad_downstream_issue] + mock_check_comments_for_duplicates.return_value = True + mock_find_username.return_value = 'mock_username' + mock_alert_user_of_duplicate_issues.return_value = True + + # Call the function + response = d._matching_jira_issue_query( + client=mock_client, + issue=self.mock_issue, + config=self.mock_config + ) + + # Assert everything was called correctly + self.assertEqual(response, [mock_downstream_issue]) + mock_alert_user_of_duplicate_issues.assert_called_with( + self.mock_issue, + [mock_downstream_issue], + mock_client.search_issues.return_value, + self.mock_config, + mock_client + ) + mock_client.search_issues.assert_called_with( + 'issueFunction in linkedIssuesOfRemote("Upstream issue")' + ' and issueFunction in linkedIssuesOfRemote("mock_url")') + 
mock_check_comments_for_duplicates.assert_called_with( + mock_client, + mock_downstream_issue, + 'mock_username' + ) + mock_find_username.assert_called_with( + self.mock_issue, + self.mock_config + ) + + @mock.patch(PATH + 'jinja2') + @mock.patch(PATH + 'send_mail') + @mock.patch('jira.client.JIRA') + def test_alert_user(self, + mock_client, + mock_mailer, + mock_jinja,): + """ + This tests 'alert_user_of_duplicate_issues' function + """ + # Set up return values + mock_downstream_issue = MagicMock() + mock_downstream_issue.key = 'mock_key' + bad_downstream_issue = MagicMock() + bad_downstream_issue.key = 'mock_key' + bad_downstream_issue.fields.status.name = 'To Do' + mock_results_of_query = [mock_downstream_issue, bad_downstream_issue] + mock_search_user_result = MagicMock() + mock_search_user_result.displayName = 'mock_name' + mock_search_user_result.emailAddress = 'mock_email' + mock_client.search_users.return_value = [mock_search_user_result] + mock_template = MagicMock(name='template') + mock_template.render.return_value = 'mock_html_text' + mock_template_env = MagicMock(name='templateEnv') + mock_template_env.get_template.return_value = mock_template + mock_jinja.Environment.return_value = mock_template_env + + # Call the function + d.alert_user_of_duplicate_issues( + issue=self.mock_issue, + final_result=[mock_downstream_issue], + results_of_query=mock_results_of_query, + config=self.mock_config, + client=mock_client + ) + + # Assert everything was called correctly + mock_client.search_users.assert_any_call('mock_owner') + mock_client.search_users.assert_any_call('mock_admin') + mock_template.render.assert_called_with( + admins=[{'name': 'mock_name', 'email': 'mock_email'}], + duplicate_issues=[{'url': 'mock_server/browse/mock_key', 'title': 'mock_key'}], + issue=self.mock_issue, + selected_issue={'url': 'mock_server/browse/mock_key', 'title': 'mock_key'}, + user={'name': 'mock_name', 'email': 'mock_email'}) + mock_mailer().send.asset_called_with('test') + + 
@mock.patch(PATH + 'jinja2') + @mock.patch(PATH + 'send_mail') + @mock.patch('jira.client.JIRA') + def test_alert_user_multiple_users(self, + mock_client, + mock_mailer, + mock_jinja, ): + """ + This tests 'alert_user_of_duplicate_issues' function + where searching returns multiple users + """ + # Set up return values + mock_downstream_issue = MagicMock() + mock_downstream_issue.key = 'mock_key' + bad_downstream_issue = MagicMock() + bad_downstream_issue.key = 'mock_key' + bad_downstream_issue.fields.status.name = 'To Do' + mock_results_of_query = [mock_downstream_issue, bad_downstream_issue] + mock_search_user_result1 = MagicMock() + mock_search_user_result1.displayName = 'bad_name' + mock_search_user_result1.emailAddress = 'bad_email' + mock_search_user_result1.key = 'bad_owner' + mock_search_user_result2 = MagicMock() + mock_search_user_result2.displayName = 'mock_name' + mock_search_user_result2.emailAddress = 'mock_email' + mock_search_user_result2.key = 'mock_owner' + mock_client.search_users.return_value = [mock_search_user_result1, mock_search_user_result2] + mock_template = MagicMock(name='template') + mock_template.render.return_value = 'mock_html_text' + mock_template_env = MagicMock(name='templateEnv') + mock_template_env.get_template.return_value = mock_template + mock_jinja.Environment.return_value = mock_template_env + + # Call the function + d.alert_user_of_duplicate_issues( + issue=self.mock_issue, + final_result=[mock_downstream_issue], + results_of_query=mock_results_of_query, + config=self.mock_config, + client=mock_client + ) + + # Assert everything was called correctly + mock_client.search_users.assert_any_call('mock_owner') + mock_client.search_users.assert_any_call('mock_admin') + mock_template.render.assert_called_with( + admins=[{'name': 'mock_name', 'email': 'mock_email'}], + duplicate_issues=[{'url': 'mock_server/browse/mock_key', 'title': 'mock_key'}], + issue=self.mock_issue, + selected_issue={'url': 'mock_server/browse/mock_key', 
'title': 'mock_key'}, + user={'name': 'mock_name', 'email': 'mock_email'}) + mock_mailer().send.asset_called_with('test') + + def test_find_username(self): + """ + Tests 'find_username' function + """ + # Call the function + response = d.find_username( + self.mock_issue, + self.mock_config + ) + + # Assert everything was called correctly + self.assertEqual(response, 'mock_user') + + @mock.patch('jira.client.JIRA') + def test_check_comments_for_duplicates(self, + mock_client): + """ + Tests 'check_comments_for_duplicates' function + """ + # Set up return values + mock_comment = MagicMock() + mock_comment.body = 'Marking as duplicate of TEST-1234' + mock_comment.author.name = 'mock_user' + mock_client.comments.return_value = [mock_comment] + mock_client.issue.return_value = 'Successful Call!' + + # Call the function + response = d.check_comments_for_duplicate( + client=mock_client, + result=self.mock_downstream, + username='mock_user' + ) + + # Assert everything was called correctly + self.assertEqual(response, 'Successful Call!') + mock_client.comments.assert_called_with(self.mock_downstream) + mock_client.issue.assert_called_with('TEST-1234') + + @mock.patch(PATH + '_comment_format') + @mock.patch(PATH + '_comment_format_legacy') + def test_find_comment_in_jira_legacy(self, + mock_comment_format_legacy, + mock_comment_format): + """ + This function tests '_find_comment_in_jira' where we find a legacy comment + """ + # Set up return values + mock_comment_format.return_value = 'mock_comment_body' + mock_comment_format_legacy.return_value = 'mock_legacy_comment_body' + mock_jira_comment = MagicMock() + mock_jira_comment.raw = {'body': 'mock_legacy_comment_body'} + mock_comment = { + 'id': '12345', + 'date_created': datetime(2019, 8, 8) + } + + # Call the function + response = d._find_comment_in_jira(mock_comment, [mock_jira_comment]) + + # Assert everything was called correctly + mock_comment_format_legacy.assert_called_with(mock_comment) + 
mock_comment_format.assert_called_with(mock_comment) + self.assertEqual(response, mock_jira_comment) + + @mock.patch(PATH + '_comment_format') + @mock.patch(PATH + '_comment_format_legacy') + def test_find_comment_in_jira_id(self, + mock_comment_format_legacy, + mock_comment_format): + """ + This function tests '_find_comment_in_jira' where we match an ID + """ + # Set up return values + mock_comment_format.return_value = 'mock_comment_body' + mock_comment_format_legacy.return_value = 'mock_legacy_comment_body' + mock_jira_comment = MagicMock() + mock_jira_comment.raw = {'body': '12345'} + mock_comment = { + 'id': '12345', + 'date_created': datetime(2019, 8, 8) + } + + # Call the function + response = d._find_comment_in_jira(mock_comment, [mock_jira_comment]) + + # Assert everything was called correctly + mock_comment_format_legacy.assert_called_with(mock_comment) + mock_comment_format.assert_called_with(mock_comment) + self.assertEqual(response, mock_jira_comment) + + @mock.patch(PATH + '_comment_format') + @mock.patch(PATH + '_comment_format_legacy') + def test_find_comment_in_jira_old_comment(self, + mock_comment_format_legacy, + mock_comment_format): + """ + This function tests '_find_comment_in_jira' where we find a old comment + """ + # Set up return values + mock_comment_format.return_value = 'mock_comment_body' + mock_comment_format_legacy.return_value = 'mock_legacy_comment_body' + mock_jira_comment = MagicMock() + mock_jira_comment.raw = {'body': 'old_comment'} + mock_comment = { + 'id': '12345', + 'date_created': datetime(2019, 1, 1) + } + + # Call the function + response = d._find_comment_in_jira(mock_comment, [mock_jira_comment]) + + # Assert everything was called correctly + mock_comment_format_legacy.assert_called_with(mock_comment) + mock_comment_format.assert_called_with(mock_comment) + self.assertEqual(response, mock_jira_comment) + + @mock.patch(PATH + '_comment_format') + @mock.patch(PATH + '_comment_format_legacy') + def 
test_find_comment_in_jira_none(self, + mock_comment_format_legacy, + mock_comment_format): + """ + This function tests '_find_comment_in_jira' where we return None + """ + # Set up return values + mock_comment_format.return_value = 'mock_comment_body' + mock_comment_format_legacy.return_value = 'mock_legacy_comment_body' + mock_comment = { + 'id': '12345', + 'date_created': datetime(2019, 1, 1) + } + + # Call the function + response = d._find_comment_in_jira(mock_comment, []) + + # Assert everything was called correctly + mock_comment_format_legacy.assert_called_with(mock_comment) + mock_comment_format.assert_called_with(mock_comment) + self.assertEqual(response, None) + + def test_check_jira_status_false(self): + """ + This function tests 'check_jira_status' where we return false + """ + # Set up return values + mock_jira_client = MagicMock() + mock_jira_client.search_issues.return_value = [] + + # Call the function + response = d.check_jira_status(mock_jira_client) + + # Assert everything was called correctly + self.assertEqual(response, False) + mock_jira_client.search_issues.assert_called_with("issueFunction in linkedIssuesOfRemote('*')") + + def test_check_jira_status_true(self): + """ + This function tests 'check_jira_status' where we return true + """ + # Set up return values + mock_jira_client = MagicMock() + mock_jira_client.search_issues.return_value = ['some', 'values'] + + # Call the function + response = d.check_jira_status(mock_jira_client) + + # Assert everything was called correctly + self.assertEqual(response, True) + mock_jira_client.search_issues.assert_called_with("issueFunction in linkedIssuesOfRemote('*')") + + def test_update_url_no_update(self): + """ + This function tests '_update_url' where we already have the URL + """ + # Set up return values + self.mock_downstream.fields.description = self.mock_issue.url + + # Call the function + d._update_url(self.mock_downstream, self.mock_issue) + + # Assert everything was called correctly + 
self.mock_downstream.update.assert_not_called() + + @mock.patch(PATH + 'confluence_client') + def test_update_url_update(self, + mock_confluence_client): + """ + This function tests '_update_url' where the URL is missing and needs to be added + """ + # Set up return values + mock_confluence_client.update_stat = True + self.mock_downstream.fields.description = "" + + # Call the function + d._update_url(self.mock_downstream, self.mock_issue) + + # Assert everything was called correctly + self.mock_downstream.update.assert_called_with( + {'description': + f"\nUpstream URL: {self.mock_issue.url}\n"}) + + @mock.patch(PATH + 'confluence_client') + def test_update_on_close_update(self, + mock_confluence_client): + """ + This function tests '_update_on_close' where there is an + "apply_labels" configuration, and labels need to be updated. + """ + # Set up return values + mock_confluence_client.update_stat = True + self.mock_downstream.fields.description = "" + self.mock_issue.status = 'Closed' + updates = [{"on_close": {"apply_labels": ["closed-upstream"]}}] + + # Call the function + d._update_on_close(self.mock_downstream, self.mock_issue, updates) + + # Assert everything was called correctly + self.mock_downstream.update.assert_called_with( + {'labels': + ["closed-upstream", "tag3", "tag4"]}) + + def test_update_on_close_no_change(self): + """ + This function tests '_update_on_close' where there is an + "apply_labels" configuration but there is no update required. + """ + # Set up return values + self.mock_issue.status = 'Closed' + updates = [{"on_close": {"apply_labels": ["tag4"]}}] + + # Call the function + d._update_on_close(self.mock_downstream, self.mock_issue, updates) + + # Assert everything was called correctly + self.mock_downstream.update.assert_not_called() + + def test_update_on_close_no_action(self): + """ + This function tests '_update_on_close' where there is no + "apply_labels" configuration. 
+ """ + # Set up return values + self.mock_issue.status = 'Closed' + updates = [{"on_close": {"some_other_action": None}}] + + # Call the function + d._update_on_close(self.mock_downstream, self.mock_issue, updates) + + # Assert everything was called correctly + self.mock_downstream.update.assert_not_called() + + def test_update_on_close_no_config(self): + """ + This function tests '_update_on_close' where there is no + configuration for close events. + """ + # Set up return values + self.mock_issue.status = 'Closed' + updates = ["description"] + + # Call the function + d._update_on_close(self.mock_downstream, self.mock_issue, updates) + + # Assert everything was called correctly + self.mock_downstream.update.assert_not_called() diff --git a/tests/test_downstream_pr.py b/tests/test_downstream_pr.py new file mode 100644 index 0000000..7e88273 --- /dev/null +++ b/tests/test_downstream_pr.py @@ -0,0 +1,337 @@ +import unittest +import mock +try: + # Python 3.3 > + from unittest.mock import MagicMock # noqa: F401 +except ImportError: + from mock import MagicMock # noqa: F401 + +import sync2jira.downstream_pr as d + +PATH = 'sync2jira.downstream_pr.' 
+ + +class TestDownstreamPR(unittest.TestCase): + """ + This class tests the downstream_pr.py file under sync2jira + """ + + def setUp(self): + """ + Setting up the testing environment + """ + self.mock_pr = MagicMock() + self.mock_pr.jira_key = 'JIRA-1234' + self.mock_pr.suffix = 'mock_suffix' + self.mock_pr.title = 'mock_title' + self.mock_pr.url = 'mock_url' + self.mock_pr.reporter = 'mock_reporter' + self.mock_pr.downstream = {'pr_updates': [ + {'merge_transition': 'CUSTOM_TRANSITION1'}, + {'link_transition': 'CUSTOM_TRANSITION2'}, + ]} + + self.mock_config = { + 'sync2jira': { + 'default_jira_instance': 'another_jira_instance', + 'jira': { + 'mock_jira_instance': {'mock_jira': 'mock_jira'}, + 'another_jira_instance': {'basic_auth': ['mock_user'], + 'options': {'server': 'mock_server'}} + }, + 'testing': False, + 'legacy_matching': False, + 'admins': [{'mock_admin': 'mock_email'}], + 'develop': False + }, + } + + self.mock_client = MagicMock() + mock_user = MagicMock() + mock_user.displayName = 'mock_reporter' + mock_user.key = 'mock_key' + self.mock_client.search_users.return_value = [mock_user] + self.mock_client.search_issues.return_value = ['mock_existing'] + + self.mock_existing = MagicMock() + + @mock.patch(PATH + 'update_jira_issue') + @mock.patch(PATH + "d_issue") + @mock.patch(PATH + "update_transition") + def test_sync_with_jira_link(self, + mock_update_transition, + mock_d_issue, + mock_update_jira_issue): + """ + This function tests 'sync_with_jira' + """ + # Set up return values + mock_d_issue.get_jira_client.return_value = self.mock_client + + # Call the function + d.sync_with_jira(self.mock_pr, self.mock_config) + + # Assert everything was called correctly + mock_update_jira_issue.assert_called_with('mock_existing', self.mock_pr, self.mock_client) + self.mock_client.search_issues.assert_called_with('Key = JIRA-1234') + mock_d_issue.get_jira_client.assert_called_with(self.mock_pr, self.mock_config) + 
mock_update_transition.assert_called_with(self.mock_client, 'mock_existing', self.mock_pr, 'link_transition') + + @mock.patch(PATH + 'update_jira_issue') + @mock.patch(PATH + "d_issue") + @mock.patch(PATH + "update_transition") + def test_sync_with_jira_merged(self, + mock_update_transition, + mock_d_issue, + mock_update_jira_issue): + """ + This function tests 'sync_with_jira' where the PR suffix is 'merged' + """ + # Set up return values + mock_client = MagicMock() + mock_client.search_issues.return_value = ['mock_existing'] + mock_d_issue.get_jira_client.return_value = mock_client + self.mock_pr.suffix = 'merged' + + # Call the function + d.sync_with_jira(self.mock_pr, self.mock_config) + + # Assert everything was called correctly + mock_update_jira_issue.assert_called_with('mock_existing', self.mock_pr, mock_client) + mock_client.search_issues.assert_called_with('Key = JIRA-1234') + mock_d_issue.get_jira_client.assert_called_with(self.mock_pr, self.mock_config) + mock_update_transition.assert_called_with(mock_client, 'mock_existing', self.mock_pr, 'merged_transition') + + @mock.patch(PATH + 'update_jira_issue') + @mock.patch(PATH + "d_issue") + def test_sync_with_jira_no_issues_found(self, + mock_d_issue, + mock_update_jira_issue): + """ + This function tests 'sync_with_jira' where no issues are found + """ + # Set up return values + self.mock_client.search_issues.return_value = [] + mock_d_issue.get_jira_client.return_value = self.mock_client + + # Call the function + d.sync_with_jira(self.mock_pr, self.mock_config) + + # Assert everything was called correctly + mock_update_jira_issue.assert_not_called() + self.mock_client.search_issues.assert_called_with('Key = JIRA-1234') + mock_d_issue.get_jira_client.assert_called_with(self.mock_pr, self.mock_config) + + @mock.patch(PATH + 'update_jira_issue') + @mock.patch(PATH + "d_issue") + def test_sync_with_jira_testing(self, + mock_d_issue, + mock_update_jira_issue): + """ + This function tests 'sync_with_jira' where testing is enabled, so nothing is synced + 
""" + # Set up return values + mock_client = MagicMock() + mock_client.search_issues.return_value = [] + self.mock_config['sync2jira']['testing'] = True + mock_d_issue.get_jira_client.return_value = mock_client + + # Call the function + d.sync_with_jira(self.mock_pr, self.mock_config) + + # Assert everything was called correctly + mock_update_jira_issue.assert_not_called() + mock_client.search_issues.assert_not_called() + mock_d_issue.get_jira_client.assert_not_called() + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + 'comment_exists') + @mock.patch(PATH + 'format_comment') + @mock.patch(PATH + 'd_issue.attach_link') + @mock.patch(PATH + 'issue_link_exists') + def test_update_jira_issue_link(self, + mock_issue_link_exists, + mock_attach_link, + mock_format_comment, + mock_comment_exists, + mock_confluence_client): + """ + This function tests 'update_jira_issue' + """ + # Set up return values + mock_format_comment.return_value = 'mock_formatted_comment' + mock_comment_exists.return_value = False + mock_issue_link_exists.return_value = False + mock_confluence_client.update_stat = True + + # Call the function + d.update_jira_issue('mock_existing', self.mock_pr, self.mock_client) + + # Assert everything was called correctly + self.mock_client.add_comment.assert_called_with('mock_existing', 'mock_formatted_comment') + mock_format_comment.assert_called_with(self.mock_pr, self.mock_pr.suffix, self.mock_client) + mock_comment_exists.assert_called_with(self.mock_client, 'mock_existing', 'mock_formatted_comment') + mock_confluence_client.update_stat_page.assert_called_with({'Comments': 1}) + mock_attach_link.assert_called_with(self.mock_client, 'mock_existing', {'url': 'mock_url', 'title': '[PR] mock_title'}) + + def test_issue_link_exists_false(self): + """ + This function tests 'issue_link_exists' where it does not exist + """ + # Set up return values + mock_issue_link = MagicMock() + mock_issue_link.object.url = 'bad_url' + 
self.mock_client.remote_links.return_value = [mock_issue_link] + + # Call the function + ret = d.issue_link_exists(self.mock_client, self.mock_existing, self.mock_pr) + + # Assert everything was called correctly + self.mock_client.remote_links.assert_called_with(self.mock_existing) + self.assertEqual(ret, False) + + def test_issue_link_exists_true(self): + """ + This function tests 'issue_link_exists' where it does exist + """ + # Set up return values + mock_issue_link = MagicMock() + mock_issue_link.object.url = self.mock_pr.url + self.mock_client.remote_links.return_value = [mock_issue_link] + + # Call the function + ret = d.issue_link_exists(self.mock_client, self.mock_existing, self.mock_pr) + + # Assert everything was called correctly + self.mock_client.remote_links.assert_called_with(self.mock_existing) + self.assertEqual(ret, True) + + @mock.patch(PATH + 'format_comment') + @mock.patch(PATH + 'comment_exists') + @mock.patch(PATH + 'd_issue.attach_link') + @mock.patch(PATH + 'issue_link_exists') + def test_update_jira_issue_exists(self, + mock_issue_link_exists, + mock_attach_link, + mock_comment_exists, + mock_format_comment, + ): + """ + This function tests 'update_jira_issue' where the comment already exists + """ + # Set up return values + mock_format_comment.return_value = 'mock_formatted_comment' + mock_comment_exists.return_value = True + mock_issue_link_exists.return_value = True + + # Call the function + d.update_jira_issue('mock_existing', self.mock_pr, self.mock_client) + + # Assert everything was called correctly + self.mock_client.add_comment.assert_not_called() + mock_format_comment.assert_called_with(self.mock_pr, self.mock_pr.suffix, self.mock_client) + mock_comment_exists.assert_called_with(self.mock_client, 'mock_existing', 'mock_formatted_comment') + mock_attach_link.assert_not_called() + mock_issue_link_exists.assert_called_with(self.mock_client, 'mock_existing', self.mock_pr) + + + def test_comment_exists_false(self): + """ + This 
function tests 'comment_exists' where the comment does not exists + """ + # Set up return values + mock_comment = MagicMock() + mock_comment.body = 'not_mock_new_comment' + self.mock_client.comments.return_value = [mock_comment] + + # Call the function + response = d.comment_exists(self.mock_client, 'mock_existing', 'mock_new_comment') + + # Assert Everything was called correctly + self.mock_client.comments.assert_called_with('mock_existing') + self.assertEqual(response, False) + + def test_comment_exists_true(self): + """ + This function tests 'comment_exists' where the comment exists + """ + # Set up return values + mock_comment = MagicMock() + mock_comment.body = 'mock_new_comment' + self.mock_client.comments.return_value = [mock_comment] + + # Call the function + response = d.comment_exists(self.mock_client, 'mock_existing', 'mock_new_comment') + + # Assert Everything was called correctly + self.mock_client.comments.assert_called_with('mock_existing') + self.assertEqual(response, True) + + def test_format_comment_closed(self): + """ + This function tests 'format_comment' where the PR is closed + """ + # Call the function + response = d.format_comment(self.mock_pr, 'closed', self.mock_client) + + # Assert Everything was called correctly + self.assertEqual(response, "Merge request [mock_title| mock_url] was closed.") + + def test_format_comment_reopened(self): + """ + This function tests 'format_comment' where the PR is reopened + """ + # Call the function + response = d.format_comment(self.mock_pr, 'reopened', self.mock_client) + + # Assert Everything was called correctly + self.assertEqual(response, "Merge request [mock_title| mock_url] was reopened.") + + def test_format_comment_merged(self): + """ + This function tests 'format_comment' where the PR is merged + """ + # Call the function + response = d.format_comment(self.mock_pr, 'merged', self.mock_client) + + # Assert Everything was called correctly + self.assertEqual(response, "Merge request [mock_title| 
mock_url] was merged!") + + def test_format_comment_open(self): + """ + This function tests 'format_comment' where the PR is open + """ + # Call the function + response = d.format_comment(self.mock_pr, 'open', self.mock_client) + + # Assert Everything was called correctly + self.assertEqual(response, "[~mock_key] mentioned this issue in merge request [mock_title| mock_url].") + + + def test_format_comment_open_no_user_found(self): + """ + This function tests 'format_comment' where the PR is open and search_users returns nothing + """ + # Set up return values + self.mock_client.search_users.return_value = [] + + # Call the function + response = d.format_comment(self.mock_pr, 'open', self.mock_client) + + # Assert Everything was called correctly + self.assertEqual(response, "mock_reporter mentioned this issue in merge request [mock_title| mock_url].") + + @mock.patch(PATH + 'd_issue') + def test_update_transition(self, + mock_d_issue): + """ + This function tests 'update_transition' + """ + # Set up return values + mock_client = MagicMock() + + # Call the function + d.update_transition(mock_client, self.mock_existing, self.mock_pr, 'merge_transition') + + # Assert everything was called correctly + mock_d_issue.change_status.assert_called_with(mock_client, self.mock_existing, 'CUSTOM_TRANSITION1', self.mock_pr) diff --git a/tests/test_intermediary.py b/tests/test_intermediary.py new file mode 100644 index 0000000..3f214d8 --- /dev/null +++ b/tests/test_intermediary.py @@ -0,0 +1,366 @@ +from datetime import datetime +import mock +import unittest + +import sync2jira.intermediary as i + +PATH = 'sync2jira.intermediary.' 
+ +class TestIntermediary(unittest.TestCase): + """ + This class tests the downstream_issue.py file under sync2jira + """ + def setUp(self): + self.mock_config = { + 'sync2jira': { + 'pagure_url': 'dummy_pagure_url', + 'map': { + 'pagure': { + 'pagure': {'mock_downstream': 'mock_key'} + }, + 'github': { + 'github': {'mock_downstream': 'mock_key'} + } + } + } + } + self.mock_pagure_issue = { + 'comments': [{ + 'date_created': '1234', + 'user': { + 'name': 'mock_name' + }, + 'comment': 'mock_body', + 'id': '1234', + }], + 'title': 'mock_title', + 'id': 1234, + 'tags': 'mock_tags', + 'milestone': 'mock_milestone', + 'priority': 'mock_priority', + 'content': 'mock_content', + 'user': 'mock_reporter', + 'assignee': 'mock_assignee', + 'status': 'mock_status', + 'date_created': 'mock_date' + } + + self.mock_github_issue = { + 'comments': [{ + 'author': 'mock_author', + 'name': 'mock_name', + 'body': 'mock_body', + 'id': 'mock_id', + 'date_created': 'mock_date' + }], + 'title': 'mock_title', + 'html_url': 'mock_url', + 'id': 1234, + 'labels': 'mock_tags', + 'milestone': 'mock_milestone', + 'priority': 'mock_priority', + 'body': 'mock_content', + 'user': 'mock_reporter', + 'assignees': 'mock_assignee', + 'state': 'open', + 'date_created': 'mock_date', + 'number': '1', + } + + self.mock_github_pr = { + 'comments': [{ + 'author': 'mock_author', + 'name': 'mock_name', + 'body': 'mock_body', + 'id': 'mock_id', + 'date_created': 'mock_date' + }], + 'title': 'mock_title', + 'html_url': 'mock_url', + 'id': 1234, + 'labels': 'mock_tags', + 'milestone': 'mock_milestone', + 'priority': 'mock_priority', + 'body': 'mock_content', + 'user': {'fullname': 'mock_reporter'}, + 'assignee': 'mock_assignee', + 'state': 'open', + 'date_created': 'mock_date', + 'number': 1234, + } + + self.mock_pagure_pr = { + 'comments': [{ + 'date_created': '1234', + 'user': { + 'name': 'mock_name' + }, + 'comment': 'mock_body', + 'id': '1234', + }], + 'title': 'mock_title', + 'id': 1234, + 'tags': 
'mock_tags', + 'milestone': 'mock_milestone', + 'priority': 'mock_priority', + 'content': 'mock_content', + 'user': {'fullname': 'mock_reporter'}, + 'assignee': 'mock_assignee', + 'status': 'mock_status', + 'date_created': 'mock_date', + 'project': {'name': 'mock_project_name'}, + 'initial_comment': 'mock_content_initial' + } + + @mock.patch(PATH + 'datetime') + def test_from_pagure(self, + mock_datetime): + """ + This tests the 'from_pagure' function under the Issue class + """ + # Set up return values + mock_datetime.fromtimestamp.return_value = 'mock_date' + + # Call the function + response = i.Issue.from_pagure( + upstream='pagure', + issue=self.mock_pagure_issue, + config=self.mock_config + ) + + # Assert that we made the calls correctly + self.assertEqual(response.source, 'pagure') + self.assertEqual(response.title, '[pagure] mock_title') + self.assertEqual(response.url, 'dummy_pagure_url/pagure/issue/1234') + self.assertEqual(response.upstream, 'pagure') + self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name', + 'author': 'mock_name', 'changed': None, + 'date_created': 'mock_date', 'id': '1234'}]) + self.assertEqual(response.tags, 'mock_tags') + self.assertEqual(response.fixVersion, ['mock_milestone']) + self.assertEqual(response.priority, 'mock_priority') + self.assertEqual(response.content, 'mock_content') + self.assertEqual(response.reporter, 'mock_reporter') + self.assertEqual(response.assignee, 'mock_assignee') + self.assertEqual(response.status, 'mock_status') + self.assertEqual(response.id, 'mock_date') + self.assertEqual(response.downstream, {'mock_downstream': 'mock_key'}) + + def test_from_github_open(self): + """ + This tests the 'from_github' function under the Issue class where the state is open + """ + # Call the function + response = i.Issue.from_github( + upstream='github', + issue=self.mock_github_issue, + config=self.mock_config + ) + + # Assert that we made the calls correctly + self.assertEqual(response.source, 
'github') + self.assertEqual(response.title, '[github] mock_title') + self.assertEqual(response.url, 'mock_url') + self.assertEqual(response.upstream, 'github') + self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name', 'author': 'mock_author', + 'changed': None, 'date_created': 'mock_date', 'id': 'mock_id'}]) + self.assertEqual(response.tags, 'mock_tags') + self.assertEqual(response.fixVersion, ['mock_milestone']) + self.assertEqual(response.priority, None) + self.assertEqual(response.content, 'mock_content') + self.assertEqual(response.reporter, 'mock_reporter') + self.assertEqual(response.assignee, 'mock_assignee') + self.assertEqual(response.status, 'Open') + self.assertEqual(response.id, '1234') + self.assertEqual(response.downstream, {'mock_downstream': 'mock_key'}) + + def test_from_github_closed(self): + """ + This tests the 'from_github' function under the Issue class where the state is closed + """ + # Set up return values + self.mock_github_issue['state'] = 'closed' + + # Call the function + response = i.Issue.from_github( + upstream='github', + issue=self.mock_github_issue, + config=self.mock_config + ) + + # Assert that we made the calls correctly + self.assertEqual(response.source, 'github') + self.assertEqual(response.title, '[github] mock_title') + self.assertEqual(response.url, 'mock_url') + self.assertEqual(response.upstream, 'github') + self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name', 'author': 'mock_author', + 'changed': None, 'date_created': 'mock_date', 'id': 'mock_id'}]) + self.assertEqual(response.tags, 'mock_tags') + self.assertEqual(response.fixVersion, ['mock_milestone']) + self.assertEqual(response.priority, None) + self.assertEqual(response.content, 'mock_content') + self.assertEqual(response.reporter, 'mock_reporter') + self.assertEqual(response.assignee, 'mock_assignee') + self.assertEqual(response.status, 'Closed') + self.assertEqual(response.id, '1234') + 
self.assertEqual(response.downstream, {'mock_downstream': 'mock_key'}) + + def test_mapping_github(self): + """ + This tests the mapping feature from github + """ + # Set up return values + self.mock_config['sync2jira']['map']['github']['github'] = { + 'mock_downstream': 'mock_key', + 'mapping': [{'fixVersion': 'Test XXX'}] + } + self.mock_github_issue['state'] = 'closed' + + # Call the function + response = i.Issue.from_github( + upstream='github', + issue=self.mock_github_issue, + config=self.mock_config + ) + + # Assert that we made the calls correctly + self.assertEqual(response.source, 'github') + self.assertEqual(response.title, '[github] mock_title') + self.assertEqual(response.url, 'mock_url') + self.assertEqual(response.upstream, 'github') + self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name', 'author': 'mock_author', + 'changed': None, 'date_created': 'mock_date', 'id': 'mock_id'}]) + self.assertEqual(response.tags, 'mock_tags') + self.assertEqual(response.fixVersion, ['Test mock_milestone']) + self.assertEqual(response.priority, None) + self.assertEqual(response.content, 'mock_content') + self.assertEqual(response.reporter, 'mock_reporter') + self.assertEqual(response.assignee, 'mock_assignee') + self.assertEqual(response.status, 'Closed') + self.assertEqual(response.id, '1234') + self.assertEqual(response.downstream, { + 'mock_downstream': 'mock_key', + 'mapping': [{'fixVersion': 'Test XXX'}]}) + + @mock.patch(PATH + 'datetime') + def test_mapping_pagure(self, + mock_datetime): + """ + This tests the mapping feature from pagure + """ + # Set up return values + mock_datetime.fromtimestamp.return_value = 'mock_date' + self.mock_config['sync2jira']['map']['pagure']['pagure'] = { + 'mock_downstream': 'mock_key', + 'mapping': [{'fixVersion': 'Test XXX'}] + } + + # Call the function + response = i.Issue.from_pagure( + upstream='pagure', + issue=self.mock_pagure_issue, + config=self.mock_config + ) + + # Assert that we made the calls 
correctly + self.assertEqual(response.source, 'pagure') + self.assertEqual(response.title, '[pagure] mock_title') + self.assertEqual(response.url, 'dummy_pagure_url/pagure/issue/1234') + self.assertEqual(response.upstream, 'pagure') + self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name', + 'author': 'mock_name', 'changed': None, + 'date_created': 'mock_date', + 'id': '1234'}]) + self.assertEqual(response.tags, 'mock_tags') + self.assertEqual(response.fixVersion, ['Test mock_milestone']) + self.assertEqual(response.priority, 'mock_priority') + self.assertEqual(response.content, 'mock_content') + self.assertEqual(response.reporter, 'mock_reporter') + self.assertEqual(response.assignee, 'mock_assignee') + self.assertEqual(response.status, 'mock_status') + self.assertEqual(response.id, 'mock_date') + self.assertEqual(response.downstream, { + 'mock_downstream': 'mock_key', + 'mapping': [{'fixVersion': 'Test XXX'}]}) + + @mock.patch(PATH + 'matcher') + def test_from_github_pr_reopen(self, + mock_matcher): + """ + This tests the from GitHub for a PR + """ + # Set up return values + mock_matcher.return_value = "JIRA-1234" + + # Call the function + response = i.PR.from_github( + upstream='github', + pr=self.mock_github_pr, + suffix='reopened', + config=self.mock_config + ) + + # Assert that we made the calls correctly + self.assertEqual(response.source, 'github') + self.assertEqual(response.title, '[github] mock_title') + self.assertEqual(response.url, 'mock_url') + self.assertEqual(response.upstream, 'github') + self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name', 'author': 'mock_author', + 'changed': None, 'date_created': 'mock_date', 'id': 'mock_id'}]) + self.assertEqual(response.priority, None) + self.assertEqual(response.content, 'mock_content') + self.assertEqual(response.reporter, 'mock_reporter') + self.assertEqual(response.assignee, 'mock_assignee') + self.assertEqual(response.status, None) + 
self.assertEqual(response.id, '1234') + self.assertEqual(response.suffix, 'reopened') + self.assertEqual(response.downstream, {'mock_downstream': 'mock_key'}) + self.assertEqual(response.jira_key, "JIRA-1234") + self.mock_github_pr['comments'][0]['changed'] = None + mock_matcher.assert_called_with(self.mock_github_pr['body'], self.mock_github_pr['comments']) + + @mock.patch(PATH + 'datetime') + @mock.patch(PATH + 'matcher') + def test_from_pagure_pr_reopen(self, + mock_matcher, + mock_datetime): + """ + This tests the from Pagure for a PR + """ + # Set up return values + mock_matcher.return_value = "JIRA-1234" + mock_datetime.fromtimestamp.return_value = '1234' + + # Call the function + response = i.PR.from_pagure( + upstream='pagure', + pr=self.mock_pagure_pr, + suffix='reopened', + config=self.mock_config + ) + + # Assert that we made the calls correctly + formatted_comments = [{'author': 'mock_name', 'body': 'mock_body', + 'name': 'mock_name', 'id': '1234', + 'date_created': '1234', + 'changed': None}] + self.assertEqual(response.source, 'pagure') + self.assertEqual(response.title, '[pagure] mock_title') + self.assertEqual(response.url, 'https://pagure.io/mock_project_name/pull-request/1234') + self.assertEqual(response.upstream, 'pagure') + self.assertEqual(response.comments, formatted_comments) + self.assertEqual(response.priority, None) + self.assertEqual(response.content, 'mock_content_initial') + self.assertEqual(response.reporter, 'mock_reporter') + self.assertEqual(response.assignee, 'mock_assignee') + self.assertEqual(response.status, 'mock_status') + self.assertEqual(response.id, '1234') + self.assertEqual(response.suffix, 'reopened') + self.assertEqual(response.downstream, {'mock_downstream': 'mock_key'}) + self.assertEqual(response.jira_key, "JIRA-1234") + self.mock_pagure_pr['comments'][0]['changed'] = None + mock_datetime.fromtimestamp.assert_called_with(float(1234)) + mock_matcher.assert_called_with(self.mock_pagure_pr['initial_comment'], 
formatted_comments) + + + # TODO: Add new tests from PR \ No newline at end of file diff --git a/tests/test_main.py b/tests/test_main.py new file mode 100644 index 0000000..0c76be6 --- /dev/null +++ b/tests/test_main.py @@ -0,0 +1,579 @@ +import mock +import unittest +try: + # Python 3.3 > + from unittest.mock import MagicMock # noqa: F401 +except ImportError: + from mock import MagicMock # noqa: F401 + +import sync2jira.main as m + + +PATH = 'sync2jira.main.' + + +class TestMain(unittest.TestCase): + """ + This class tests the main.py file under sync2jira + """ + def setUp(self): + """ + Set up the testing environment + """ + # Mock Config dict + self.mock_config = { + 'sync2jira': { + 'jira': { + 'mock_jira_instance': {'mock_jira': 'mock_jira'} + }, + 'confluence_statistics': True, + 'testing': {}, + 'legacy_matching': False, + 'map': { + 'pagure': {'key_pagure': {'sync': ['issue', 'pullrequest']}}, + 'github': {'key_github': {'sync': ['issue', 'pullrequest']}} + }, + 'initialize': True, + 'listen': True, + 'develop': False, + }, + } + + # Mock Fedmsg Message + self.mock_message = { + 'msg_id': 'mock_id', + 'msg': {'issue': 'mock_issue'} + } + + def _check_for_exception(self, loader, target, exc=ValueError): + try: + m.load_config(loader) + assert False, "Exception expected." 
+ except exc as e: + self.assertIn(target, repr(e)) + + def test_config_validate_empty(self): + loader = lambda: {} + self._check_for_exception(loader, 'No sync2jira section') + + def test_config_validate_missing_map(self): + loader = lambda: {'sync2jira': {}} + self._check_for_exception(loader, 'No sync2jira.map section') + + def test_config_validate_mispelled_mappings(self): + loader = lambda: {'sync2jira': {'map': {'pageur': {}}}, 'jira': {}} + self._check_for_exception(loader, 'Specified handlers: "pageur", must') + + def test_config_validate_missing_jira(self): + loader = lambda: {'sync2jira': {'map': {'pagure': {}}}} + self._check_for_exception(loader, 'No sync2jira.jira section') + + def test_config_validate_all_good(self): + loader = lambda: {'sync2jira': {'map': {'pagure': {}}, 'jira': {}}} + m.load_config(loader) # ahhh, no exception. + + @mock.patch(PATH + 'u_issue') + @mock.patch(PATH + 'd_issue') + @mock.patch(PATH + 'load_config') + def test_close_duplicates(self, + mock_load_config, + mock_d, + mock_u): + """ + This tests the 'close_duplicates' function where everything goes smoothly + """ + # Set up return values + mock_load_config.return_value = self.mock_config + mock_u.pagure_issues.return_value = ['mock_issue_github'] + mock_u.github_issues.return_value = ['mock_issue_pagure'] + + # Call the function + m.close_duplicates() + + # Assert everything was called correctly + mock_load_config.assert_called_once() + mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config) + mock_u.github_issues.assert_called_with('key_github', self.mock_config) + mock_d.close_duplicates.assert_any_call('mock_issue_github', self.mock_config) + mock_d.close_duplicates.assert_any_call('mock_issue_pagure', self.mock_config) + + @mock.patch(PATH + 'u_issue') + @mock.patch(PATH + 'd_issue') + @mock.patch(PATH + 'load_config') + def test_close_duplicates_errors(self, + mock_load_config, + mock_d, + mock_u): + """ + This tests the 'close_duplicates' function 
where closing duplicates raises an exception + """ + # Set up return values + mock_load_config.return_value = self.mock_config + mock_u.pagure_issues.return_value = ['mock_issue'] + mock_u.github_issues.return_value = ['mock_issue'] + mock_d.close_duplicates.side_effect = Exception() + + # Call the function + with self.assertRaises(Exception): + m.close_duplicates() + + # Assert everything was called correctly + mock_load_config.assert_called_once() + mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config) + mock_u.github_issues.assert_not_called() + mock_d.close_duplicates.assert_called_with('mock_issue', self.mock_config) + + @mock.patch(PATH + 'load_config') + @mock.patch(PATH + 'u_issue') + def test_list_managed(self, + mock_u, + mock_load_config): + """ + This tests the 'list_managed' function + """ + # Set up return values + mock_load_config.return_value = self.mock_config + + # Call the function + m.list_managed() + + # Assert everything was called correctly + mock_load_config.assert_called_once() + mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config) + mock_u.github_issues.assert_called_with('key_github', self.mock_config) + + @mock.patch(PATH + 'initialize_recent') + @mock.patch(PATH + 'report_failure') + @mock.patch(PATH + 'INITIALIZE', 1) + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + 'initialize_issues') + @mock.patch(PATH + 'initialize_pr') + @mock.patch(PATH + 'load_config') + @mock.patch(PATH + 'listen') + def test_main_initialize(self, + mock_listen, + mock_load_config, + mock_initialize_pr, + mock_initialize_issues, + mock_confluence_client, + mock_report_failure, + mock_initialize_recent): + """ + This tests the 'main' function + """ + # Set up return values + mock_load_config.return_value = self.mock_config + self.mock_config['sync2jira']['confluence_statistics'] = True + + # Call the function + m.main() + + # Assert everything was called correctly + mock_load_config.assert_called_once() + 
mock_listen.assert_called_with(self.mock_config) + mock_listen.assert_called_with(self.mock_config) + mock_initialize_issues.assert_called_with(self.mock_config) + mock_initialize_pr.assert_called_with(self.mock_config) + mock_report_failure.assert_not_called() + mock_initialize_recent.assert_not_called() + mock_confluence_client.update_stat_value.assert_called_with(True) + + @mock.patch(PATH + 'confluence_client') + @mock.patch(PATH + 'initialize_recent') + @mock.patch(PATH + 'report_failure') + @mock.patch(PATH + 'INITIALIZE', 0) + @mock.patch(PATH + 'initialize_issues') + @mock.patch(PATH + 'initialize_pr') + @mock.patch(PATH + 'load_config') + @mock.patch(PATH + 'listen') + def test_main_no_initialize(self, + mock_listen, + mock_load_config, + mock_initialize_pr, + mock_initialize_issues, + mock_report_failure, + mock_initialize_recent, + mock_confluence_client,): + """ + This tests the 'main' function + """ + # Set up return values + mock_load_config.return_value = self.mock_config + + # Call the function + m.main() + + # Assert everything was called correctly + mock_load_config.assert_called_once() + mock_listen.assert_called_with(self.mock_config) + mock_listen.assert_called_with(self.mock_config) + mock_initialize_issues.assert_not_called() + mock_initialize_pr.assert_not_called() + mock_report_failure.assert_not_called() + mock_initialize_recent.assert_called_with(self.mock_config) + mock_confluence_client.update_stat_value.assert_called_with(True) + + @mock.patch(PATH + 'u_issue') + @mock.patch(PATH + 'd_issue') + def test_initialize(self, + mock_d, + mock_u): + """ + This tests 'initialize' function where everything goes smoothly! 
+ """ + # Set up return values + mock_u.pagure_issues.return_value = ['mock_issue_pagure'] + mock_u.github_issues.return_value = ['mock_issue_github'] + + # Call the function + m.initialize_issues(self.mock_config) + + # Assert everything was called correctly + mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config) + mock_u.github_issues.assert_called_with('key_github', self.mock_config) + mock_d.sync_with_jira.assert_any_call('mock_issue_pagure', self.mock_config) + mock_d.sync_with_jira.assert_any_call('mock_issue_github', self.mock_config) + + @mock.patch(PATH + 'u_issue') + @mock.patch(PATH + 'd_issue') + def test_initialize_repo_name_pagure(self, + mock_d, + mock_u): + """ + This tests 'initialize' function where we want to sync an individual repo for Pagure + """ + # Set up return values + mock_u.pagure_issues.return_value = ['mock_issue_pagure'] + mock_u.github_issues.return_value = ['mock_issue_github'] + + # Call the function + m.initialize_issues(self.mock_config, repo_name='key_pagure') + + # Assert everything was called correctly + mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config) + mock_u.github_issues.assert_not_called() + mock_d.sync_with_jira.assert_called_with('mock_issue_pagure', self.mock_config) + + @mock.patch(PATH + 'u_issue') + @mock.patch(PATH + 'd_issue') + def test_initialize_repo_name_github(self, + mock_d, + mock_u): + """ + This tests 'initialize' function where we want to sync an individual repo for GitHub + """ + # Set up return values + mock_u.pagure_issues.return_value = ['mock_issue_pagure'] + mock_u.github_issues.return_value = ['mock_issue_github'] + + # Call the function + m.initialize_issues(self.mock_config, repo_name='key_github') + + # Assert everything was called correctly + mock_u.github_issues.assert_called_with('key_github', self.mock_config) + mock_u.pagure_issues.assert_not_called() + mock_d.sync_with_jira.assert_called_with('mock_issue_github', self.mock_config) + + 
@mock.patch(PATH + 'u_issue') + @mock.patch(PATH + 'd_issue') + def test_initialize_errors(self, + mock_d, + mock_u): + """ + This tests 'initialize' function where syncing with JIRA throws an exception + """ + # Set up return values + mock_u.pagure_issues.return_value = ['mock_issue_pagure'] + mock_u.github_issues.return_value = ['mock_issue_github'] + mock_d.sync_with_jira.side_effect = Exception() + + # Call the function + with self.assertRaises(Exception): + m.initialize_issues(self.mock_config) + + # Assert everything was called correctly + mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config) + mock_d.sync_with_jira.assert_any_call('mock_issue_pagure', self.mock_config) + + @mock.patch(PATH + 'u_issue') + @mock.patch(PATH + 'd_issue') + @mock.patch(PATH + 'sleep') + @mock.patch(PATH + 'report_failure') + def test_initialize_api_limit(self, + mock_report_failure, + mock_sleep, + mock_d, + mock_u): + """ + This tests 'initialize' where we get an GitHub API limit error. + """ + # Set up return values + mock_error = MagicMock(side_effect=Exception('API rate limit exceeded')) + mock_u.pagure_issues.return_value = ['mock_issue_pagure'] + mock_u.github_issues.side_effect = mock_error + + # Call the function + m.initialize_issues(self.mock_config, testing=True) + + # Assert everything was called correctly + mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config) + mock_d.sync_with_jira.assert_any_call('mock_issue_pagure', self.mock_config) + mock_u.github_issues.assert_called_with('key_github', self.mock_config) + mock_sleep.assert_called_with(3600) + mock_report_failure.assert_not_called() + + @mock.patch(PATH + 'u_issue') + @mock.patch(PATH + 'd_issue') + @mock.patch(PATH + 'sleep') + @mock.patch(PATH + 'report_failure') + def test_initialize_github_error(self, + mock_report_failure, + mock_sleep, + mock_d, + mock_u): + """ + This tests 'initialize' where we get a GitHub API (not limit) error. 
+ """ + # Set up return values + mock_error = MagicMock(side_effect=Exception('Random Error')) + mock_u.pagure_issues.return_value = ['mock_issue_pagure'] + mock_u.github_issues.side_effect = mock_error + + # Call the function + with self.assertRaises(Exception): + m.initialize_issues(self.mock_config, testing=True) + + # Assert everything was called correctly + mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config) + mock_d.sync_with_jira.assert_any_call('mock_issue_pagure', self.mock_config) + mock_u.github_issues.assert_called_with('key_github', self.mock_config) + mock_sleep.assert_not_called() + mock_report_failure.assert_called_with(self.mock_config) + + @mock.patch(PATH + 'handle_msg') + @mock.patch(PATH + 'fedmsg') + def test_listen_no_handlers(self, + mock_fedmsg, + mock_handle_msg): + """ + Test 'listen' function where suffix is not in handlers + """ + # Set up return values + mock_fedmsg.tail_messages.return_value = [("dummy", "dummy", "mock_topic", self.mock_message)] + + # Call the function + m.listen(self.mock_config) + + # Assert everything was called correctly + mock_handle_msg.assert_not_called() + + @mock.patch(PATH + 'handle_msg') + @mock.patch(PATH + 'issue_handlers') + @mock.patch(PATH + 'fedmsg') + def test_listen_no_issue(self, + mock_fedmsg, + mock_handlers_issue, + mock_handle_msg): + """ + Test 'listen' function where the handler returns none + """ + # Set up return values + mock_handlers_issue['github.issue.comment'].return_value = None + mock_fedmsg.tail_messages.return_value = [("dummy", "dummy", "d.d.d.pagure.issue.drop", self.mock_message)] + + # Call the function + m.listen(self.mock_config) + + # Assert everything was called correctly + mock_handle_msg.assert_not_called() + + @mock.patch(PATH + 'handle_msg') + @mock.patch(PATH + 'issue_handlers') + @mock.patch(PATH + 'fedmsg') + def test_listen(self, + mock_fedmsg, + mock_handlers_issue, + mock_handle_msg): + """ + Test 'listen' function where everything goes 
smoothly + """ + # Set up return values + mock_handlers_issue['github.issue.comment'].return_value = 'dummy_issue' + mock_fedmsg.tail_messages.return_value = [("dummy", "dummy", "d.d.d.github.issue.comment", self.mock_message)] + + # Call the function + m.listen(self.mock_config) + + # Assert everything was called correctly + mock_handle_msg.assert_called_with( + self.mock_message, + 'github.issue.comment', self.mock_config) + + @mock.patch(PATH + 'send_mail') + @mock.patch(PATH + 'jinja2') + def test_report_failure(self, + mock_jinja2, + mock_send_mail): + """ + Tests 'report_failure' function + """ + # Set up return values + mock_templateLoader = MagicMock() + mock_templateEnv = MagicMock() + mock_template = MagicMock() + mock_template.render.return_value = 'mock_html' + mock_templateEnv.get_template.return_value = mock_template + mock_jinja2.FileSystemLoader.return_value = mock_templateLoader + mock_jinja2.Environment.return_value = mock_templateEnv + + # Call the function + m.report_failure({'sync2jira': {'mailing-list': 'mock_email'}}) + + # Assert everything was called correctly + mock_send_mail.assert_called_with(cc=None, + recipients=['mock_email'], + subject='Sync2Jira Has Failed!', + text='mock_html') + + @mock.patch(PATH + 'u_issue') + @mock.patch(PATH + 'd_issue') + def test_handle_msg_no_handlers(self, + mock_d, + mock_u): + """ + Tests 'handle_msg' function where there are no handlers + """ + # Call the function + m.handle_msg(self.mock_message, 'no_handler', self.mock_config) + + # Assert everything was called correctly + mock_d.sync_with_jira.assert_not_called() + mock_u.handle_github_message.assert_not_called() + mock_u.handle_pagure_message.assert_not_called() + + @mock.patch(PATH + 'issue_handlers') + @mock.patch(PATH + 'u_issue') + @mock.patch(PATH + 'd_issue') + def test_handle_msg_no_issue(self, + mock_d, + mock_u, + mock_handlers_issue): + """ + Tests 'handle_msg' function where there is no issue + """ + # Set up return values + 
mock_handlers_issue['github.issue.comment'].return_value = None + + # Call the function + m.handle_msg(self.mock_message, 'github.issue.comment', self.mock_config) + + # Assert everything was called correctly + mock_d.sync_with_jira.assert_not_called() + mock_u.handle_github_message.assert_not_called() + mock_u.handle_pagure_message.assert_not_called() + + @mock.patch(PATH + 'issue_handlers') + @mock.patch(PATH + 'u_issue') + @mock.patch(PATH + 'd_issue') + def test_handle_msg(self, + mock_d, + mock_u, + mock_handlers_issue): + """ + Tests 'handle_msg' function + """ + # Set up return values + mock_handlers_issue['github.issue.comment'].return_value = 'dummy_issue' + mock_u.handle_github_message.return_value = 'dummy_issue' + + # Call the function + m.handle_msg(self.mock_message, 'github.issue.comment', self.mock_config) + + # Assert everything was called correctly + mock_d.sync_with_jira.assert_called_with('dummy_issue', self.mock_config) + mock_u.handle_pagure_message.assert_not_called() + + @mock.patch(PATH + 'handle_msg') + @mock.patch(PATH + 'query') + def test_initialize_recent(self, + mock_query, + mock_handle_msg): + """ + Tests 'initialize_recent' function + """ + # Set up return values + mock_query.return_value = [{ + 'topic': 'm.m.m.github.issue.comment', + 'msg': 'mock_msg' + + }] + + # Call the function + m.initialize_recent(self.mock_config) + + # Assert everything was called correctly + mock_handle_msg.assert_called_with({'msg': 'mock_msg'}, 'github.issue.comment', self.mock_config) + + @mock.patch(PATH + 'handle_msg') + @mock.patch(PATH + 'query') + def test_initialize_recent_no_handler(self, + mock_query, + mock_handle_msg): + """ + Tests 'initialize_recent' function where the topic is not for a valid handler + """ + # Set up return values + mock_query.return_value = [{ + 'topic': 'm.m.m.bad.topic', + 'msg': 'mock_msg' + + }] + + # Call the function + m.initialize_recent(self.mock_config) + + # Assert everything was called correctly + 
mock_handle_msg.assert_not_called() + + @mock.patch(PATH + 'get') + def test_query(self, + mock_get): + """ + Tests 'query' function + """ + # Set up return values + mock_get.return_value = { + 'raw_messages': ['test_msg'], + 'count': 1, + 'total': 1 + } + # Call the function + response = m.query() + + # Assert everything was called correctly + mock_get.assert_called_with(params={'order': 'asc'}) + self.assertEqual(response, ['test_msg']) + + @mock.patch(PATH + 'HTTPKerberosAuth') + @mock.patch(PATH + 'requests') + def test_get(self, + mock_requests, + mock_kerberos_auth): + """ + Tests 'get' function + """ + # Set up return values + mock_response = MagicMock() + mock_response.json.return_value = 'mock_return_value' + mock_requests.get.return_value = mock_response + + # Call the function + response = m.get('mock_params') + + # Assert everything was called correctly + self.assertEqual(response, 'mock_return_value') + mock_requests.get.assert_called_with( + auth=mock_kerberos_auth(), + headers={'Accept': 'application/json'}, + params='mock_params', + url=m.DATAGREPPER_URL) diff --git a/tests/test_upstream_issue.py b/tests/test_upstream_issue.py new file mode 100644 index 0000000..a9ce9ae --- /dev/null +++ b/tests/test_upstream_issue.py @@ -0,0 +1,597 @@ +import mock +import unittest +try: + # Python 3.3 > + from unittest.mock import MagicMock # noqa: F401 +except ImportError: + from mock import MagicMock # noqa: F401 + + +import sync2jira.upstream_issue as u + + +PATH = 'sync2jira.upstream_issue.' + + +class TestUpstreamIssue(unittest.TestCase): + """ + This class tests the upstream_issue.py file under sync2jira + """ + def setUp(self): + self.mock_config = { + 'sync2jira': { + 'map': { + 'github': { + 'org/repo': {'sync': ['issue']}, + }, + 'pagure': { + 'org/repo': {'sync': ['issue']}, + }, + }, + 'jira': { + # Nothing, really.. 
+ }, + 'filters': { + 'github': + {'org/repo': {'filter1': 'filter1', 'labels': 'custom_tag'}}, + 'pagure': + {'org/repo': {'filter1': 'filter1', 'tags': ['custom_tag']}}, + }, + 'github_token': 'mock_token' + }, + } + # Mock Pagure Message + self.mock_pagure_message = { + 'msg': { + 'project': { + 'name': 'org/repo' + }, + 'issue': { + 'filter1': 'filter1', + 'tags': ['custom_tag'], + 'comments': [], + 'assignee': 'mock_assignee' + }, + 'tags': ['new_tag'], + 'comment': 'new_comment', + 'status': 'temp' + }, + 'topic': 'io.pagure.prod.pagure.issue.drop', + } + + # Mock Github Comment + self.mock_github_comment = MagicMock() + self.mock_github_comment.user.name = 'mock_username' + self.mock_github_comment.user.login = 'mock_user_login' + self.mock_github_comment.body = 'mock_body' + self.mock_github_comment.id = 'mock_id' + self.mock_github_comment.created_at = 'mock_created_at' + + # Mock Github Message + self.mock_github_message = { + 'msg': { + 'repository': { + 'owner': { + 'login': 'org' + }, + 'name': 'repo' + }, + 'issue': { + 'filter1': 'filter1', + 'labels': [{'name': 'custom_tag'}], + 'comments': ['some_comments!'], + 'number': 'mock_number', + 'user': { + 'login': 'mock_login' + }, + 'assignees': [{'login': 'mock_login'}], + 'milestone': { + 'title': 'mock_milestone' + } + } + } + } + + # Mock github issue + self.mock_github_issue = MagicMock() + self.mock_github_issue.get_comments.return_value = [self.mock_github_comment] + + # Mock Github Issue Raw + self.mock_github_issue_raw = { + 'comments': ['some comment'], + 'number': '1234', + 'user': { + 'login': 'mock_login' + }, + 'assignees': [{'login': 'mock_assignee_login'}], + 'labels': [{'name': 'some_label'}], + 'milestone': { + 'title': 'mock_milestone' + } + } + + # Mock Github Reporter + self.mock_github_person = MagicMock() + self.mock_github_person.name = 'mock_name' + + # Mock Github Repo + self.mock_github_repo = MagicMock() + self.mock_github_repo.get_issue.return_value = self.mock_github_issue 
+ + # Mock Github Client + self.mock_github_client = MagicMock() + self.mock_github_client.get_repo.return_value = self.mock_github_repo + self.mock_github_client.get_user.return_value = self.mock_github_person + + @mock.patch('sync2jira.intermediary.Issue.from_github') + @mock.patch(PATH + 'Github') + @mock.patch(PATH + 'get_all_github_data') + def test_github_issues(self, + mock_get_all_github_data, + mock_github, + mock_issue_from_github): + """ + This function tests 'github_issues' function + """ + # Set up return values + mock_github.return_value = self.mock_github_client + mock_get_all_github_data.return_value = [self.mock_github_issue_raw] + mock_issue_from_github.return_value = 'Successful Call!' + + # Call the function + response = list(u.github_issues( + upstream='org/repo', + config=self.mock_config + )) + + # Assert that calls were made correctly + try: + mock_get_all_github_data.assert_called_with( + 'https://api.github.com/repos/org/repo/issues?labels=custom_tag&filter1=filter1', + {'Authorization': 'token mock_token'} + ) + except AssertionError: + mock_get_all_github_data.assert_called_with( + 'https://api.github.com/repos/org/repo/issues?filter1=filter1&labels=custom_tag', + {'Authorization': 'token mock_token'} + ) + self.mock_github_client.get_user.assert_any_call('mock_login') + self.mock_github_client.get_user.assert_any_call('mock_assignee_login') + mock_issue_from_github.assert_called_with( + 'org/repo', + {'labels': ['some_label'], 'number': '1234', 'comments': [ + {'body': 'mock_body', 'name': 'mock_user_login', 'author': 'mock_username', 'changed': None, + 'date_created': 'mock_created_at', 'id': 'mock_id'}], 'assignees': [{'fullname': 'mock_name'}], + 'user': {'login': 'mock_login', 'fullname': 'mock_name'}, 'milestone': 'mock_milestone'}, + self.mock_config + ) + self.mock_github_client.get_repo.assert_called_with('org/repo') + self.mock_github_repo.get_issue.assert_called_with(number='1234') + 
self.mock_github_issue.get_comments.assert_any_call() + self.assertEqual(response[0], 'Successful Call!') + + @mock.patch('sync2jira.intermediary.Issue.from_github') + @mock.patch(PATH + 'Github') + @mock.patch(PATH + 'get_all_github_data') + def test_github_issues_no_token(self, + mock_get_all_github_data, + mock_github, + mock_issue_from_github): + """ + This function tests 'github_issues' function where we have no github token + and no comments + """ + # Set up return values + self.mock_config['sync2jira']['github_token'] = None + self.mock_github_issue_raw['comments'] = 0 + mock_github.return_value = self.mock_github_client + mock_get_all_github_data.return_value = [self.mock_github_issue_raw] + mock_issue_from_github.return_value = 'Successful Call!' + + # Call the function + response = list(u.github_issues( + upstream='org/repo', + config=self.mock_config + )) + + # Assert that calls were made correctly + try: + mock_get_all_github_data.assert_called_with( + 'https://api.github.com/repos/org/repo/issues?labels=custom_tag&filter1=filter1', + {} + ) + except AssertionError: + mock_get_all_github_data.assert_called_with( + 'https://api.github.com/repos/org/repo/issues?filter1=filter1&labels=custom_tag', + {} + ) + self.mock_github_client.get_user.assert_any_call('mock_login') + self.mock_github_client.get_user.assert_any_call('mock_assignee_login') + mock_issue_from_github.assert_called_with( + 'org/repo', + {'labels': ['some_label'], 'number': '1234', 'comments': [], 'assignees': [{'fullname': 'mock_name'}], + 'user': {'login': 'mock_login', 'fullname': 'mock_name'}, 'milestone': 'mock_milestone'}, + self.mock_config + ) + self.assertEqual(response[0], 'Successful Call!') + self.mock_github_client.get_repo.assert_not_called() + self.mock_github_repo.get_issue.assert_not_called() + self.mock_github_issue.get_comments.assert_not_called() + + @mock.patch('sync2jira.intermediary.Issue.from_pagure') + @mock.patch(PATH + 'requests') + def 
test_pagure_issues_error(self, + mock_requests, + mock_issue_from_pagure): + """ + This function tests 'pagure_issues' function where we get an IOError + """ + # Set up return values + get_return = MagicMock() + get_return.__bool__ = mock.Mock(return_value=False) + get_return.__nonzero__ = get_return.__bool__ + get_return.json.side_effect = Exception() + get_return.text.return_value = { + 'issues': [ + {'assignee': 'mock_assignee'} + ] + + } + mock_requests.get.return_value = get_return + + # Call the function + with self.assertRaises(IOError): + list(u.pagure_issues( + upstream='org/repo', + config=self.mock_config + )) + + # Assert everything was called correctly + mock_requests.get.assert_called_with( + 'https://pagure.io/api/0/org/repo/issues', + params={'filter1': 'filter1', 'tags': ['custom_tag']} + ) + mock_issue_from_pagure.assert_not_called() + + @mock.patch('sync2jira.intermediary.Issue.from_pagure') + @mock.patch(PATH + 'requests') + def test_pagure_issues(self, + mock_requests, + mock_issue_from_pagure): + """ + This function tests 'pagure_issues' function + """ + # Set up return values + get_return = MagicMock() + get_return.json.return_value = { + 'issues': [ + {'assignee': 'mock_assignee'} + ] + + } + get_return.request.url = 'mock_url' + mock_requests.get.return_value = get_return + mock_issue_from_pagure.return_value = 'Successful Call!' 
+ + # Call the function + response = list(u.pagure_issues( + upstream='org/repo', + config=self.mock_config + )) + + # Assert everything was called correctly + self.assertEqual(response[0], 'Successful Call!') + mock_requests.get.assert_called_with( + 'https://pagure.io/api/0/org/repo/issues', + params={'filter1': 'filter1', 'tags': ['custom_tag']} + ) + mock_issue_from_pagure.assert_called_with( + 'org/repo', + {'assignee': ['mock_assignee']}, + self.mock_config + ) + + @mock.patch('sync2jira.intermediary.Issue.from_pagure') + def test_handle_pagure_message_not_in_mapped(self, + mock_issue_from_pagure): + """ + This function tests 'handle_pagure_message' where upstream is not in mapped repo + """ + # Set up return values + self.mock_pagure_message['msg']['project']['name'] = 'bad_repo' + # Call the function + response = u.handle_pagure_message( + msg=self.mock_pagure_message, + config=self.mock_config + ) + + # Assert all calls made correctly + self.assertEqual(None, response) + mock_issue_from_pagure.assert_not_called() + + @mock.patch('sync2jira.intermediary.Issue.from_pagure') + def test_handle_pagure_message_bad_filter(self, + mock_issue_from_pagure): + """ + This function tests 'handle_pagure_message' where comparing the actual vs. 
filter does not equate + """ + # Set up return values + self.mock_pagure_message['msg']['issue']['filter1'] = 'filter2' + + # Call function + response = u.handle_pagure_message( + msg=self.mock_pagure_message, + config=self.mock_config) + + # Assert that calls were made correctly + mock_issue_from_pagure.assert_not_called() + self.assertEqual(None, response) + + @mock.patch('sync2jira.intermediary.Issue.from_pagure') + def test_handle_pagure_message_bad_tag(self, + mock_issue_from_pagure): + """ + This function tests 'handle_pagure_message' where the tags do not match + """ + # Set up return values + self.mock_pagure_message['msg']['issue']['tags'] = ['bad_tags'] + + # Call function + response = u.handle_pagure_message( + msg=self.mock_pagure_message, + config=self.mock_config) + + # Assert that calls were made correctly + mock_issue_from_pagure.assert_not_called() + self.assertEqual(None, response) + + @mock.patch('sync2jira.intermediary.Issue.from_pagure') + def test_handle_pagure_message_successful(self, + mock_issue_from_pagure): + """ + This function tests 'handle_pagure_message' where everything goes smoothly + and we test edge cases! + """ + # Set up return values + mock_issue_from_pagure.return_value = "Successful Call!" 
+ + # Call the function + response = u.handle_pagure_message( + msg=self.mock_pagure_message, + config=self.mock_config + ) + + # Assert that calls were made correctly + mock_issue_from_pagure.assert_called_with( + 'org/repo', + {'status': 'Dropped', 'assignee': ['mock_assignee'], 'filter1': 'filter1', 'comments': ['new_comment'], + 'tags': ['custom_tag', 'new_tag']}, + self.mock_config + ) + self.assertEqual(response, 'Successful Call!') + + @mock.patch(PATH + 'Github') + @mock.patch('sync2jira.intermediary.Issue.from_github') + def test_handle_github_message_not_in_mapped(self, + mock_issue_from_github, + mock_github): + """ + This function tests 'handle_github_message' where upstream is not in mapped repos + """ + # Set up return values + self.mock_github_message['msg']['repository']['owner']['login'] = 'bad_owner' + + # Call the function + response = u.handle_github_message( + msg=self.mock_github_message, + config=self.mock_config + ) + + # Assert that all calls were made correctly + mock_issue_from_github.assert_not_called() + mock_github.assert_not_called() + self.assertEqual(None, response) + + @mock.patch(PATH + 'Github') + @mock.patch('sync2jira.intermediary.Issue.from_github') + def test_handle_github_message_pull_request(self, + mock_issue_from_github, + mock_github): + """ + This function tests 'handle_github_message' the issue is a pull request comment + """ + # Set up return values + self.mock_github_message['msg']['issue'] = {'pull_request': 'test'} + + # Call the function + response = u.handle_github_message( + msg=self.mock_github_message, + config=self.mock_config + ) + + # Assert that all calls were made correctly + mock_issue_from_github.assert_not_called() + mock_github.assert_not_called() + self.assertEqual(None, response) + + @mock.patch('sync2jira.intermediary.Issue.from_github') + def test_handle_github_message_bad_filter(self, + mock_issue_from_github): + """ + This function tests 'handle_github_message' where comparing the actual vs. 
filter does not equate + """ + # Set up return values + self.mock_github_message['msg']['issue']['filter1'] = 'filter2' + + # Call function + response = u.handle_github_message( + msg=self.mock_github_message, + config=self.mock_config + ) + # Assert that calls were made correctly + mock_issue_from_github.assert_not_called() + self.assertEqual(None, response) + + @mock.patch('sync2jira.intermediary.Issue.from_github') + def test_handle_github_message_bad_label(self, + mock_issue_from_github): + """ + This function tests 'handle_github_message' where comparing the actual vs. filter does not equate + """ + # Set up return values + self.mock_github_message['msg']['issue']['labels'] = [{'name': 'bad_label'}] + + # Call function + response = u.handle_github_message( + msg=self.mock_github_message, + config=self.mock_config + ) + # Assert that calls were made correctly + mock_issue_from_github.assert_not_called() + self.assertEqual(None, response) + + @mock.patch(PATH + 'Github') + @mock.patch('sync2jira.intermediary.Issue.from_github') + def test_handle_github_message_no_comments(self, + mock_issue_from_github, + mock_github): + """ + This function tests 'handle_github_message' where we have no comments + """ + # Set up return values + mock_issue_from_github.return_value = "Successful Call!" 
+ mock_github.return_value = self.mock_github_client + self.mock_github_message['msg']['issue']['comments'] = 0 + + # Call function + response = u.handle_github_message( + msg=self.mock_github_message, + config=self.mock_config + ) + # Assert that calls were made correctly + mock_issue_from_github.assert_called_with('org/repo', + {'labels': ['custom_tag'], 'number': 'mock_number', + 'comments': [], 'assignees': [{'fullname': 'mock_name'}], + 'filter1': 'filter1', + 'user': {'login': 'mock_login', 'fullname': 'mock_name'}, + 'milestone': 'mock_milestone'}, + self.mock_config) + mock_github.assert_called_with('mock_token', retry=5) + self.assertEqual('Successful Call!', response) + self.mock_github_client.get_repo.assert_not_called() + self.mock_github_repo.get_issue.assert_not_called() + self.mock_github_issue.get_comments.assert_not_called() + self.mock_github_client.get_user.assert_called_with('mock_login') + + @mock.patch(PATH + 'Github') + @mock.patch('sync2jira.intermediary.Issue.from_github') + def test_handle_github_message_successful(self, + mock_issue_from_github, + mock_github): + """ + This function tests 'handle_github_message' where everything goes smoothly! + """ + # Set up return values + mock_issue_from_github.return_value = "Successful Call!" 
+ mock_github.return_value = self.mock_github_client + + # Call function + response = u.handle_github_message( + msg=self.mock_github_message, + config=self.mock_config + ) + + # Assert that calls were made correctly + mock_issue_from_github.assert_called_with('org/repo', + {'labels': ['custom_tag'], 'number': 'mock_number', + 'comments': [{'body': 'mock_body', 'name': 'mock_user_login', + 'author': 'mock_username', 'changed': None, + 'date_created': 'mock_created_at', 'id': 'mock_id'}], + 'assignees': [{'fullname': 'mock_name'}], + 'filter1': 'filter1', 'user': + {'login': 'mock_login', 'fullname': 'mock_name'}, + 'milestone': 'mock_milestone'}, self.mock_config) + mock_github.assert_called_with('mock_token', retry=5) + self.assertEqual('Successful Call!', response) + self.mock_github_client.get_repo.assert_called_with('org/repo') + self.mock_github_repo.get_issue.assert_called_with(number='mock_number') + self.mock_github_issue.get_comments.assert_any_call() + self.mock_github_client.get_user.assert_called_with('mock_login') + + @mock.patch(PATH + '_fetch_github_data') + @mock.patch(PATH + '_github_link_field_to_dict') + def test_get_all_github_data(self, + mock_github_link_field_to_dict, + mock_fetch_github_data): + """ + This tests the '_get_all_github_data' function + """ + # Set up return values + get_return = MagicMock() + get_return.json.return_value = [{'comments_url': 'mock_comments_url'}] + get_return.headers = {'link': 'mock_link'} + mock_fetch_github_data.return_value = get_return + + # Call the function + response = list(u.get_all_github_data( + url='mock_url', + headers='mock_headers' + )) + + # Assert everything was called correctly + mock_fetch_github_data.assert_any_call('mock_url', 'mock_headers') + mock_fetch_github_data.assert_any_call('mock_comments_url', 'mock_headers') + mock_github_link_field_to_dict.assert_called_with('mock_link') + self.assertEqual('mock_comments_url', response[0]['comments_url']) + + @mock.patch(PATH + 'requests') + def 
test_fetch_github_data_error(self, + mock_requests): + """ + Tests the '_fetch_github_data' function where we raise an IOError + """ + # Set up return values + get_return = MagicMock() + get_return.__bool__ = mock.Mock(return_value=False) + get_return.__nonzero__ = get_return.__bool__ + get_return.json.side_effect = Exception() + get_return.text.return_value = { + 'issues': [ + {'assignee': 'mock_assignee'} + ] + + } + mock_requests.get.return_value = get_return + + # Call the function + with self.assertRaises(IOError): + u._fetch_github_data( + url='mock_url', + headers='mock_headers' + ) + + # Assert everything was called correctly + mock_requests.get.assert_called_with('mock_url', headers='mock_headers') + + @mock.patch(PATH + 'requests') + def test_fetch_github_data(self, + mock_requests): + """ + Tests the '_fetch_github_data' function where everything goes smoothly! + """ + # Set up return values + get_return = MagicMock() + get_return.__bool__ = mock.Mock(return_value=True) + get_return.__nonzero__ = get_return.__bool__ + mock_requests.get.return_value = get_return + + # Call the function + + response = u._fetch_github_data( + url='mock_url', + headers='mock_headers' + ) + + # Assert everything was called correctly + mock_requests.get.assert_called_with('mock_url', headers='mock_headers') + self.assertEqual(response, get_return) diff --git a/tests/test_upstream_pr.py b/tests/test_upstream_pr.py new file mode 100644 index 0000000..1f438eb --- /dev/null +++ b/tests/test_upstream_pr.py @@ -0,0 +1,336 @@ +import mock +import unittest +try: + # Python 3.3 > + from unittest.mock import MagicMock # noqa: F401 +except ImportError: + from mock import MagicMock # noqa: F401 + + +import sync2jira.upstream_pr as u + + +PATH = 'sync2jira.upstream_pr.' 
+ + +class TestUpstreamPR(unittest.TestCase): + """ + This class tests the upstream_pr.py file under sync2jira + """ + def setUp(self): + self.mock_config = { + 'sync2jira': { + 'map': { + 'github': { + 'org/repo': {'sync': ['pullrequest']}, + }, + 'pagure': { + 'org/repo': {'sync': ['pullrequest']}, + }, + }, + 'jira': { + # Nothing, really.. + }, + 'filters': { + 'github': + {'org/repo': {'filter1': 'filter1', 'labels': 'custom_tag'}}, + 'pagure': + {'org/repo': {'filter1': 'filter1', 'tags': ['custom_tag']}}, + }, + 'github_token': 'mock_token' + }, + } + # Mock Pagure Message + self.mock_pagure_message = { + 'msg': { + 'pullrequest': { + 'assignee': 'mock_assignee', + 'project': { + 'name': 'org/repo' + }, + 'issue': { + 'filter1': 'filter1', + 'tags': ['custom_tag'], + 'comments': [{ + 'date_created': '1234', + 'user': { + 'name': 'mock_name' + }, + 'comment': 'mock_body', + 'id': '1234', + }], + 'assignee': 'mock_assignee' + }, + 'tags': ['new_tag'], + 'comment': 'new_comment', + 'status': 'Open' + }, + 'topic': 'io.pagure.prod.pagure.issue.drop', + } + } + + # Mock Github Comment + self.mock_github_comment = MagicMock() + self.mock_github_comment.user.name = 'mock_username' + self.mock_github_comment.user.login = 'mock_user_login' + self.mock_github_comment.body = 'mock_body' + self.mock_github_comment.id = 'mock_id' + self.mock_github_comment.created_at = 'mock_created_at' + + # Mock Github Message + self.mock_github_message = { + 'msg': { + 'repository': { + 'owner': { + 'login': 'org' + }, + 'name': 'repo' + }, + 'pull_request': { + 'filter1': 'filter1', + 'labels': [{'name': 'custom_tag'}], + 'comments': ['some_comments!'], + 'number': 'mock_number', + 'user': { + 'login': 'mock_login' + }, + 'assignees': [{'login': 'mock_login'}], + 'milestone': { + 'title': 'mock_milestone' + } + }, + } + } + + # Mock github issue + self.mock_github_pr = MagicMock() + self.mock_github_pr.get_issue_comments.return_value = [self.mock_github_comment] + + # Mock Github 
Issue Raw
        # Raw GitHub issue payload as returned by get_all_github_data;
        # consumed by test_github_issues below.
        self.mock_github_issue_raw = {
            'comments': ['some comment'],
            'number': '1234',
            'user': {
                'login': 'mock_login'
            },
            'assignees': [{'login': 'mock_assignee_login'}],
            'labels': [{'name': 'some_label'}],
            'milestone': {
                'title': 'mock_milestone'
            }
        }

        # Mock Github Reporter
        self.mock_github_person = MagicMock()
        self.mock_github_person.name = 'mock_name'

        # Mock Github Repo
        self.mock_github_repo = MagicMock()
        self.mock_github_repo.get_pull.return_value = self.mock_github_pr
        self.mock_github_repo.get_issue.return_value = self.mock_github_pr

        # Mock Github Client
        self.mock_github_client = MagicMock()
        self.mock_github_client.get_repo.return_value = self.mock_github_repo
        self.mock_github_client.get_user.return_value = self.mock_github_person

    @mock.patch('sync2jira.intermediary.PR.from_pagure')
    def test_handle_pagure_message(self,
                                   mock_pr_from_pagure):
        """
        This function tests 'handle_pagure_message' for a repo that is in
        the mapped Pagure repos, asserting the PR is built from the
        reconstructed upstream payload.
        """
        # Set up return values
        mock_pr_from_pagure.return_value = "Successful Call!"

        # Call the function
        response = u.handle_pagure_message(
            msg=self.mock_pagure_message,
            config=self.mock_config,
            suffix='comment',
        )

        # Assert that calls were made correctly
        mock_pr_from_pagure.assert_called_with(
            'org/repo', {'assignee': ['mock_assignee'],
                         'project': {'name': 'org/repo'},
                         'issue': {'filter1': 'filter1',
                                   'tags': ['custom_tag'],
                                   'comments':
                                       [{'date_created': '1234', 'user':
                                           {'name': 'mock_name'},
                                         'comment': 'mock_body',
                                         'id': '1234'}],
                                   'assignee': 'mock_assignee'},
                         'tags': ['new_tag'],
                         'comment': 'new_comment', 'status': 'Open'},
            'open',
            self.mock_config,
        )
        self.assertEqual(response, 'Successful Call!')

    @mock.patch('sync2jira.intermediary.PR.from_pagure')
    def test_handle_pagure_message_not_in_mapped(self,
                                                 mock_pr_from_pagure):
        """
        This function tests 'handle_pagure_message' where upstream is not in mapped repo
        """
        # Set up return values
        self.mock_pagure_message['msg']['pullrequest']['project']['name'] = 'bad_repo'

        # Call the function
        response = u.handle_pagure_message(
            msg=self.mock_pagure_message,
            config=self.mock_config,
            suffix='comment',
        )

        # Assert all calls made correctly: unmapped repos are skipped entirely
        self.assertEqual(None, response)
        mock_pr_from_pagure.assert_not_called()

    @mock.patch(PATH + 'Github')
    @mock.patch('sync2jira.intermediary.PR.from_github')
    def test_handle_github_message(self,
                                   mock_pr_from_github,
                                   mock_github):
        """
        This function tests 'handle_github_message' for a mapped repo,
        asserting that the GitHub client is queried to reformat the
        message before PR.from_github is called.
        """
        # Set up return values
        mock_pr_from_github.return_value = "Successful Call!"
        mock_github.return_value = self.mock_github_client

        # Call function
        response = u.handle_github_message(
            msg=self.mock_github_message,
            config=self.mock_config,
            suffix='mock_suffix'
        )

        # Assert that calls were made correctly
        mock_pr_from_github.assert_called_with(
            'org/repo',
            {'filter1': 'filter1', 'labels': ['custom_tag'],
             'comments': [{'author': 'mock_username',
                           'name': 'mock_user_login',
                           'body': 'mock_body', 'id': 'mock_id',
                           'date_created': 'mock_created_at',
                           'changed': None}], 'number': 'mock_number',
             'user': {'login': 'mock_login', 'fullname': 'mock_name'},
             'assignees': [{'fullname': 'mock_name'}],
             'milestone': 'mock_milestone'}, 'mock_suffix', self.mock_config)
        mock_github.assert_called_with('mock_token')
        self.assertEqual('Successful Call!', response)
        self.mock_github_client.get_repo.assert_called_with('org/repo')
        self.mock_github_repo.get_pull.assert_called_with(number='mock_number')
        self.mock_github_pr.get_issue_comments.assert_any_call()
        self.mock_github_client.get_user.assert_called_with('mock_login')

    @mock.patch(PATH + 'Github')
    @mock.patch('sync2jira.intermediary.Issue.from_github')
    def test_handle_github_message_not_in_mapped(self,
                                                 mock_issue_from_github,
                                                 mock_github):
        """
        This function tests 'handle_github_message' where upstream is not in mapped repos
        """
        # Set up return values
        self.mock_github_message['msg']['repository']['owner']['login'] = 'bad_owner'

        # Call the function
        response = u.handle_github_message(
            msg=self.mock_github_message,
            config=self.mock_config,
            suffix='mock_suffix'
        )

        # Assert that all calls were made correctly: no client is built for
        # an unmapped owner and nothing is returned
        mock_issue_from_github.assert_not_called()
        mock_github.assert_not_called()
        self.assertEqual(None, response)

    @mock.patch('sync2jira.intermediary.PR.from_pagure')
    @mock.patch(PATH + 'requests')
    def test_pagure_issues(self,
                           mock_requests,
                           mock_pr_from_pagure):
        """
        This function tests the 'pagure_prs' function
        (NOTE(review): method name says 'issues' but the call under test is
        u.pagure_prs — the docstring now reflects what is actually tested)
        """
        # Set up return values
        get_return = MagicMock()
        get_return.json.return_value = {
            'requests': [
                {'assignee': 'mock_assignee'}
            ]
        }
        get_return.request.url = 'mock_url'
        mock_requests.get.return_value = get_return
        mock_pr_from_pagure.return_value = 'Successful Call!'

        # Call the function
        response = list(u.pagure_prs(
            upstream='org/repo',
            config=self.mock_config,
        ))

        # Assert everything was called correctly
        self.assertEqual(response[0], 'Successful Call!')
        mock_requests.get.assert_called_with(
            'https://pagure.io/api/0/org/repo/pull-requests',
            params={'filter1': 'filter1', 'tags': ['custom_tag']}
        )
        mock_pr_from_pagure.assert_called_with(
            'org/repo',
            {'assignee': ['mock_assignee']},
            'open',
            self.mock_config
        )

    @mock.patch('sync2jira.intermediary.PR.from_github')
    @mock.patch(PATH + 'Github')
    @mock.patch(PATH + 'u_issue.get_all_github_data')
    def test_github_issues(self,
                           mock_get_all_github_data,
                           mock_github,
                           mock_pr_from_github):
        """
        This function tests the 'github_prs' function
        (NOTE(review): method name says 'issues' but the call under test is
        u.github_prs — the docstring now reflects what is actually tested)
        """
        # Set up return values
        mock_github.return_value = self.mock_github_client
        mock_get_all_github_data.return_value = [self.mock_github_issue_raw]
        mock_pr_from_github.return_value = 'Successful Call!'

        # Call the function
        response = list(u.github_prs(
            upstream='org/repo',
            config=self.mock_config
        ))

        # Assert that calls were made correctly
        mock_get_all_github_data.assert_called_with(
            'https://api.github.com/repos/org/repo/pulls?filter1=filter1&labels=custom_tag',
            {'Authorization': 'token mock_token'}
        )
        self.mock_github_client.get_user.assert_any_call('mock_login')
        self.mock_github_client.get_user.assert_any_call('mock_assignee_login')
        mock_pr_from_github.assert_called_with(
            'org/repo',
            {'comments':
                [{'author': 'mock_username', 'name': 'mock_user_login',
                  'body': 'mock_body', 'id': 'mock_id',
                  'date_created': 'mock_created_at', 'changed': None}],
             'number': '1234', 'user':
                {'login': 'mock_login', 'fullname': 'mock_name'},
             'assignees': [{'fullname': 'mock_name'}],
             'labels': ['some_label'], 'milestone': 'mock_milestone'},
            'open',
            self.mock_config
        )
        self.mock_github_client.get_repo.assert_called_with('org/repo')
        self.mock_github_repo.get_pull.assert_called_with(number='1234')
        self.mock_github_pr.get_issue_comments.assert_any_call()
        self.assertEqual(response[0], 'Successful Call!')
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..db66d03
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,32 @@
+[tox]
+envlist = py39,lint
+
+[testenv]
+passenv = TRAVIS TRAVIS_*
+setenv =
+    DEFAULT_FROM = mock_email@mock.com
+    DEFAULT_SERVER = mock_server
+    INITIALIZE=1
+    CONFLUENCE_SPACE=mock_confluence_space
+    CONFLUENCE_PAGE_TITLE=mock_confluence_page_title
+    CONFLUENCE_URL=http://mock_confluence_url
+    CONFLUENCE_USERNAME=mock_confluence_username
+    CONFLUENCE_PASSWORD=mock_confluence_password
+basepython =
+    py39: python3.9
+deps =
+    -r{toxinidir}/requirements.txt
+    -r{toxinidir}/test-requirements.txt
+sitepackages = True
+whitelist_externals = /usr/bin/flake8
+commands =
+    coverage run -m pytest {posargs} --ignore=tests/integration_tests
+# Add the following line locally to get an HTML report --cov-report 
html:htmlcov-py39 + +[testenv:lint] +skip_install = true +basepython = python3.9 +deps = + flake8 +commands = + flake8 sync2jira --max-line-length=140