from setuptools import setup
import os
with open('requirements.txt', 'rb') as f:
install_requires = f.read().decode('utf-8').split('\n')
+ if not os.getenv('READTHEDOCS'):
+ install_requires.append('requests-kerberos')
+
+with open('test-requirements.txt', 'rb') as f:
+ test_requires = f.read().decode('utf-8').split('\n')
setup(
- name='webhook-sync2jira',
+ name='sync2jira',
version=2.0,
- description="Sync Github issues to JIRA, via Github Webhooks",
- author='Sid Premkumar',
- author_email='sid@bastionzero.com',
- url='https://github.com/cwcrypto/WebHookSync2Jira',
+ description="Sync pagure and github issues to jira, via fedmsg",
+ author='Ralph Bean',
+ author_email='rbean@redhat.com',
+ url='https://pagure.io/sync-to-jira',
license='LGPLv2+',
classifiers=[
"Development Status :: 5 - Production/Stable",
@@ -39,6 +44,8 @@
"Programming Language :: Python :: 3",
],
install_requires=install_requires,
+ tests_require=test_requires,
+ test_suite='nose.collector',
packages=[
'sync2jira',
],
@@ -47,6 +54,8 @@
entry_points={
'console_scripts': [
"sync2jira=sync2jira.main:main",
+ "sync2jira-list-managed-urls=sync2jira.main:list_managed",
+ "sync2jira-close-duplicates=sync2jira.main:close_duplicates",
],
},
)
diff --git a/sync-page/__init__.py b/sync-page/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/sync-page/assets/font.css b/sync-page/assets/font.css
new file mode 100644
index 0000000..c795562
--- /dev/null
+++ b/sync-page/assets/font.css
@@ -0,0 +1,162 @@
+
+ @font-face {
+ font-family: "overpass";
+ font-style: normal;
+ font-weight: 200;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin.eot");
+ /* IE9 Compat Modes */
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin.ttf") format("truetype");
+ /* Safari, Android, iOS */
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: italic;
+ font-weight: 200;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin-italic.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-thin-italic.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: normal;
+ font-weight: 300;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: italic;
+ font-weight: 300;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight-italic.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extralight-italic.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: normal;
+ font-weight: 400;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: italic;
+ font-weight: 400;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light-italic.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-light-italic.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: normal;
+ font-weight: 500;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-regular.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-regular.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-regular.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-regular.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-regular.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: italic;
+ font-weight: 500;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-italic.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-italic.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: normal;
+ font-weight: 600;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: italic;
+ font-weight: 600;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold-italic.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-semibold-italic.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: normal;
+ font-weight: 700;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: italic;
+ font-weight: 700;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold-italic.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-bold-italic.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: normal;
+ font-weight: 800;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: italic;
+ font-weight: 800;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold-italic.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-extrabold-italic.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: normal;
+ font-weight: 900;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass";
+ font-style: italic;
+ font-weight: 900;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy-italic.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy-italic.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy-italic.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy-italic.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-webfont/overpass-heavy-italic.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass-mono";
+ font-style: normal;
+ font-weight: 300;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-light.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-light.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-light.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-light.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-light.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass-mono";
+ font-style: normal;
+ font-weight: 400;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-regular.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-regular.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-regular.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-regular.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-regular.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass-mono";
+ font-style: normal;
+ font-weight: 500;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-semibold.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-semibold.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-semibold.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-semibold.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-semibold.ttf") format("truetype");
+ }
+
+ @font-face {
+ font-family: "overpass-mono";
+ font-style: normal;
+ font-weight: 600;
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-bold.eot");
+ src: url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-bold.eot?#iefix") format("embedded-opentype"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-bold.woff2") format("woff2"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-bold.woff") format("woff"), url("https://unpkg.com/@patternfly/patternfly@2.40.13/assets/fonts/overpass-mono-webfont/overpass-mono-bold.ttf") format("truetype");
+ }
diff --git a/sync-page/assets/redhat-favicon.ico b/sync-page/assets/redhat-favicon.ico
new file mode 100644
index 0000000..9072de7
Binary files /dev/null and b/sync-page/assets/redhat-favicon.ico differ
diff --git a/sync-page/event-handler.py b/sync-page/event-handler.py
new file mode 100644
index 0000000..43c7003
--- /dev/null
+++ b/sync-page/event-handler.py
@@ -0,0 +1,76 @@
+# Built-In Modules
+import logging
+import os
+
+# 3rd Party Modules
+from flask import Flask, render_template, request, redirect
+
+# Local Modules
+from sync2jira.main import load_config, initialize_pr, initialize_issues
+
+# Global Variables
+app = Flask(__name__, static_url_path = "/assets", static_folder = "assets")
+BASE_URL = os.environ['BASE_URL']
+REDIRECT_URL = os.environ['REDIRECT_URL']
+config = load_config()
+
+# Set up our logging
+FORMAT = "[%(asctime)s] %(levelname)s: %(message)s"
+logging.basicConfig(format=FORMAT, level=logging.INFO)
+logging.basicConfig(format=FORMAT, level=logging.DEBUG)
+logging.basicConfig(format=FORMAT, level=logging.WARNING)
+log = logging.getLogger('sync2jira-sync-page')
+
+
+@app.route('/handle-event', methods=['POST'])
+def handle_event():
+ """
+ Handler for when a user wants to sync a repo
+ """
+ response = request.form
+ synced_repos = []
+ for repo_name, switch in response.items():
+ if switch == "on":
+ # Sync repo_name
+ log.info(f"Starting sync for repo: {repo_name}")
+ initialize_issues(config, repo_name=repo_name)
+ initialize_pr(config, repo_name=repo_name)
+ synced_repos.append(repo_name)
+ if synced_repos:
+ return render_template('sync-page-success.jinja',
+ synced_repos=synced_repos,
+ url=f"http://{REDIRECT_URL}")
+ else:
+ return render_template('sync-page-failure.jinja',
+ url=f"http://{REDIRECT_URL}")
+
+
+@app.route('/', methods=['GET'])
+def index():
+ """
+ Return relevant redirect
+ """
+ return redirect("/github")
+
+@app.route('/github', methods=['GET'])
+def github():
+ """
+ Github Sync Page
+ """
+ # Build and return our updated HTML page
+ return render_template('sync-page-github.jinja',
+ github=config['sync2jira']['map']['github'],
+ url=f"http://{REDIRECT_URL}")
+
+@app.route('/pagure', methods=['GET'])
+def pagure():
+ """
+ Pagure Sync Page
+ """
+ # Build and return our updated HTML page
+ return render_template('sync-page-pagure.jinja',
+ pagure=config['sync2jira']['map']['pagure'],
+ url=f"http://{REDIRECT_URL}")
+
+if __name__ == '__main__':
+ app.run(host=BASE_URL)
diff --git a/sync-page/templates/sync-page-failure.jinja b/sync-page/templates/sync-page-failure.jinja
new file mode 100644
index 0000000..5f64b0d
--- /dev/null
+++ b/sync-page/templates/sync-page-failure.jinja
@@ -0,0 +1,70 @@
+
+
+
+
+
+
+
+
+
+
+ Sync2Jira - Sync Page
+
+
+
+
+
+
+
+
+
+
Failed to start syncing
+
Make sure you selected at least one repo!
+
+
+
+
+
+
+
diff --git a/sync-page/templates/sync-page-github.jinja b/sync-page/templates/sync-page-github.jinja
new file mode 100644
index 0000000..fd9dbbf
--- /dev/null
+++ b/sync-page/templates/sync-page-github.jinja
@@ -0,0 +1,77 @@
+
+
+
+
+
+
+
+
+
+
+ Sync2Jira - Sync Page
+
+
+
+
+
+
+
+
+
GitHub Sync
+
Select the repos you would like to be re-synced!
+
+
+
+
+
+
+
diff --git a/sync-page/templates/sync-page-pagure.jinja b/sync-page/templates/sync-page-pagure.jinja
new file mode 100644
index 0000000..0038ff4
--- /dev/null
+++ b/sync-page/templates/sync-page-pagure.jinja
@@ -0,0 +1,77 @@
+
+
+
+
+
+
+
+
+
+
+ Sync2Jira - Sync Page
+
+
+
+
+
+
+
+
+
Pagure Sync
+
Select the repos you would like to be re-synced!
+
+
+
+
+
+
+
diff --git a/sync-page/templates/sync-page-success.jinja b/sync-page/templates/sync-page-success.jinja
new file mode 100644
index 0000000..440ee78
--- /dev/null
+++ b/sync-page/templates/sync-page-success.jinja
@@ -0,0 +1,74 @@
+
+
+
+
+
+
+
+
+
+
+ Sync2Jira - Sync Page
+
+
+
+
+
+
+
+
+
+
Successfully started syncing the following repos:
+
+ {% for repo_name in synced_repos %}
+ {{ repo_name }}
+ {% endfor %}
+
+
+
+
+
+
+
+
diff --git a/sync2jira/confluence_client.py b/sync2jira/confluence_client.py
new file mode 100644
index 0000000..51e19be
--- /dev/null
+++ b/sync2jira/confluence_client.py
@@ -0,0 +1,245 @@
+#!/usr/bin/python3
+"""
+This script acts as a client to confluence, connects to confluence and create
+pages
+"""
+import logging
+import os
+import re
+import requests
+from requests.auth import HTTPBasicAuth
+import jinja2
+import datetime
+
+# Global Variables
+log = logging.getLogger('sync2jira')
+
+
+class ConfluenceClient:
+
+ """ A conflence component used to connect to confluence and perform
+ confluence related tasks
+ """
+
+ def __init__(
+ self,
+ confluence_space=os.environ.get("CONFLUENCE_SPACE"),
+ confluence_page_title=os.environ.get("CONFLUENCE_PAGE_TITLE"),
+ confluence_url=os.environ.get("CONFLUENCE_URL"),
+ username=os.environ.get("CONFLUENCE_USERNAME"),
+ password=os.environ.get("CONFLUENCE_PASSWORD"),
+ auth_type="basic",
+ ):
+ """ Returns confluence client object
+ :param string confluence_space : space to be used in confluence
+ :param string confluence_page_title : Title of page to be created in
+ confluence
+ :param string confluence_url : url to connect confluence
+ :param string username : optional username for basic auth
+ :param string password : optional password for basic auth
+ :param string auth_type : indicate auth scheme (basic/kerberos)
+ """
+ self.confluence_space = confluence_space
+ self.confluence_page_title = confluence_page_title
+ self.confluence_url = confluence_url
+ self.confluence_rest_url = self.confluence_url + "/rest/api/content/"
+ self.username = username
+ self.password = password
+ self.authtype = auth_type
+ self.update_stat = False
+ self._req_kwargs = None
+
+ # Find our page ID and save it
+ resp = self.find_page()
+ if not resp:
+ raise ValueError("Invalid page name")
+ self.page_id = resp
+
+ def update_stat_value(self, new_value):
+ """ Update the 'update_stat' attribute.
+ :param Bool new_value: Bool value
+ """
+ self.update_stat = new_value
+
+ @property
+ def req_kwargs(self):
+ """ Set the key-word arguments for python-requests depending on the
+ auth type. This code should run on demand exactly once, which is
+ why it is a property.
+ :return dict _req_kwargs: dict with the right options to pass in
+ """
+ if self._req_kwargs is None:
+ if self.authtype == "basic":
+ self._req_kwargs = {"auth": self.get_auth_object()}
+ return self._req_kwargs
+
+ def update_stat_page(self, confluence_data):
+ """
+ Updates the statistic page with more data
+ :param dict confluence_data: Variable amount of new data
+ """
+ try:
+ # Get the HTML to update
+ page_info = self.get_page_info(self.page_id)
+ page_html = page_info['body']['storage']['value']
+ # Maintain and update our final data
+ confluence_data_update = {
+ 'Created Issues': 0,
+ 'Descriptions': 0,
+ 'Comments': 0,
+ 'Reporters': 0,
+ 'Status': 0,
+ 'Assignees': 0,
+ 'Transitions': 0,
+ 'Title': 0,
+ 'Tags': 0,
+ 'FixVersion': 0,
+ 'Misc. Fields': 0,
+ 'Total': 0
+ }
+ confluence_data_times = {
+ 'Created Issues': 60,
+ 'Descriptions': 30,
+ 'Comments': 30,
+ 'Reporters': 30,
+ 'Assignees': 15,
+ 'Status': 30,
+ 'Transitions': 30,
+ 'Title': 15,
+ 'Tags': 10,
+ 'FixVersion': 10,
+ 'Misc. Fields': 15,
+ }
+ # Use these HTML patterns to search for previous values
+ confluence_html_patterns = {
+ 'Created Issues': "Created Issues",
+ 'Descriptions': "Descriptions ",
+ 'Comments': "Comments ",
+ 'Reporters': "Reporters ",
+ 'Assignees': "Assignees ",
+ 'Status': "Status ",
+ 'Transitions': "Transitions ",
+ 'Title': "Titles ",
+ 'Tags': "Tags ",
+ 'FixVersion': "Fix Version ",
+ 'Misc. Fields': "Misc. Fields ",
+ }
+ # Update all our data
+ total = 0
+ for topic, html in confluence_html_patterns.items():
+ # Search for previous data
+ try:
+ ret = re.search(html, page_html)
+ start_index = ret.span()[1]
+ new_val = ""
+ while page_html[start_index] != "<":
+ new_val += page_html[start_index]
+ start_index += 1
+ confluence_data_update[topic] = int(new_val)
+ total += int(new_val)
+ except AttributeError:
+ log.warning(f"Confluence failed on parsing {topic}")
+ total += 0
+ confluence_data_update[topic] = 0
+
+ # Now add new data
+ for topic in confluence_html_patterns.keys():
+ if topic in confluence_data:
+ confluence_data_update[topic] += confluence_data[topic]
+ total += confluence_data[topic]
+ confluence_data_update["Total"] = total
+
+ # Calculate Total Time
+ total_time = 0
+ for topic in confluence_data_times.keys():
+ total_time += confluence_data_update[topic] * confluence_data_times[topic]
+ total_time = datetime.timedelta(seconds=total_time)
+ confluence_data_update["Total Time"] = str(total_time) + " (HR:MIN:SEC)"
+
+ # Build our updated HTML page
+ templateLoader = jinja2.FileSystemLoader(
+ searchpath='usr/local/src/sync2jira/sync2jira/')
+ templateEnv = jinja2.Environment(loader=templateLoader)
+ template = templateEnv.get_template('confluence_stat.jinja')
+ html_text = template.render(confluence_data=confluence_data_update)
+
+ # Finally update our page
+ if html_text.replace(" ", "") != page_html.replace(" ", ""):
+ self.update_page(self.page_id, html_text)
+ except: # noqa E722
+ log.exception("Something went wrong updating confluence!")
+
+ def find_page(self):
+ """ finds the page with confluence_page_title in confluence_space
+ return string page_id : id of the page if found, otherwise None
+ """
+ search_url = (
+ self.confluence_url
+ + "/rest/api/content/search?cql=title='"
+ + self.confluence_page_title
+ + "' and "
+ + "space="
+ + self.confluence_space
+ )
+ resp = requests.get(search_url, **self.req_kwargs)
+ if len(resp.json()["results"]) > 0:
+ return resp.json()["results"][0].get("id", None)
+ else:
+ return None
+
+ def get_page_info(self, page_id):
+ """Gives information like ancestors,version of a page
+ :param string page_id: id of the confluence page
+ :return json conf_resp: response from the confluence
+ """
+ conf_rest_url = (
+ self.confluence_url
+ + "/rest/api/content/"
+ + page_id
+ + "?expand=ancestors,version,body.storage"
+ )
+ resp = requests.get(conf_rest_url, **self.req_kwargs)
+ return resp.json()
+
+ def update_page(self, page_id, html_str):
+ """
+ Updates the page with id page_id
+ :param string page_id: id of the page
+ :param string html_str : html_str content of the page
+ :return json conf_resp: response from the confluence
+ """
+ rest_url = self.confluence_rest_url + page_id
+ info = self.get_page_info(page_id)
+ updated_page_version = int(info["version"]["number"] + 1)
+
+ data = {
+ "id": str(page_id),
+ "type": "page",
+ "title": info["title"],
+ "version": {"number": updated_page_version},
+ "body": {"storage": {"representation": "storage", "value": html_str}},
+ }
+ resp = requests.put(rest_url, json=data, **self.req_kwargs)
+ if not resp.ok:
+ log.error("Error updating confluence page!\nConfluence response: %s\n", resp.json())
+
+ return resp.json()
+
+ def get_auth_object(self):
+ """Returns Auth object based on auth type
+ :return : Auth Object
+ """
+ if self.authtype == "basic":
+ return HTTPBasicAuth(self.username, self.password)
+
+
+if os.environ.get('CONFLUENCE_SPACE') != 'mock_confluence_space':
+ confluence_client = ConfluenceClient()
+else:
+ # Else we are testing, and create a mock_client
+ class mock_confluence_client(object):
+ mock_data = False
+ update_stat = False
+ def update_stat_value(self, **kwargs): return
+ def update_stat_page(self, **kwargs): return
+ confluence_client = mock_confluence_client()
diff --git a/sync2jira/confluence_stat.jinja b/sync2jira/confluence_stat.jinja
new file mode 100644
index 0000000..9c9d55b
--- /dev/null
+++ b/sync2jira/confluence_stat.jinja
@@ -0,0 +1,55 @@
+
+
+
+
+
+
+
+
+
+
+ Type of Sync
+ Number of Syncs
+ Avg. Time (Seconds)
+
+
+ Created Issues {{ confluence_data['Created Issues'] }} 60
+
+
+ Descriptions {{ confluence_data['Descriptions'] }} 30
+
+
+ Comments {{ confluence_data['Comments'] }} 30
+
+
+ Reporters {{ confluence_data['Reporters'] }} 30
+
+
+ Assignees {{ confluence_data['Assignees'] }} 15
+
+
+ Status {{ confluence_data['Status'] }} 30
+
+
+ Transitions {{ confluence_data['Transitions'] }} 30
+
+
+ Titles {{ confluence_data['Title'] }} 15
+
+
+ Tags {{ confluence_data['Tags'] }} 5
+
+
+ Fix Version {{ confluence_data['FixVersion'] }} 5
+
+
+ Misc. Fields {{ confluence_data['Misc. Fields'] }} 15
+
+
+ Total {{ confluence_data['Total'] }} {{ confluence_data['Total Time'] }}
+
+
+
+
+
+
\ No newline at end of file
diff --git a/sync2jira/downstream_issue.py b/sync2jira/downstream_issue.py
index b05e783..133066e 100644
--- a/sync2jira/downstream_issue.py
+++ b/sync2jira/downstream_issue.py
@@ -33,6 +33,8 @@
# Local Modules
from sync2jira.intermediary import Issue, PR
+from sync2jira.mailer import send_mail
+from sync2jira.confluence_client import confluence_client
# The date the service was upgraded
# This is used to ensure legacy comments are not touched
@@ -40,10 +42,29 @@
log = logging.getLogger('sync2jira')
+remote_link_title = "Upstream issue"
duplicate_issues_subject = 'FYI: Duplicate Sync2jira Issues'
jira_cache = {}
+
+def check_jira_status(client):
+ """
+ Function tests the status of the JIRA server.
+
+
+ :param jira.client.JIRA client: JIRA client
+ :return: True/False if the server is up
+ :rtype: Bool
+ """
+ # Search for any issue remote title
+ ret = client.search_issues("issueFunction in linkedIssuesOfRemote('*')")
+ if len(ret) < 1:
+ # If we did not find anything return false
+ return False
+ return True
+
+
def _comment_format(comment):
"""
Function to format JIRA comments.
@@ -57,6 +78,20 @@ def _comment_format(comment):
comment['id'], comment['author'], pretty_date, comment['body'])
+def _comment_format_legacy(comment):
+ """
+ Legacy function to format JIRA comments.
+ This is still used to match comments so no
+ duplicates are created.
+
+ :param dict comment: Upstream comment
+ :returns: Comments formatted
+ :rtype: String
+ """
+ return "Upstream, %s wrote:\n\n{quote}\n%s\n{quote}" % (
+ comment['name'], comment['body'])
+
+
def get_jira_client(issue, config):
"""
Function to match and create JIRA client.
@@ -84,7 +119,7 @@ def get_jira_client(issue, config):
if not jira_instance:
log.error("No jira_instance for issue and there is no default in the config")
raise Exception
-
+
client = jira.client.JIRA(**config['sync2jira']['jira'][jira_instance])
return client
@@ -100,10 +135,12 @@ def _matching_jira_issue_query(client, issue, config, free=False):
:returns: results: Returns a list of matching JIRA issues if any are found
:rtype: List
"""
- # Searches for any remote link to the issue.url\
- issue_title = issue.title.replace('[', '').replace(']', '')
- query = f'summary ~ "{issue_title}"'
-
+ # Searches for any remote link to the issue.url
+ query = 'issueFunction in linkedIssuesOfRemote("%s") and ' \
+ 'issueFunction in linkedIssuesOfRemote("%s")' % (
+ remote_link_title, issue.url)
+ if free:
+ query += ' and statusCategory != Done'
# Query the JIRA client and store the results
results_of_query = client.search_issues(query)
if len(results_of_query) > 1:
@@ -150,10 +187,122 @@ def _matching_jira_issue_query(client, issue, config, free=False):
# Return the final_results
log.debug("Found %i results for query %r", len(final_results), query)
+ # Alert the owner
+ if issue.downstream.get('owner'):
+ alert_user_of_duplicate_issues(issue, final_results,
+ results_of_query,
+ config, client)
return final_results
else:
return results_of_query
+
+def alert_user_of_duplicate_issues(issue, final_result, results_of_query,
+ config, client):
+ """
+ Alerts owner of duplicate downstream issues.
+
+ :param sync2jira.intermediate.Issue issue: Upstream Issue object
+ :param List final_result: Issue selected by matching algorithm
+ :param List results_of_query: Result of JQL query
+ :param Dict config: Config dict
+ :param jira.client.JIRA client: JIRA client
+ :returns: Nothing
+ """
+ # First remove final_result from results_of_query
+ results_of_query.remove(final_result[0])
+
+ # Check that all duplicate issues are closed
+ updated_results = []
+ for result in results_of_query:
+ if result.fields.status.name != 'Closed':
+ updated_results.append(result)
+ if not updated_results:
+ # Nothing to alert the owner of
+ return
+
+ # Get base URL
+ jira_instance = issue.downstream.get('jira_instance', False)
+ if not jira_instance:
+ jira_instance = config['sync2jira'].get('default_jira_instance', False)
+ if not jira_instance:
+ log.error("No jira_instance for issue and there is no default in the config")
+ raise Exception
+ base_url = config['sync2jira']['jira'][jira_instance]['options']['server'] + '/browse/'
+
+ # Format the updated results
+ template_ready = []
+ for update in updated_results:
+ url = base_url + update.key
+ new_entry = {'url': url, 'title': update.key}
+ template_ready.append(new_entry)
+
+ # Get owner name and email from Jira
+ ret = client.search_users(issue.downstream.get('owner'))
+ if len(ret) > 1:
+ log.warning('Found multiple users for username %s' % issue.downstream.get('owner'))
+ found = False
+ for person in ret:
+ if person.key == issue.downstream.get('owner'):
+ ret = [person]
+ found = True
+ break
+ if not found:
+ log.warning('Could not find JIRA user for username %s' % issue.downstream.get('owner'))
+ if not ret:
+ message = 'No owner could be found for username %s' % issue.downstream.get('owner')
+ log.warning(message.strip())
+ return
+
+ user = {'name': ret[0].displayName, 'email': ret[0].emailAddress}
+
+ # Format selected issue
+ selected_issue = {'url': base_url + final_result[0].key,
+ 'title': final_result[0].key}
+
+ # Get admin information
+ admins = []
+ admin_template = []
+ for admin in config['sync2jira']['admins']:
+ admin_username = [name for name in admin][0]
+ ret = client.search_users(admin_username)
+ if len(ret) > 1:
+ log.warning('Found multiple users for admin %s' % list(admin.keys())[0])
+ found = False
+ for person in ret:
+ if person.key == issue.downstream.get('owner'):
+ ret = [person]
+ found = True
+ break
+ if not found:
+ log.warning('Could not find JIRA user for admin %s' % list(admin.keys())[0])
+ if not ret:
+ message = 'No admin could be found for username %s' % list(admin.keys())[0]
+ log.warning(message.strip())
+ raise ValueError(message)
+ admins.append(ret[0].emailAddress)
+ admin_template.append({'name': ret[0].displayName, 'email': ret[0].emailAddress})
+
+ # Create and send email
+ templateLoader = jinja2.FileSystemLoader(
+ searchpath='usr/local/src/sync2jira/sync2jira/')
+ templateEnv = jinja2.Environment(loader=templateLoader)
+ template = templateEnv.get_template('email_template.jinja')
+ html_text = template.render(user=user,
+ admins=admin_template,
+ issue=issue,
+ selected_issue=selected_issue,
+ duplicate_issues=template_ready)
+
+ # Send mail
+ send_mail(recipients=[user['email']],
+ cc=admins,
+ subject=duplicate_issues_subject,
+ text=html_text)
+ log.info('Alerted %s about %s duplicate issue(s)' %
+ (user['email'], len(template_ready)))
+
+
def find_username(issue, config):
"""
Finds JIRA username for an issue object.
@@ -203,7 +352,12 @@ def _find_comment_in_jira(comment, j_comments):
:rtype: jira.resource.Comment/None
"""
formatted_comment = _comment_format(comment)
+ legacy_formatted_comment = _comment_format_legacy(comment)
for item in j_comments:
+ if item.raw['body'] == legacy_formatted_comment:
+ # If the comment is in the legacy comment format
+ # return the item
+ return item
if str(comment['id']) in item.raw['body']:
# The comment id's match, if they dont have the same body,
# we need to edit the comment
@@ -258,6 +412,28 @@ def _get_existing_jira_issue(client, issue, config):
return None
+def _get_existing_jira_issue_legacy(client, issue, config):
+ """
+ This is our old way of matching issues: use the special url field.
+ This will be phased out and removed in a future release.
+
+ """
+
+ kwargs = dict(issue.downstream.items())
+ kwargs["External issue URL"] = "%s" % issue.url
+ kwargs = sorted(kwargs.items(), key=operator.itemgetter(0))
+
+ query = " AND ".join([
+ "=".join(["'%s'" % k, "'%s'" % v]) for k, v in kwargs
+ if v is not None
+ ]) + " AND (resolution is null OR resolution = Duplicate)"
+ results = client.search_issues(query)
+ if results:
+ return results[0]
+ else:
+ return None
+
+
def attach_link(client, downstream, remote_link):
"""
Attaches the upstream link to the JIRA ticket.
@@ -269,11 +445,7 @@ def attach_link(client, downstream, remote_link):
:rtype: jira.resources.Issue
"""
log.info("Attaching tracking link %r to %r", remote_link, downstream.key)
- if (downstream.fields.description is None):
- previous_description = ""
- else:
- previous_description = downstream.fields.description
- modified_desc = previous_description + " "
+ modified_desc = downstream.fields.description + " "
# This is crazy. Querying for application links requires admin perms which
# we don't have, so duckpunch the client to think it has already made the
@@ -287,11 +459,69 @@ def attach_link(client, downstream, remote_link):
# gets re-indexed, otherwise our searches won't work. Also, Handle some
# weird API changes here...
log.debug("Modifying desc of %r to trigger re-index.", downstream.key)
- # downstream.update({'description': modified_desc})
+ downstream.update({'description': modified_desc})
return downstream
+def _upgrade_jira_issue(client, downstream, issue, config):
+ """
+ Given an old legacy-style downstream issue...
+ ...upgrade it to a new-style issue.
+ Simply mark it with an external-url field value.
+ """
+ log.info("Upgrading %r %r issue for %r", downstream.key, issue.downstream, issue)
+ if config['sync2jira']['testing']:
+ log.info("Testing flag is true. Skipping actual upgrade.")
+ return
+
+ # Do it!
+ remote_link = dict(url=issue.url, title=remote_link_title)
+ attach_link(client, downstream, remote_link)
+
+
+def assign_user(client, issue, downstream, remove_all=False):
+ """
+    Attempts to assign a JIRA issue to the correct
+ user based on the issue.
+
+ :param jira.client.JIRA client: JIRA Client
+ :param sync2jira.intermediary.Issue issue: Issue object
+ :param jira.resources.Issue downstream: JIRA issue object
+ :param Bool remove_all: Flag to indicate if we should reset the assignees in the JIRA issue
+ :returns: Nothing
+ """
+ # If removeAll flag, then we need to reset the assignees
+ if remove_all:
+ # Update the issue to have no assignees
+ downstream.update(assignee={'name': ''})
+ # Then we're done! And we can go back !
+ return
+
+ # JIRA only supports one assignee
+ # If we have more than one assignee (i.e. from Github)
+ # assign the issue to the first user (i.e. issue.assignee[0])
+
+ # First we need to find the user
+ # Make API call to get a list of users
+ users = client.search_assignable_users_for_issues(
+ issue.assignee[0]['fullname'],
+ project=issue.downstream['project'])
+ # Loop through the query
+ for user in users:
+ if user.displayName == issue.assignee[0]['fullname']:
+ # Then we can assign the issue to the user
+ downstream.update({'assignee': {'name': user.key}})
+ return
+ # If there is an owner, assign it to them
+ if issue.downstream.get('owner'):
+ client.assign_issue(downstream.id, issue.downstream.get('owner'))
+ log.warning('Assigned %s to owner: %s' %
+ (issue.title, issue.downstream.get('owner')))
+ return
+ log.warning('Was not able to assign user %s' % issue.assignee[0]['fullname'])
+
+
def change_status(client, downstream, status, issue):
"""
Change status of JIRA issue.
@@ -312,6 +542,9 @@ def change_status(client, downstream, status, issue):
try:
client.transition_issue(downstream, id)
log.info('Updated downstream to %s status for issue %s' % (status, issue.title))
+ if confluence_client.update_stat:
+ confluence_data = {'Transition': 1}
+ confluence_client.update_stat_page(confluence_data)
except JIRAError:
log.error('Updating downstream issue failed for %s: %s' % (status, issue.title))
else:
@@ -335,17 +568,14 @@ def _create_jira_issue(client, issue, config):
return
custom_fields = issue.downstream.get('custom_fields', {})
+ default_type = issue.downstream.get('type', "Bug")
- # Determine the type of issue based on the tags available
- issue_type = 'Bug'
- if ('story' in issue.tags):
- issue_type = 'Story'
- elif ('task' in issue.tags):
- issue_type = 'Task'
+ confluence_data = {'Misc. Fields': 0, 'Created Issues': 1}
# Build the description of the JIRA issue
if 'description' in issue.downstream.get('issue_updates', {}):
description = "Upstream description: {quote}%s{quote}" % issue.content
+ confluence_data['Descriptions'] = 1
else:
description = ''
@@ -353,6 +583,16 @@ def _create_jira_issue(client, issue, config):
# Just add it to the top of the description
formatted_status = "Upstream issue status: %s" % issue.status
description = formatted_status + '\n' + description
+ confluence_data['Status'] = 1
+
+ if issue.reporter:
+ # Add to the description
+ description = '[%s] Upstream Reporter: %s \n %s' % (
+ issue.id,
+ issue.reporter['fullname'],
+ description
+ )
+ confluence_data['Reporters'] = 1
# Add the url if requested
if 'url' in issue.downstream.get('issue_updates', {}):
@@ -361,9 +601,8 @@ def _create_jira_issue(client, issue, config):
kwargs = dict(
summary=issue.title,
description=description,
- issuetype=dict(name=issue_type),
+ issuetype=dict(name="Story" if "RFE" in issue.title else default_type),
)
-
if issue.downstream['project']:
kwargs['project'] = dict(key=issue.downstream['project'])
if issue.downstream.get('component'):
@@ -379,23 +618,71 @@ def _create_jira_issue(client, issue, config):
# Add labels if needed
if 'labels' in issue.downstream.keys():
kwargs['labels'] = issue.downstream['labels']
-
- jira_username = get_jira_username_from_github(config, issue.reporter['fullname'])
- kwargs['reporter'] = {'id': jira_username}
log.info("Creating issue.")
downstream = client.create_issue(**kwargs)
- remote_link = dict(url=issue.url, title=f"[Issue] {issue.title}")
+ # Add Epic link, QA, EXD-Service field if present
+ if issue.downstream.get('epic-link') or \
+ issue.downstream.get('qa-contact') or \
+ issue.downstream.get('EXD-Service'):
+ # Fetch all fields
+ all_fields = client.fields()
+ # Make a map from field name -> field id
+ name_map = {field['name']: field['id'] for field in all_fields}
+ if issue.downstream.get('epic-link'):
+ # Try to get and update the custom field
+ custom_field = name_map.get('Epic Link', None)
+ if custom_field:
+ try:
+ downstream.update({custom_field: issue.downstream.get('epic-link')})
+ except JIRAError:
+ client.add_comment(downstream, f"Error adding Epic-Link: {issue.downstream.get('epic-link')}")
+ confluence_data['Misc. Fields'] += 1
+ if issue.downstream.get('qa-contact'):
+ # Try to get and update the custom field
+ custom_field = name_map.get('QA Contact', None)
+ if custom_field:
+ downstream.update({custom_field: issue.downstream.get('qa-contact')})
+ confluence_data['Misc. Fields'] += 1
+ if issue.downstream.get('EXD-Service'):
+ # Try to update the custom field
+ exd_service_info = issue.downstream.get('EXD-Service')
+ custom_field = name_map.get('EXD-Service', None)
+ if custom_field:
+ try:
+ downstream.update(
+ {custom_field: {"value": f"{exd_service_info['guild']}",
+ "child": {"value": f"{exd_service_info['value']}"}}})
+ except JIRAError:
+ client.add_comment(downstream,
+ f"Error adding EXD-Service field.\n"
+ f"Project: {exd_service_info['guild']}\n"
+ f"Value: {exd_service_info['value']}")
+ confluence_data['Misc. Fields'] += 1
+
+ # Add upstream issue ID in comment if required
+ if 'upstream_id' in issue.downstream.get('issue_updates', []):
+ comment = f"Creating issue for " \
+ f"[{issue.upstream}-#{issue.upstream_id}|{issue.url}]"
+ client.add_comment(downstream, comment)
+ confluence_data['Misc. Fields'] = 1
+
+ remote_link = dict(url=issue.url, title=remote_link_title)
attach_link(client, downstream, remote_link)
default_status = issue.downstream.get('default_status', None)
if default_status is not None:
change_status(client, downstream, default_status, issue)
+ confluence_data['Transitions'] = 1
+
+ # Update Confluence Page
+ if confluence_client.update_stat:
+ confluence_client.update_stat_page(confluence_data)
# Update relevant information (i.e. tags, assignees etc.) if the
# User opted in
- _update_jira_issue(downstream, issue, client, config)
+ _update_jira_issue(downstream, issue, client)
return downstream
@@ -417,7 +704,7 @@ def _label_matching(jira_labels, issue_labels):
return updated_labels
-def _update_jira_issue(existing, issue, client, config):
+def _update_jira_issue(existing, issue, client):
"""
Updates an existing JIRA issue (i.e. tags, assignee, comments etc).
@@ -456,7 +743,7 @@ def _update_jira_issue(existing, issue, client, config):
# Only synchronize assignee for listings that op-in
if any('assignee' in item for item in updates):
log.info("Looking for new assignee(s)")
- _update_assignee(client, existing, issue, updates, config)
+ _update_assignee(client, existing, issue, updates)
# Only synchronize descriptions for listings that op-in
if 'description' in updates:
@@ -508,6 +795,9 @@ def _update_url(existing, issue):
data = {'description': new_description}
existing.update(data)
log.info('Updated description')
+ if confluence_client.update_stat:
+ confluence_data = {'Misc. Fields': 1}
+ confluence_client.update_stat_page(confluence_data)
def _update_transition(client, existing, issue):
@@ -519,6 +809,7 @@ def _update_transition(client, existing, issue):
:param sync2jira.intermediary.Issue issue: Upstream issue
:returns: Nothing
"""
+ confluence_data = {}
# Update the issue status in the JIRA description
# Format the status
formatted_status = "Upstream issue status: %s" % issue.status
@@ -555,6 +846,9 @@ def _update_transition(client, existing, issue):
data = {'description': new_description}
existing.update(data)
log.info('Updated transition')
+ confluence_data['Status'] = 1
+ if confluence_client.update_stat and confluence_data:
+ confluence_client.update_stat_page(confluence_data)
# If the user just inputted True, only update the description
# If the user added a custom closed status, attempt to close the
@@ -624,6 +918,9 @@ def _update_title(issue, existing):
data = {'summary': issue.title}
existing.update(data)
log.info('Updated title')
+ if confluence_client.update_stat:
+ confluence_data = {'Title': 1}
+ confluence_client.update_stat_page(confluence_data)
def _update_comments(client, existing, issue):
@@ -646,6 +943,9 @@ def _update_comments(client, existing, issue):
client.add_comment(existing, comment_body)
if len(comments_d) > 0:
log.info("Comments synchronization done on %i comments." % len(comments_d))
+ if confluence_client.update_stat:
+ confluence_data = {'Comments': len(comments_d)}
+ confluence_client.update_stat_page(confluence_data)
def _update_fixVersion(updates, existing, issue, client):
@@ -698,13 +998,16 @@ def _update_fixVersion(updates, existing, issue, client):
try:
existing.update(data)
log.info('Updated %s fixVersion(s)' % len(fix_version))
+ if confluence_client.update_stat:
+ confluence_data = {'FixVersion': len(fix_version)}
+ confluence_client.update_stat_page(confluence_data)
except JIRAError:
log.warning('Error updating the fixVersion. %s is an invalid fixVersion.' % issue.fixVersion)
# Add a comment to indicate there was an issue
client.add_comment(existing, f"Error updating fixVersion: {issue.fixVersion}")
-def _update_assignee(client, existing, issue, updates, config):
+def _update_assignee(client, existing, issue, updates):
"""
Helper function update existing JIRA assignee from downstream issue.
@@ -712,12 +1015,8 @@ def _update_assignee(client, existing, issue, updates, config):
:param jira.resource.Issue existing: Existing JIRA issue
:param sync2jira.intermediary.Issue issue: Upstream issue
:param List updates: Downstream updates requested by the user
- :param dict config: Config dict
:returns: Nothing
"""
- if not issue.assignee:
- return
-
# First check if overwrite is set to True
try:
# For python 3 >
@@ -726,18 +1025,11 @@ def _update_assignee(client, existing, issue, updates, config):
# for python 2.7
overwrite = bool((filter(lambda d: "assignee" in d, updates))[0]['assignee']['overwrite'])
- # First find our mapped user in JIRA if they exist, else just quit
- mapped_jira_id = config['mapping'][issue.assignee[0].name]['jira']
-
- if not mapped_jira_id:
- log.warn('Could not update assignee')
- return
-
# First check if the issue is already assigned to the same person
update = False
if issue.assignee and issue.assignee[0]:
try:
- update = mapped_jira_id != existing.fields.assignee.key
+ update = issue.assignee[0]['fullname'] != existing.fields.assignee.displayName
except AttributeError:
update = True
@@ -746,20 +1038,30 @@ def _update_assignee(client, existing, issue, updates, config):
# And the issue has an assignee
if not existing.fields.assignee and issue.assignee:
if issue.assignee[0] and update:
- existing.update({'assignee': {'id': mapped_jira_id}})
+ # Update the assignee
+ assign_user(client, issue, existing)
log.info('Updated assignee')
+ if confluence_client.update_stat:
+ confluence_data = {'Assignee': 1}
+ confluence_client.update_stat_page(confluence_data)
return
else:
# Update the assignee if we have someone to assignee it too
if update:
- existing.update({'assignee': {'id': mapped_jira_id}})
+ assign_user(client, issue, existing)
log.info('Updated assignee')
+ if confluence_client.update_stat:
+ confluence_data = {'Assignee': 1}
+ confluence_client.update_stat_page(confluence_data)
else:
if existing.fields.assignee and not issue.assignee:
# Else we should remove all assignees
# Set removeAll flag to true
- existing.update({'assignee': {'name': ''}})
+ assign_user(client, issue, existing, remove_all=True)
log.info('Updated assignee')
+ if confluence_client.update_stat:
+ confluence_data = {'Assignee': 1}
+ confluence_client.update_stat_page(confluence_data)
def _update_jira_labels(issue, labels):
@@ -778,6 +1080,9 @@ def _update_jira_labels(issue, labels):
data = {'labels': _labels}
issue.update(data)
log.info('Updated %s tag(s)' % len(_labels))
+ if confluence_client.update_stat:
+ confluence_data = {'Tags': len(_labels)}
+ confluence_client.update_stat_page(confluence_data)
def _update_tags(updates, existing, issue):
@@ -826,11 +1131,43 @@ def _update_description(existing, issue):
r"Upstream description:(\r\n*|\r*|\n*|.*){quote}((?s).*){quote}",
r"Upstream description: {quote}%s{quote}" % issue.content,
new_description)
+ elif '] Upstream issue status:' in new_description and '] Upstream Reporter:' in new_description:
+ # We need to add a description field
+ today = datetime.today()
+ new_description = re.sub(r'\[[\w\W]*\] Upstream issue status: %s\n\[%s\] Upstream Reporter: %s' % (
+ issue.status, issue.id, issue.reporter['fullname']),
+ r'[%s] Upstream issue status: %s\n[%s] Upstream Reporter: %s\n'
+ r'Upstream description: {quote}%s{quote}' % (
+ today.strftime("%a %b %y - %H:%M"), issue.status, issue.id,
+ issue.reporter['fullname'],
+ issue.content),
+ new_description)
+
+ elif '] Upstream issue status:' in new_description and '] Upstream Reporter:' not in new_description:
+ # We need to add a upstream reporter and description field
+ today = datetime.today()
+ new_description = re.sub(r'\[[\w\W]*\] Upstream issue status: %s' % issue.status,
+ r'[%s] Upstream issue status: %s\n'
+ r'[%s] Upstream Reporter: %s\n'
+ r'Upstream description: {quote}%s{quote}' %
+ (today.strftime("%a %b %y - %H:%M"), issue.status, issue.id,
+ issue.reporter['fullname'], issue.content),
+ new_description)
+ elif '] Upstream issue status:' not in new_description and '] Upstream Reporter:' in new_description:
+ # We need to just add the description field
+ new_description = re.sub(
+ r'\[%s\] Upstream Reporter: %s [\w\W]*' % (issue.id, issue.reporter['fullname']),
+ r'[%s] Upstream Reporter: %s \nUpstream description: {quote} %s {quote}' %
+ (issue.id, issue.reporter['fullname'], issue.content), new_description)
else:
- # Just add description to the top
- upstream_description = "Upstream description: " \
+ # Just add reporter and description to the top
+ upstream_reporter = '[%s] Upstream Reporter: %s' % (
+ issue.id,
+ issue.reporter['fullname']
+ )
+ upstream_description = "%s \nUpstream description: " \
"{quote}%s{quote}" % \
- (issue.content)
+ (upstream_reporter, issue.content)
new_description = '%s \n %s' % \
(upstream_description, new_description)
# Now that we've updated the description (i.e. added
@@ -844,9 +1181,21 @@ def _update_description(existing, issue):
# Now we can update the JIRA issue if we need to
if new_description != existing.fields.description:
+ # This logging is temporary and will be used to debug an
+ # issue regarding phantom updates
+ # Get the diff between new_description and existing
+ diff = difflib.unified_diff(existing.fields.description, new_description)
+ log.info(f"DEBUG: Issue {issue.title}")
+ log.info(f"DEBUG: Diff: {''.join(diff)}")
+ log.info(f"DEBUG: Old: {existing.fields.description}")
+ log.info(f"DEBUG: New: {new_description}")
+
data = {'description': new_description}
existing.update(data)
log.info('Updated description')
+ if confluence_client.update_stat:
+ confluence_data = {'Description': 1}
+ confluence_client.update_stat_page(confluence_data)
def _update_on_close(existing, issue, updates):
@@ -923,6 +1272,11 @@ def sync_with_jira(issue, config):
# Create a client connection for this issue
client = get_jira_client(issue, config)
+ # Check the status of the JIRA client
+ if not config['sync2jira']['develop'] and not check_jira_status(client):
+ log.warning('The JIRA server looks like its down. Shutting down...')
+ raise JIRAError
+
if issue.downstream.get('issue_updates', None):
if issue.source == 'github' and issue.content and \
'github_markdown' in issue.downstream['issue_updates']:
@@ -939,7 +1293,7 @@ def sync_with_jira(issue, config):
log.info("Testing flag is true. Skipping actual update.")
return
# Update relevant metadata (i.e. tags, assignee, etc)
- _update_jira_issue(existing, issue, client, config)
+ _update_jira_issue(existing, issue, client)
return
# If we're *not* configured to do legacy matching (upgrade mode) then there
@@ -960,8 +1314,83 @@ def sync_with_jira(issue, config):
else:
_upgrade_jira_issue(client, match, issue, config)
-def get_jira_username_from_github(config, github_login):
- """ Helper function to get JIRA username from Github login """
- for name, data in config['mapping'].items():
- if name == github_login:
- return data['jira']
\ No newline at end of file
+
+def _close_as_duplicate(client, duplicate, keeper, config):
+ """
+ Helper function to close an issue as a duplicate.
+
+ :param jira.client client: JIRA Client
+ :param jira.resources.Issue duplicate: Duplicate JIRA Issue
+ :param jira.resources.Issue keeper: JIRA issue to keep
+ :param Dict config: Config dict
+ :returns: Nothing
+ """
+ log.info("Closing %s as duplicate of %s", duplicate.permalink(), keeper.permalink())
+ if config['sync2jira']['testing']:
+ log.info("Testing flag is true. Skipping actual delete.")
+ return
+
+ # Find the id of some dropped or done state.
+ transitions = client.transitions(duplicate)
+ transitions = dict([(t['name'], t['id']) for t in transitions])
+ closed = None
+ preferences = ['Dropped', 'Reject', 'Done', 'Closed', 'Closed (2)', ]
+ for preference in preferences:
+ if preference in transitions:
+ closed = transitions[preference]
+ break
+
+ text = 'Marking as duplicate of %s' % keeper.key
+ if any([text in comment.body for comment in client.comments(duplicate)]):
+ log.info("Skipping comment. Already present.")
+ else:
+ client.add_comment(duplicate, text)
+
+ text = '%s is a duplicate.' % duplicate.key
+ if any([text in comment.body for comment in client.comments(keeper)]):
+ log.info("Skipping comment. Already present.")
+ else:
+ client.add_comment(keeper, text)
+
+ if closed:
+ try:
+ client.transition_issue(duplicate, closed, resolution={'name': 'Duplicate'})
+ except Exception as e:
+ if "Field 'resolution' cannot be set" in e.response.text:
+ # Try closing without a specific resolution.
+ try:
+ client.transition_issue(duplicate, closed)
+ except Exception:
+ log.exception("Failed to close %r", duplicate.permalink())
+ else:
+ log.exception("Failed to close %r", duplicate.permalink())
+ else:
+ log.warning("Unable to find close transition for %r" % duplicate.key)
+
+
+def close_duplicates(issue, config):
+ """
+ Function to close duplicate JIRA issues.
+
+ :param sync2jira.intermediary.Issue issue: Upstream Issue
+ :param Dict config: Config dict
+ :returns: Nothing
+ """
+ # Create a client connection for this issue
+ client = get_jira_client(issue, config)
+
+ # Check the status of the JIRA client
+ if not config['sync2jira']['develop'] and not check_jira_status(client):
+ log.warning('The JIRA server looks like its down. Shutting down...')
+ raise JIRAError
+
+ log.info("Looking for dupes of upstream %s, %s", issue.url, issue.title)
+ results = _matching_jira_issue_query(client, issue, config, free=True)
+ if len(results) <= 1:
+ log.info("No duplicates found.")
+ return
+
+ results = sorted(results, key=lambda x: arrow.get(x.fields.created))
+ keeper, duplicates = results[0], results[1:]
+ for duplicate in duplicates:
+ _close_as_duplicate(client, duplicate, keeper, config)
diff --git a/sync2jira/downstream_pr.py b/sync2jira/downstream_pr.py
index 25da5f4..e01ecec 100644
--- a/sync2jira/downstream_pr.py
+++ b/sync2jira/downstream_pr.py
@@ -25,12 +25,13 @@
# Local Modules
import sync2jira.downstream_issue as d_issue
from sync2jira.intermediary import Issue, matcher
+from sync2jira.confluence_client import confluence_client
log = logging.getLogger('sync2jira')
-def format_comment(pr, pr_suffix, client, config):
+def format_comment(pr, pr_suffix, client):
"""
Formats comment to link PR.
:param sync2jira.intermediary.PR pr: Upstream issue we're pulling data from
@@ -40,21 +41,25 @@ def format_comment(pr, pr_suffix, client, config):
:rtype: String
"""
# Find the pr.reporters JIRA username
- ret = get_jira_username_from_github(config, pr.reporter)
- if ret:
- reporter = f"[~accountid:{ret}]"
+ ret = client.search_users(pr.reporter)
+ if len(ret) > 0:
+        # Loop through ret till we find a match
+ for user in ret:
+ if user.displayName == pr.reporter:
+ reporter = f"[~{user.key}]"
+ break
else:
reporter = pr.reporter
if 'closed' in pr_suffix:
- comment = f"Merge request [{pr.title.replace(']', '').replace('[', '')}|{pr.url}] was closed."
+ comment = f"Merge request [{pr.title}| {pr.url}] was closed."
elif 'reopened' in pr_suffix:
- comment = f"Merge request [{pr.title.replace(']', '').replace('[', '')}|{pr.url}] was reopened."
+ comment = f"Merge request [{pr.title}| {pr.url}] was reopened."
elif 'merged' in pr_suffix:
- comment = f"Merge request [{pr.title.replace(']', '').replace('[', '')}|{pr.url}] was merged!"
+ comment = f"Merge request [{pr.title}| {pr.url}] was merged!"
else:
comment = f"{reporter} mentioned this issue in " \
- f"merge request [{pr.title.replace(']', '').replace('[', '')}| {pr.url}]."
+ f"merge request [{pr.title}| {pr.url}]."
return comment
@@ -92,33 +97,34 @@ def comment_exists(client, existing, new_comment):
return False
-def update_jira_issue(existing, pr, client, config):
+def update_jira_issue(existing, pr, client):
"""
Updates an existing JIRA issue (i.e. tags, assignee, comments etc).
:param jira.resources.Issue existing: Existing JIRA issue that was found
:param sync2jira.intermediary.PR pr: Upstream issue we're pulling data from
:param jira.client.JIRA client: JIRA Client
- :param dict config: Config dict
:returns: Nothing
"""
# Get our updates array
updates = pr.downstream.get('pr_updates', {})
# Format and add comment to indicate PR has been linked
- new_comment = format_comment(pr, pr.suffix, client, config)
-
+ new_comment = format_comment(pr, pr.suffix, client)
# See if the issue_link and comment exists
exists = issue_link_exists(client, existing, pr)
comment_exist = comment_exists(client, existing, new_comment)
# Check if the comment if already there
if not exists:
+ if not comment_exist:
+ log.info(f"Added comment for PR {pr.title} on JIRA {pr.jira_key}")
+ client.add_comment(existing, new_comment)
# Attach remote link
remote_link = dict(url=pr.url, title=f"[PR] {pr.title}")
d_issue.attach_link(client, existing, remote_link)
- if not comment_exist:
- log.info(f"Added comment for PR {pr.title} on JIRA {pr.jira_key}")
- client.add_comment(existing, new_comment)
+ if confluence_client.update_stat:
+ confluence_data = {'Comments': 1}
+ confluence_client.update_stat_page(confluence_data)
# Only synchronize link_transition for listings that op-in
if any('merge_transition' in item for item in updates) and 'merged' in pr.suffix:
@@ -144,14 +150,12 @@ def update_transition(client, existing, pr, transition_type):
:returns: Nothing
"""
# Get our closed status
- link_status = [transition for transition in pr.downstream.get('pr_updates', []) if transition_type in transition]
- if link_status:
- closed_status = link_status[0][transition_type]
+ closed_status = list(filter(lambda d: transition_type in d, pr.downstream.get('pr_updates', {})))[0][transition_type]
- # Update the state
- d_issue.change_status(client, existing, closed_status, pr)
+ # Update the state
+ d_issue.change_status(client, existing, closed_status, pr)
- log.info(f"Updated {transition_type} for issue {pr.title}")
+ log.info(f"Updated {transition_type} for issue {pr.title}")
def sync_with_jira(pr, config):
@@ -177,6 +181,11 @@ def sync_with_jira(pr, config):
# Create a client connection for this issue
client = d_issue.get_jira_client(pr, config)
+ # Check the status of the JIRA client
+ if not config['sync2jira']['develop'] and not d_issue.check_jira_status(client):
+ log.warning('The JIRA server looks like its down. Shutting down...')
+ raise JIRAError
+
# Find our JIRA issue if one exists
if isinstance(pr, Issue):
pr.jira_key = matcher(pr.content, pr.comments)
@@ -198,11 +207,5 @@ def sync_with_jira(pr, config):
# Else start syncing relevant information
log.info(f"Syncing PR {pr.title}")
- update_jira_issue(existing, pr, client, config)
+ update_jira_issue(existing, pr, client)
log.info(f"Done syncing PR {pr.title}")
-
-def get_jira_username_from_github(config, github_login):
- """ Helper function to get JIRA username from Github login """
- for name, data in config['mapping'].items():
- if name == github_login:
- return data['jira']
\ No newline at end of file
diff --git a/sync2jira/email_template.jinja b/sync2jira/email_template.jinja
new file mode 100644
index 0000000..afac25b
--- /dev/null
+++ b/sync2jira/email_template.jinja
@@ -0,0 +1,29 @@
+
+
+ Hello {{ user['name'] }}, It looks like you have some duplicate issues for
+ upstream issue {{ issue._title }}
+ This issue was selected:
+
+ But these issues were also found:
+
+ Make sure to mark these duplicate issues as 'Closed' to avoid these emails!
+ {% if admins|length > 0 %}
+ Questions? Get in contact with one of the admins:
+ {% for admin in admins %}
+ {{ admin.name }}
+ {{ "," if not loop.last }}
+ {% endfor %}
+
+ {% endif %}
+
+
\ No newline at end of file
diff --git a/sync2jira/failure_template.jinja b/sync2jira/failure_template.jinja
new file mode 100644
index 0000000..3b6ac1f
--- /dev/null
+++ b/sync2jira/failure_template.jinja
@@ -0,0 +1,7 @@
+
+
+ Looks like Sync2Jira has failed!
+ Here is the full traceback:
+ {{ traceback }}
+
+
\ No newline at end of file
diff --git a/sync2jira/intermediary.py b/sync2jira/intermediary.py
index b9865b4..e3a36cd 100644
--- a/sync2jira/intermediary.py
+++ b/sync2jira/intermediary.py
@@ -60,6 +60,53 @@ def title(self):
def upstream_title(self):
return self._title
+ @classmethod
+ def from_pagure(cls, upstream, issue, config):
+ """Helper function to create intermediary object."""
+ base = config['sync2jira'].get('pagure_url', 'https://pagure.io')
+ upstream_source = 'pagure'
+ comments = []
+ for comment in issue['comments']:
+ # Only add comments that are not Metadata updates
+ if '**Metadata Update' in comment['comment']:
+ continue
+ # Else add the comment
+ # Convert the date to datetime
+ comment['date_created'] = datetime.fromtimestamp(float(comment['date_created']))
+ comments.append({
+ 'author': comment['user']['name'],
+ 'body': comment['comment'],
+ 'name': comment['user']['name'],
+ 'id': comment['id'],
+ 'date_created': comment['date_created'],
+ 'changed': None
+ })
+
+ # Perform any mapping
+ mapping = config['sync2jira']['map'][upstream_source][upstream].get('mapping', [])
+
+ # Check for fixVersion
+ if any('fixVersion' in item for item in mapping):
+ map_fixVersion(mapping, issue)
+
+ return Issue(
+ source=upstream_source,
+ title=issue['title'],
+ url=base + '/%s/issue/%i' % (upstream, issue['id']),
+ upstream=upstream,
+ config=config,
+ comments=comments,
+ tags=issue['tags'],
+ fixVersion=[issue['milestone']],
+ priority=issue['priority'],
+ content=issue['content'],
+ reporter=issue['user'],
+ assignee=issue['assignee'],
+ status=issue['status'],
+ id=issue['date_created'],
+ upstream_id=issue['id']
+ )
+
@classmethod
def from_github(cls, upstream, issue, config):
"""Helper function to create intermediary object."""
@@ -124,6 +171,8 @@ def __init__(self, source, jira_key, title, url, upstream, config,
self.url = url
self.upstream = upstream
self.comments = comments
+ # self.tags = tags
+ # self.fixVersion = fixVersion
self.priority = priority
# JIRA treats utf-8 characters in ways we don't totally understand, so scrub content down to
@@ -142,6 +191,7 @@ def __init__(self, source, jira_key, title, url, upstream, config,
self.id = str(id)
self.suffix = suffix
self.match = match
+ # self.upstream_id = upstream_id
if not downstream:
self.downstream = config['sync2jira']['map'][self.source][upstream]
@@ -153,6 +203,59 @@ def __init__(self, source, jira_key, title, url, upstream, config,
def title(self):
return u'[%s] %s' % (self.upstream, self._title)
+ @classmethod
+ def from_pagure(self, upstream, pr, suffix, config):
+ """Helper function to create intermediary object."""
+ # Set our upstream source
+ upstream_source = 'pagure'
+
+ # Format our comments
+ comments = []
+ for comment in pr['comments']:
+ # Only add comments that are not Metadata updates
+ if '**Metadata Update' in comment['comment']:
+ continue
+ # Else add the comment
+ # Convert the date to datetime
+ comment['date_created'] = datetime.fromtimestamp(
+ float(comment['date_created']))
+ comments.append({
+ 'author': comment['user']['name'],
+ 'body': comment['comment'],
+ 'name': comment['user']['name'],
+ 'id': comment['id'],
+ 'date_created': comment['date_created'],
+ 'changed': None
+ })
+
+ # Build our URL
+ url = f"https://pagure.io/{pr['project']['name']}/pull-request/{pr['id']}"
+
+ # Match a JIRA
+ match = matcher(pr.get('initial_comment'), comments)
+
+ # Return our PR object
+ return PR(
+ source=upstream_source,
+ jira_key=match,
+ title=pr['title'],
+ url=url,
+ upstream=upstream,
+ config=config,
+ comments=comments,
+ # tags=issue['labels'],
+ # fixVersion=[issue['milestone']],
+ priority=None,
+ content=pr['initial_comment'],
+ reporter=pr['user']['fullname'],
+ assignee=pr['assignee'],
+ status=pr['status'],
+ id=pr['id'],
+ suffix=suffix,
+ match=match,
+ # upstream_id=issue['number']
+ )
+
@classmethod
def from_github(self, upstream, pr, suffix, config):
"""Helper function to create intermediary object."""
@@ -177,6 +280,16 @@ def from_github(self, upstream, pr, suffix, config):
# Match to a JIRA
match = matcher(pr.get("body"), comments)
+ # Figure out what state we're transitioning too
+ if 'reopened' in suffix:
+ suffix = 'reopened'
+ elif 'closed' in suffix:
+ # Check if we're merging or closing
+ if pr['merged']:
+ suffix = 'merged'
+ else:
+ suffix = 'closed'
+
# Return our PR object
return PR(
source=upstream_source,
@@ -186,13 +299,16 @@ def from_github(self, upstream, pr, suffix, config):
upstream=upstream,
config=config,
comments=comments,
+ # tags=issue['labels'],
+ # fixVersion=[issue['milestone']],
priority=None,
content=pr.get('body'),
reporter=pr['user']['fullname'],
- assignee=pr['assignees'],
+ assignee=pr['assignee'],
# GitHub PRs do not have status
status=None,
id=pr['number'],
+ # upstream_id=issue['number'],
suffix=suffix,
match=match,
)
diff --git a/sync2jira/mailer.py b/sync2jira/mailer.py
new file mode 100644
index 0000000..35a9324
--- /dev/null
+++ b/sync2jira/mailer.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+"""
+This script is used to send emails
+"""
+
+import smtplib
+import os
+from email.mime.text import MIMEText
+from email.mime.multipart import MIMEMultipart
+
+DEFAULT_FROM = os.environ.get('DEFAULT_FROM')
+DEFAULT_SERVER = os.environ.get('DEFAULT_SERVER')
+
+
+def send_mail(recipients, subject, text, cc):
+ """
+ Sends email to recipients.
+
+ :param List recipients: recipients of email
+ :param String subject: subject of the email
+ :param String text: HTML text of the email
+ :param String cc: cc of the email
+ :returns: Nothing
+ """
+ _cfg = {}
+ _cfg.setdefault("server", DEFAULT_SERVER)
+ _cfg.setdefault("from", DEFAULT_FROM)
+ sender = _cfg["from"]
+ msg = MIMEMultipart('related')
+ msg["Subject"] = subject
+ msg["From"] = sender
+ msg["To"] = ", ".join(recipients)
+ if cc:
+ msg['Cc'] = ", ".join(cc)
+ server = smtplib.SMTP(_cfg["server"])
+ part = MIMEText(text, 'html', 'utf-8')
+ msg.attach(part)
+ server.sendmail(sender, recipients, msg.as_string())
+ server.quit()
diff --git a/sync2jira/main.py b/sync2jira/main.py
index f0f003c..198afe2 100644
--- a/sync2jira/main.py
+++ b/sync2jira/main.py
@@ -16,7 +16,7 @@
# License along with sync2jira; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110.15.0 USA
#
-# Authors: Ralph Bean , Sid Premkumar
+# Authors: Ralph Bean
""" Sync github and pagure issues to a jira instance, via fedmsg.
Run with systemd, please.
@@ -29,9 +29,10 @@
import requests
from copy import deepcopy
import os
-import json
# 3rd Party Modules
+import fedmsg
+import fedmsg.config
import jinja2
from requests_kerberos import HTTPKerberosAuth, OPTIONAL
@@ -40,7 +41,9 @@
import sync2jira.upstream_pr as u_pr
import sync2jira.downstream_issue as d_issue
import sync2jira.downstream_pr as d_pr
+from sync2jira.mailer import send_mail
from sync2jira.intermediary import matcher
+from sync2jira.confluence_client import confluence_client
# Set up our logging
FORMAT = "[%(asctime)s] %(levelname)s: %(message)s"
@@ -48,11 +51,66 @@
logging.basicConfig(format=FORMAT, level=logging.DEBUG)
logging.basicConfig(format=FORMAT, level=logging.WARNING)
log = logging.getLogger('sync2jira')
-
+if os.environ.get('CONFLUENCE_SPACE') == 'mock_confluence_space':
+ # If we are debugging save log output
+ try:
+ hdlr = logging.FileHandler('sync2jira_main.log')
+ log.addHandler(hdlr)
+ log.setLevel(logging.DEBUG)
+ except: # noqa: E722
+ log.error("Unable to create log file!")
+
+# Only allow fedmsg logs that are critical
+fedmsg_log = logging.getLogger('fedmsg.crypto.utils')
+fedmsg_log.setLevel(50)
+
+remote_link_title = "Upstream issue"
+failure_email_subject = "Sync2Jira Has Failed!"
+
+# Issue related handlers
+issue_handlers = {
+ # GitHub
+ 'github.issue.opened': u_issue.handle_github_message,
+ 'github.issue.reopened': u_issue.handle_github_message,
+ 'github.issue.labeled': u_issue.handle_github_message,
+ 'github.issue.assigned': u_issue.handle_github_message,
+ 'github.issue.unassigned': u_issue.handle_github_message,
+ 'github.issue.closed': u_issue.handle_github_message,
+ 'github.issue.comment': u_issue.handle_github_message,
+ 'github.issue.unlabeled': u_issue.handle_github_message,
+ 'github.issue.milestoned': u_issue.handle_github_message,
+ 'github.issue.demilestoned': u_issue.handle_github_message,
+ 'github.issue.edited': u_issue.handle_github_message,
+ # Pagure
+ 'pagure.issue.new': u_issue.handle_pagure_message,
+ 'pagure.issue.tag.added': u_issue.handle_pagure_message,
+ 'pagure.issue.comment.added': u_issue.handle_pagure_message,
+ 'pagure.issue.comment.edited': u_issue.handle_pagure_message,
+ 'pagure.issue.assigned.added': u_issue.handle_pagure_message,
+ 'pagure.issue.assigned.reset': u_issue.handle_pagure_message,
+ 'pagure.issue.edit': u_issue.handle_pagure_message,
+ 'pagure.issue.drop': u_issue.handle_pagure_message,
+ 'pagure.issue.tag.removed': u_issue.handle_pagure_message,
+}
+
+# PR related handlers
+pr_handlers = {
+ # GitHub
+ 'github.pull_request.opened': u_pr.handle_github_message,
+ 'github.pull_request.edited': u_pr.handle_github_message,
+ 'github.issue.comment': u_pr.handle_github_message,
+ 'github.pull_request.reopened': u_pr.handle_github_message,
+ 'github.pull_request.closed': u_pr.handle_github_message,
+ # Pagure
+ 'pagure.pull-request.new': u_pr.handle_pagure_message,
+ 'pagure.pull-request.comment.added': u_pr.handle_pagure_message,
+ 'pagure.pull-request.initial_comment.edited': u_pr.handle_pagure_message,
+}
+DATAGREPPER_URL = "http://apps.fedoraproject.org/datagrepper/raw"
INITIALIZE = os.getenv('INITIALIZE', '0')
-def load_config(config=os.environ['SYNC2JIRA_CONFIG']):
+def load_config(loader=fedmsg.config.load_config):
"""
Generates and validates the config file \
that will be used by fedmsg and JIRA client.
@@ -61,8 +119,10 @@ def load_config(config=os.environ['SYNC2JIRA_CONFIG']):
:returns: The config dict to be used later in the program
:rtype: Dict
"""
- with open(config, 'r') as jsonFile:
- config = json.loads(jsonFile.read())
+ config = loader()
+
+ # Force some vars that we like
+ config['mute'] = True
# Validate it
if 'sync2jira' not in config:
@@ -71,7 +131,7 @@ def load_config(config=os.environ['SYNC2JIRA_CONFIG']):
if 'map' not in config['sync2jira']:
raise ValueError("No sync2jira.map section found in fedmsg.d/ config")
- possible = set(['github'])
+ possible = set(['pagure', 'github'])
specified = set(config['sync2jira']['map'].keys())
if not specified.issubset(possible):
message = "Specified handlers: %s, must be a subset of %s."
@@ -81,14 +141,7 @@ def load_config(config=os.environ['SYNC2JIRA_CONFIG']):
))
if 'jira' not in config['sync2jira']:
- raise ValueError("No sync2jira.jira section found in config")
-
- # Update config based on env vars
- config['sync2jira']['github_token'] = os.environ['SYNC2JIRA_GITHUB_TOKEN']
- config['sync2jira']['jira'][config['sync2jira']['default_jira_instance']]['basic_auth'] = (
- os.environ['SYNC2JIRA_JIRA_USERNAME'],
- os.environ['SYNC2JIRA_JIRA_PASSWORD']
- )
+ raise ValueError("No sync2jira.jira section found in fedmsg.d/ config")
# Provide some default values
defaults = {
@@ -100,40 +153,32 @@ def load_config(config=os.environ['SYNC2JIRA_CONFIG']):
return config
-def listen(config, event_emitter):
+def listen(config):
"""
Listens to activity on upstream repos on pagure and github \
via fedmsg, and syncs new issues there to the JIRA instance \
defined in 'fedmsg.d/sync2jira.py'
:param Dict config: Config dict
- :param rxObject event_emitter: Event emitter to wait for
:returns: Nothing
"""
if not config['sync2jira'].get('listen'):
log.info("`listen` is disabled. Exiting.")
return
- log.info("Waiting for a relevant webhook message to arrive...")
- event_emitter.subscribe(
- lambda x: handle_message(config, x)
- )
+ log.info("Waiting for a relevant fedmsg message to arrive...")
+ for _, _, topic, msg in fedmsg.tail_messages(**config):
+ idx = msg['msg_id']
+ suffix = ".".join(topic.split('.')[3:])
+ log.debug("Encountered %r %r %r", suffix, topic, idx)
+
+ if suffix not in issue_handlers and suffix not in pr_handlers:
+ continue
+
+ log.debug("Handling %r %r %r", suffix, topic, idx)
+
+ handle_msg(msg, suffix, config)
- while True:
- # Constantly refresh the config file
- config = load_config()
- sleep(10)
-
-def handle_message(config, incoming_json):
- if ('pull_request' in incoming_json.keys()):
- pr = u_pr.handle_github_message(config, incoming_json)
- if pr:
- d_pr.sync_with_jira(pr, config)
- elif ('issue' in incoming_json.keys()):
- issue = u_issue.handle_github_message(config, incoming_json)
- if issue:
- d_issue.sync_with_jira(issue, config)
-
def initialize_issues(config, testing=False, repo_name=None):
"""
@@ -150,6 +195,18 @@ def initialize_issues(config, testing=False, repo_name=None):
log.info("Running initialization to sync all issues from upstream to jira")
log.info("Testing flag is %r", config['sync2jira']['testing'])
mapping = config['sync2jira']['map']
+ for upstream in mapping.get('pagure', {}).keys():
+ if 'issue' not in mapping.get('pagure', {}).get(upstream, {}).get('sync', []):
+ continue
+ if repo_name is not None and upstream != repo_name:
+ continue
+ for issue in u_issue.pagure_issues(upstream, config):
+ try:
+ d_issue.sync_with_jira(issue, config)
+ except Exception as e:
+ log.error(f"Failed on {issue}\nException: {e}")
+ raise
+ log.info("Done with pagure issue initialization.")
for upstream in mapping.get('github', {}).keys():
if 'issue' not in mapping.get('github', {}).get(upstream, {}).get('sync', []):
@@ -176,6 +233,7 @@ def initialize_issues(config, testing=False, repo_name=None):
else:
if not config['sync2jira']['develop']:
# Only send the failure email if we are not developing
+ report_failure(config)
raise
log.info("Done with github issue initialization.")
@@ -195,6 +253,15 @@ def initialize_pr(config, testing=False, repo_name=None):
log.info("Running initialization to sync all PRs from upstream to jira")
log.info("Testing flag is %r", config['sync2jira']['testing'])
mapping = config['sync2jira']['map']
+ for upstream in mapping.get('pagure', {}).keys():
+ if 'pullrequest' not in mapping.get('pagure', {}).get(upstream, {}).get('sync', []):
+ continue
+ if repo_name is not None and upstream != repo_name:
+ continue
+ for pr in u_pr.pagure_prs(upstream, config):
+ if pr:
+ d_pr.sync_with_jira(pr, config)
+ log.info("Done with pagure PR initialization.")
for upstream in mapping.get('github', {}).keys():
if 'pullrequest' not in mapping.get('github', {}).get(upstream, {}).get('sync', []):
@@ -222,16 +289,144 @@ def initialize_pr(config, testing=False, repo_name=None):
else:
if not config['sync2jira']['develop']:
# Only send the failure email if we are not developing
+ report_failure(config)
raise
log.info("Done with github PR initialization.")
-def main(event_emitter):
+
+def initialize_recent(config):
+ """
+ Initializes based on the recent history of datagrepper
+
+ :param Dict config: Config dict
+ :return: Nothing
+ """
+ # Query datagrepper
+ ret = query(category=['github', 'pagure'], delta=int(600), rows_per_page=100)
+
+ # Loop and sync
+ for entry in ret:
+ # Extract our topic
+ suffix = ".".join(entry['topic'].split('.')[3:])
+ log.debug("Encountered %r %r", suffix, entry['topic'])
+
+ # Disregard if it's invalid
+ if suffix not in issue_handlers and suffix not in pr_handlers:
+ continue
+
+ # Deal with the message
+ log.debug("Handling %r %r", suffix, entry['topic'])
+ msg = entry['msg']
+ handle_msg({'msg': msg}, suffix, config)
+
+
+def handle_msg(msg, suffix, config):
+ """
+ Function to handle incoming message from datagrepper
+ :param Dict msg: Incoming message
+ :param String suffix: Incoming suffix
+ :param Dict config: Config dict
+ """
+ issue = None
+ pr = None
+ # Github '.issue.' is used for both PR and Issue
+ # Check for that edge case
+ if suffix == 'github.issue.comment':
+ if 'pull_request' in msg['msg']['issue'] and msg['msg']['action'] != 'deleted':
+ # pr_filter turns on/off the filtering of PRs
+ pr = issue_handlers[suffix](msg, config, pr_filter=False)
+ if not pr:
+ return
+ # Issues do not have suffix and reporter needs to be reformatted
+ pr.suffix = suffix
+ pr.reporter = pr.reporter.get('fullname')
+ setattr(pr, 'match', matcher(pr.content, pr.comments))
+ else:
+ issue = issue_handlers[suffix](msg, config)
+ elif suffix in issue_handlers:
+ issue = issue_handlers[suffix](msg, config)
+ elif suffix in pr_handlers:
+ pr = pr_handlers[suffix](msg, config, suffix)
+
+ if not issue and not pr:
+ return
+ if issue:
+ d_issue.sync_with_jira(issue, config)
+ elif pr:
+ d_pr.sync_with_jira(pr, config)
+
+
+def query(limit=None, **kwargs):
+ """
+ Run query on Datagrepper
+
+ Args:
+ limit: the max number of messages to fetch at a time
+ kwargs: keyword arguments to build request parameters
+ """
+ # Pack up the kwargs into a parameter list for request
+ params = deepcopy(kwargs)
+
+ # Set up for paging requests
+ all_results = []
+ page = params.get('page', 1)
+
+ # Important to set ASC order when paging to avoid duplicates
+ params['order'] = 'asc'
+
+ results = get(params=params)
+
+ # Collect the messages
+ all_results.extend(results['raw_messages'])
+
+ # Set up for loop
+ fetched = results['count']
+ total = limit or results['total']
+
+ # Fetch results until no more are left
+ while fetched < total:
+ page += 1
+ params['page'] = page
+
+ results = get(params=params)
+ count = results['count']
+ fetched += count
+
+ # if we missed the condition and haven't fetched any
+ if count == 0:
+ break
+
+ all_results.extend(results['raw_messages'])
+
+ return all_results
+
+
+def get(params):
+ url = DATAGREPPER_URL
+ headers = {'Accept': 'application/json', }
+
+ response = requests.get(url=url, params=params, headers=headers,
+ auth=HTTPKerberosAuth(mutual_authentication=OPTIONAL))
+ return response.json()
+
+
+def main(runtime_test=False, runtime_config=None):
"""
Main function to check for initial sync
- and listen.
+ and listen for fedmsg.
+
+ :param Bool runtime_test: Flag to indicate if we are performing a runtime test. Default false
+ :param Dict runtime_config: Config file to be used if it is a runtime test. runtime_test must be true
+ :return: Nothing
"""
- # Load config
- config = load_config()
+ # Load config and disable warnings
+ if not runtime_test or not runtime_config:
+ config = load_config()
+ else:
+ config = runtime_config
+
+ if config['sync2jira']['confluence_statistics']:
+ confluence_client.update_stat_value(True)
logging.basicConfig(level=logging.INFO)
warnings.simplefilter("ignore")
@@ -245,17 +440,93 @@ def main(event_emitter):
initialize_issues(config)
log.info("Initializing PRs...")
initialize_pr(config)
+ if runtime_test:
+ return
else:
# Pool datagrepper from the last 10 mins
- log.info("Initialization False...")
+ log.info("Initialization False. Pulling data from datagrepper...")
+ initialize_recent(config)
try:
- listen(config, event_emitter)
+ listen(config)
except KeyboardInterrupt:
pass
except: # noqa: E722
if not config['sync2jira']['develop']:
# Only send the failure email if we are not developing
+ report_failure(config)
raise
+
+def report_failure(config):
+ """
+ Helper function to alert admins in case of failure.
+
+
+ :param Dict config: Config dict for JIRA
+ """
+ # Email our admins with the traceback
+ templateLoader = jinja2.FileSystemLoader(
+ searchpath='usr/local/src/sync2jira/sync2jira/')
+ templateEnv = jinja2.Environment(loader=templateLoader)
+ template = templateEnv.get_template('failure_template.jinja')
+ html_text = template.render(traceback=traceback.format_exc())
+
+ # Send mail
+ send_mail(recipients=[config['sync2jira']['mailing-list']],
+ cc=None,
+ subject=failure_email_subject,
+ text=html_text)
+
+
+def list_managed():
+ """
+ Function to list URL for issues under map in config.
+
+ :return: Nothing
+ """
+ config = load_config()
+ mapping = config['sync2jira']['map']
+ warnings.simplefilter("ignore")
+
+ for upstream in mapping.get('pagure', {}).keys():
+ for issue in u_issue.pagure_issues(upstream, config):
+ print(issue.url)
+
+ for upstream in mapping.get('github', {}).keys():
+ for issue in u_issue.github_issues(upstream, config):
+ print(issue.url)
+
+
+def close_duplicates():
+ """
Function to close duplicate issues. Uses downstream:close_duplicates.
+
+ :return: Nothing
+ """
+ config = load_config()
+ logging.basicConfig(level=logging.INFO)
+ log.info("Testing flag is %r", config['sync2jira']['testing'])
+ mapping = config['sync2jira']['map']
+ warnings.simplefilter("ignore")
+
+ for upstream in mapping.get('pagure', {}).keys():
+ for issue in u_issue.pagure_issues(upstream, config):
+ try:
+ d_issue.close_duplicates(issue, config)
+ except Exception:
+ log.error("Failed on %r", issue)
+ raise
+ log.info("Done with pagure duplicates.")
+
+ for upstream in mapping.get('github', {}).keys():
+ for issue in u_issue.github_issues(upstream, config):
+ try:
+ d_issue.close_duplicates(issue, config)
+ except Exception:
+ log.error("Failed on %r", issue)
+ raise
+ log.info("Done with github duplicates.")
+
+
if __name__ == '__main__':
- main()
\ No newline at end of file
+ main()
diff --git a/sync2jira/upstream_issue.py b/sync2jira/upstream_issue.py
index 4ee3b75..834453c 100644
--- a/sync2jira/upstream_issue.py
+++ b/sync2jira/upstream_issue.py
@@ -36,37 +36,68 @@
log = logging.getLogger('sync2jira')
-def handle_github_message(config, msg):
+def handle_github_message(msg, config, pr_filter=True):
"""
- Handle GitHub message from webhook.
+ Handle GitHub message from FedMsg.
- :param Dict msg: webhook Message
+ :param Dict msg: FedMsg Message
:param Dict config: Config File
+ :param Bool pr_filter: Switch to ignore pull_requests
:returns: Issue object
:rtype: sync2jira.intermediary.Issue
"""
- owner = msg['repository']['owner']['login']
- repo = msg['repository']['name']
-
+ owner = msg['msg']['repository']['owner']['login']
+ repo = msg['msg']['repository']['name']
upstream = '{owner}/{repo}'.format(owner=owner, repo=repo)
mapped_repos = config['sync2jira']['map']['github']
if upstream not in mapped_repos:
log.debug("%r not in Github map: %r", upstream, mapped_repos.keys())
return None
+ elif 'issue' not in mapped_repos[upstream]['sync'] and pr_filter is True:
+ log.debug("%r not in Github Issue map: %r", upstream, mapped_repos.keys())
+ return None
+ elif 'pullrequest' not in mapped_repos[upstream]['sync'] and pr_filter is False:
+ log.debug("%r not in Github PR map: %r", upstream, mapped_repos.keys())
+ return None
+
+ _filter = config['sync2jira']\
+ .get('filters', {})\
+ .get('github', {})\
+ .get(upstream, {})
+
+ for key, expected in _filter.items():
+ # special handling for label: we look for it in the list of msg labels
+ if key == 'labels':
+ actual = [label['name'] for label in msg['msg']['issue']['labels']]
+ if expected not in actual:
+ log.debug("Label %s not set on issue: %s", expected, upstream)
+ return None
+ else:
+ # direct comparison
+ actual = msg['msg']['issue'].get(key)
+ if actual != expected:
+ log.debug("Actual %r %r != expected %r on issue %s",
+ key, actual, expected, upstream)
+ return None
+
+ if pr_filter and 'pull_request' in msg['msg']['issue']:
+ if not msg['msg']['issue'].get('closed_at', None):
+ log.debug("%r is a pull request. Ignoring.", msg['msg']['issue'].get('html_url'))
+ return None
# Initialize Github object so we can get their full name (instead of their username)
# And get comments if needed
github_client = Github(config['sync2jira']['github_token'], retry=5)
# If there are no comments just make an empty array
- if msg['issue']['comments'] == 0:
- msg['issue']['comments'] = []
+ if msg['msg']['issue']['comments'] == 0:
+ msg['msg']['issue']['comments'] = []
else:
# We have multiple comments and need to make api call to get them
repo = github_client.get_repo(upstream)
comments = []
- github_issue = repo.get_issue(number=msg['issue']['number'])
+ github_issue = repo.get_issue(number=msg['msg']['issue']['number'])
for comment in github_issue.get_comments():
# First make API call to get the users name
comments.append({
@@ -78,40 +109,160 @@ def handle_github_message(config, msg):
'changed': None
})
# Assign the message with the newly formatted comments :)
- msg['issue']['comments'] = comments
+ msg['msg']['issue']['comments'] = comments
# Search for the user
- reporter = github_client.get_user(msg['issue']['user']['login'])
+ reporter = github_client.get_user(msg['msg']['issue']['user']['login'])
# Update the reporter field in the message (to match Pagure format)
if reporter.name:
- msg['issue']['user']['fullname'] = reporter.name
+ msg['msg']['issue']['user']['fullname'] = reporter.name
else:
- msg['issue']['user']['fullname'] = \
- msg['issue']['user']['login']
+ msg['msg']['issue']['user']['fullname'] = \
+ msg['msg']['issue']['user']['login']
# Now do the same thing for the assignees
assignees = []
- for person in msg['issue']['assignees']:
+ for person in msg['msg']['issue']['assignees']:
assignee = github_client.get_user(person['login'])
assignees.append({'fullname': assignee.name})
# Update the assignee field in the message (to match Pagure format)
- msg['issue']['assignees'] = assignees
+ msg['msg']['issue']['assignees'] = assignees
# Update the label field in the message (to match Pagure format)
- if msg['issue']['labels']:
+ if msg['msg']['issue']['labels']:
# loop through all the labels on Github and add them
# to the new label list and then reassign the message
new_label = []
- for label in msg['issue']['labels']:
+ for label in msg['msg']['issue']['labels']:
new_label.append(label['name'])
- msg['issue']['labels'] = new_label
+ msg['msg']['issue']['labels'] = new_label
# Update the milestone field in the message (to match Pagure format)
- if msg['issue']['milestone']:
- msg['issue']['milestone'] = msg['issue']['milestone']['title']
+ if msg['msg']['issue']['milestone']:
+ msg['msg']['issue']['milestone'] = msg['msg']['issue']['milestone']['title']
+
+ return i.Issue.from_github(upstream, msg['msg']['issue'], config)
- return i.Issue.from_github(upstream, msg['issue'], config)
+
+def handle_pagure_message(msg, config):
+ """
+ Handle Pagure message from FedMsg.
+
+ :param Dict msg: FedMsg Message
+ :param Dict config: Config File
+ :returns: Issue object
+ :rtype: sync2jira.intermediary.Issue
+ """
+ upstream = msg['msg']['project']['name']
+ ns = msg['msg']['project'].get('namespace') or None
+ if ns:
+ upstream = '{ns}/{upstream}'.format(ns=ns, upstream=upstream)
+ mapped_repos = config['sync2jira']['map']['pagure']
+
+ if upstream not in mapped_repos:
+ log.debug("%r not in Pagure map: %r", upstream, mapped_repos.keys())
+ return None
+ elif 'issue' not in mapped_repos[upstream]['sync']:
+ log.debug("%r not in Pagure issue map: %r", upstream, mapped_repos.keys())
+ return None
+
+ _filter = config['sync2jira']\
+ .get('filters', {})\
+ .get('pagure', {}) \
+ .get(upstream, {})
+
+ if _filter:
+ for key, expected in _filter.items():
+ # special handling for tag: we look for it in the list of msg tags
+ if key == 'tags':
+ actual = msg['msg']['issue'].get('tags', []) + msg['msg'].get('tags', [])
+
+ # Some messages send tags as strings, others as dicts. Handle both.
+ actual = \
+ [tag['name'] for tag in actual if isinstance(tag, dict)] + \
+ [tag for tag in actual if isinstance(tag, string_type)]
+
+ intersection = set(actual) & set(expected)
+ if not intersection:
+ log.debug("None of %r in %r on issue: %s",
+ expected, actual, upstream)
+ return None
+ else:
+ # direct comparison
+ actual = msg['msg']['issue'].get(key)
+ if actual != expected:
+ log.debug("Actual %r %r != expected %r on issue: %s",
+ key, actual, expected, upstream)
+ return None
+
+ # If this is a dropped issue upstream
+ try:
+ if msg['topic'] == 'io.pagure.prod.pagure.issue.drop':
+ msg['msg']['issue']['status'] = 'Dropped'
+ except KeyError:
+ # Otherwise do nothing
+ pass
+
+ # If this is a tag edit upstream
+ try:
+ # Add all updated tags to the tags on the issue
+ for tag in msg['msg']['tags']:
+ msg['msg']['issue']['tags'].append(tag)
+ except KeyError:
+ # Otherwise do nothing
+ pass
+
+ # If this is a comment edit
+ try:
+ # Add it to the comments on the issue
+ msg['msg']['issue']['comments'].append(msg['msg']['comment'])
+ except KeyError:
+ # Otherwise do nothing
+ pass
+
+ # Format the assignee field to match github (i.e. in a list)
+ msg['msg']['issue']['assignee'] = [msg['msg']['issue']['assignee']]
+
+ return i.Issue.from_pagure(upstream, msg['msg']['issue'], config)
+
+
+def pagure_issues(upstream, config):
+ """
+ Creates a Generator for all Pagure issues in upstream repo.
+
+ :param String upstream: Upstream Repo
+ :param Dict config: Config Dict
+ :returns: Pagure Issue object generator
+ :rtype: sync2jira.intermediary.Issue
+ """
+ base = config['sync2jira'].get('pagure_url', 'https://pagure.io')
+ url = base + '/api/0/' + upstream + '/issues'
+
+ params = config['sync2jira']\
+ .get('filters', {})\
+ .get('pagure', {}) \
+ .get(upstream, {})
+
+ response = requests.get(url, params=params)
+ if not bool(response):
+ try:
+ reason = response.json()
+ except Exception:
+ reason = response.text
+ raise IOError("response: %r %r %r" % (response, reason, response.request.url))
+ data = response.json()['issues']
+
+ # Reformat the assignee value so that it is enclosed within an array
+ # We do this because Github supports multiple assignees, but JIRA doesn't :(
+ # Hopefully in the future it will support multiple assignees, thus enclosing
+ # the assignees in a list prepares for that support
+ for issue in data:
+ issue['assignee'] = [issue['assignee']]
+
+ issues = (i.Issue.from_pagure(upstream, issue, config) for issue in data)
+ for issue in issues:
+ yield issue
def github_issues(upstream, config):
@@ -124,26 +275,39 @@ def github_issues(upstream, config):
:rtype: sync2jira.intermediary.Issue
"""
token = config['sync2jira'].get('github_token')
-
+ if not token:
+ headers = {}
+ log.warning('No github_token found. We will be rate-limited...')
+ else:
+ headers = {'Authorization': 'token ' + token}
+
+ _filter = config['sync2jira']\
+ .get('filters', {})\
+ .get('github', {})\
+ .get(upstream, {})
+
+ url = 'https://api.github.com/repos/%s/issues' % upstream
+ if _filter:
+ url += '?' + urlencode(_filter)
+
+ issues = get_all_github_data(url, headers)
+
# Initialize Github object so we can get their full name (instead of their username)
# And get comments if needed
github_client = Github(config['sync2jira']['github_token'], retry=5)
- issues = get_all_github_data(upstream, github_client)
# We need to format everything to a standard to we can create an issue object
final_issues = []
for issue in issues:
- final_issue = {}
-
# Update comments:
# If there are no comments just make an empty array
- if issue.comments == 0:
- final_issue['comments'] = []
+ if issue['comments'] == 0:
+ issue['comments'] = []
else:
# We have multiple comments and need to make api call to get them
repo = github_client.get_repo(upstream)
comments = []
- github_issue = repo.get_issue(number=issue.number)
+ github_issue = repo.get_issue(number=issue['number'])
for comment in github_issue.get_comments():
# First make API call to get the users name
comments.append({
@@ -155,68 +319,58 @@ def github_issues(upstream, config):
'changed': None
})
# Assign the message with the newly formatted comments :)
- final_issue['comments'] = comments
+ issue['comments'] = comments
# Update reporter:
# Search for the user
- reporter = github_client.get_user(issue.user.login)
- final_issue['user'] = {}
+ reporter = github_client.get_user(issue['user']['login'])
+ # Update the reporter field in the message (to match Pagure format)
if reporter.name:
- final_issue['user']['fullname'] = reporter.name
+ issue['user']['fullname'] = reporter.name
else:
- final_issue['user']['fullname'] = issue.user.login
+ issue['user']['fullname'] = issue['user']['login']
# Update assignee(s):
assignees = []
- for person in issue.assignees:
- assignee = github_client.get_user(person.login)
+ for person in issue['assignees']:
+ assignee = github_client.get_user(person['login'])
assignees.append({'fullname': assignee.name})
# Update the assignee field in the message (to match Pagure format)
- final_issue['assignees'] = assignees
+ issue['assignees'] = assignees
# Update label(s):
- if issue.labels:
+ if issue['labels']:
# loop through all the labels on Github and add them
# to the new label list and then reassign the message
new_label = []
- for label in issue.labels:
- new_label.append(label.name)
- final_issue['labels'] = new_label
- else:
- final_issue['labels'] = []
+ for label in issue['labels']:
+ new_label.append(label['name'])
+ issue['labels'] = new_label
# Update milestone:
- if issue.milestone:
- final_issue['milestone'] = issue.milestone.title
- else:
- final_issue['milestone'] = None
-
- # Finish up creating any other mappings
- final_issue['state'] = issue.state
- final_issue['title'] = issue.title
- final_issue['html_url'] = issue.html_url
- final_issue['body'] = issue.body
- final_issue['assignees'] = issue.assignees
- final_issue['state'] = issue.state
- final_issue['id'] = issue.id
- final_issue['number'] = issue.number
-
- final_issues.append(final_issue)
+ if issue.get('milestone', None):
+ issue['milestone'] = issue['milestone']['title']
+
+ final_issues.append(issue)
final_issues = list((
i.Issue.from_github(upstream, issue, config) for issue in final_issues
+ if 'pull_request' not in issue # We don't want to copy these around
))
-
for issue in final_issues:
yield issue
-def get_all_github_data(upstream, github_client):
- """ Helper function to get all issues for a upstream repo """
- repo = github_client.get_repo(upstream)
- for issue in repo.get_issues():
- if (not issue.pull_request):
+def get_all_github_data(url, headers):
+ """ Pagination utility. Obnoxious. """
+ link = dict(next=url)
+ while 'next' in link:
+ response = _fetch_github_data(link['next'], headers)
+ for issue in response.json():
+ comments = _fetch_github_data(issue['comments_url'], headers)
+ issue['comments'] = comments.json()
yield issue
+ link = _github_link_field_to_dict(response.headers.get('link', None))
def _github_link_field_to_dict(field):
diff --git a/sync2jira/upstream_pr.py b/sync2jira/upstream_pr.py
index b049582..b1aa37d 100644
--- a/sync2jira/upstream_pr.py
+++ b/sync2jira/upstream_pr.py
@@ -37,18 +37,60 @@
log = logging.getLogger('sync2jira')
-def handle_github_message(config, msg):
+def handle_pagure_message(msg, config, suffix):
+ """
+ Handle Pagure message from FedMsg.
+
+ :param Dict msg: FedMsg Message
+ :param Dict config: Config File
+ :returns: Issue object
+ :rtype: sync2jira.intermediary.PR
+ """
+ # Extract our upstream name
+ upstream = msg['msg']['pullrequest']['project']['name']
+ ns = msg['msg']['pullrequest']['project'].get('namespace') or None
+ if ns:
+ upstream = '{ns}/{upstream}'.format(ns=ns, upstream=upstream)
+ mapped_repos = config['sync2jira']['map']['pagure']
+
+ # Check if we should sync this PR
+ if upstream not in mapped_repos:
+ log.debug("%r not in Pagure map: %r", upstream, mapped_repos.keys())
+ return None
+ elif 'pullrequest' not in mapped_repos[upstream]['sync']:
+ log.debug("%r not in Pagure PR map: %r", upstream, mapped_repos.keys())
+ return None
+
+ # Format the assignee field to match github (i.e. in a list)
+ msg['msg']['pullrequest']['assignee'] = [msg['msg']['pullrequest']['assignee']]
+
+ # Update suffix, Pagure suffix only register as comments
+ if msg['msg']['pullrequest']['status'] == 'Closed':
+ suffix = 'closed'
+ elif msg['msg']['pullrequest']['status'] == 'Merged':
+ suffix = 'merged'
+ elif msg['msg']['pullrequest'].get('closed_by') and \
+ msg['msg']['pullrequest']['status'] == 'Open':
+ suffix = 'reopened'
+ elif msg['msg']['pullrequest']['status'] == 'Open':
+ suffix = 'open'
+
+ return i.PR.from_pagure(upstream, msg['msg']['pullrequest'], suffix, config)
+
+
+def handle_github_message(msg, config, suffix):
"""
Handle GitHub message from FedMsg.
- :param Dict msg: Webhook Message
+ :param Dict msg: FedMsg Message
:param Dict config: Config File
+ :param String suffix: FedMsg suffix
:returns: Issue object
:rtype: sync2jira.intermediary.PR
"""
# Create our title (i.e. owner/repo)
- owner = msg['repository']['owner']['login']
- repo = msg['repository']['name']
+ owner = msg['msg']['repository']['owner']['login']
+ repo = msg['msg']['repository']['name']
upstream = '{owner}/{repo}'.format(owner=owner, repo=repo)
# Check if upstream is in mapped repos
@@ -65,13 +107,13 @@ def handle_github_message(config, msg):
github_client = Github(config['sync2jira']['github_token'])
# If there are no comments just make an empty array
- if msg['pull_request']['comments'] == 0:
- msg['pull_request']['comments'] = []
+ if msg['msg']['pull_request']['comments'] == 0:
+ msg['msg']['pull_request']['comments'] = []
else:
# We have multiple comments and need to make api call to get them
repo = github_client.get_repo(upstream)
comments = []
- github_pr = repo.get_pull(number=msg['pull_request']['number'])
+ github_pr = repo.get_pull(number=msg['msg']['pull_request']['number'])
for comment in github_pr.get_issue_comments():
# First make API call to get the users name
comments.append({
@@ -83,47 +125,83 @@ def handle_github_message(config, msg):
'changed': None
})
# Assign the message with the newly formatted comments :)
- msg['pull_request']['comments'] = comments
+ msg['msg']['pull_request']['comments'] = comments
# Search for the user
- reporter = github_client.get_user(msg['pull_request']['user']['login'])
+ reporter = github_client.get_user(msg['msg']['pull_request']['user']['login'])
# Update the reporter field in the message (to match Pagure format)
if reporter.name:
- msg['pull_request']['user']['fullname'] = reporter.name
+ msg['msg']['pull_request']['user']['fullname'] = reporter.name
else:
- msg['pull_request']['user']['fullname'] = \
- msg['pull_request']['user']['login']
+ msg['msg']['pull_request']['user']['fullname'] = \
+ msg['msg']['pull_request']['user']['login']
# Now do the same thing for the assignees
assignees = []
- for person in msg['pull_request']['assignees']:
+ for person in msg['msg']['pull_request']['assignees']:
assignee = github_client.get_user(person['login'])
assignees.append({'fullname': assignee.name})
# Update the assignee field in the message (to match Pagure format)
- msg['pull_request']['assignees'] = assignees
+ msg['msg']['pull_request']['assignees'] = assignees
# Update the label field in the message (to match Pagure format)
- if msg['pull_request']['labels']:
+ if msg['msg']['pull_request']['labels']:
# loop through all the labels on Github and add them
# to the new label list and then reassign the message
new_label = []
- for label in msg['pull_request']['labels']:
+ for label in msg['msg']['pull_request']['labels']:
new_label.append(label['name'])
- msg['pull_request']['labels'] = new_label
+ msg['msg']['pull_request']['labels'] = new_label
# Update the milestone field in the message (to match Pagure format)
- if msg['pull_request']['milestone']:
- msg['pull_request']['milestone'] = msg['pull_request']['milestone']['title']
+ if msg['msg']['pull_request']['milestone']:
+ msg['msg']['pull_request']['milestone'] = msg['msg']['pull_request']['milestone']['title']
+
+ return i.PR.from_github(upstream, msg['msg']['pull_request'], suffix, config)
+
+
+def pagure_prs(upstream, config):
+ """
+ Creates a Generator for all Pagure PRs in upstream repo.
- # Determin the suffix
- suffix = msg['action']
- if (suffix == 'closed'):
- # Check if this PR has been merged
- if (msg['pull_request']['merged_at'] is not None):
- suffix = 'merged'
+ :param String upstream: Upstream Repo
+ :param Dict config: Config Dict
+ :returns: Pagure Issue object generator
+ :rtype: sync2jira.intermediary.PR
+ """
+    # Build our URL
+ base = config['sync2jira'].get('pagure_url', 'https://pagure.io')
+ url = base + '/api/0/' + upstream + '/pull-requests'
+
+ # Get our filters
+ params = config['sync2jira']\
+ .get('filters', {})\
+ .get('pagure', {}) \
+ .get(upstream, {})
+
+ # Make a GET call to Pagure.io
+ response = requests.get(url, params=params)
+
+ # Catch if we have an error
+ if not bool(response):
+ try:
+ reason = response.json()
+ except Exception:
+ reason = response.text
+ raise IOError("response: %r %r %r" % (response, reason, response.request.url))
+
+ # Extract and format our data
+ data = response.json()['requests']
+
+ # Reformat Assignee
+ for pr in data:
+ pr['assignee'] = [pr['assignee']]
- return i.PR.from_github(upstream, msg['pull_request'], suffix, config)
+ # Build our final list of data and yield
+ prs = (i.PR.from_pagure(upstream, pr, 'open', config) for pr in data)
+ for pr in prs:
+ yield pr
def github_prs(upstream, config):
@@ -138,24 +216,43 @@ def github_prs(upstream, config):
# Get our GitHub token
token = config['sync2jira'].get('github_token')
- github_client = Github(config['sync2jira']['github_token'])
+ # Throw warning if we don't have a token set up
+ if not token:
+ headers = {}
+ log.warning('No github_token found. We will be rate-limited...')
+ else:
+ headers = {'Authorization': 'token ' + token}
+
+ # Get our filters
+ _filter = config['sync2jira'] \
+ .get('filters', {}) \
+ .get('github', {}) \
+ .get(upstream, {})
+
+ # Build our URL
+ url = 'https://api.github.com/repos/%s/pulls' % upstream
+ if _filter:
+ url += '?' + urlencode(_filter)
# Get our issues using helper functions
- prs = get_all_github_prs(upstream, github_client)
+ prs = u_issue.get_all_github_data(url, headers)
+
+ # Initialize Github object so we can get their full name (instead of their username)
+ # And get comments if needed
+ github_client = Github(config['sync2jira']['github_token'])
# Build our final list of prs
final_prs = []
for pr in prs:
- final_pr = {}
# Update comments:
# If there are no comments just make an empty array
- if pr.comments == 0:
- final_pr['comments'] = []
+ if len(pr['comments']) == 0:
+ pr['comments'] = []
else:
# We have multiple comments and need to make api call to get them
repo = github_client.get_repo(upstream)
comments = []
- github_pr = repo.get_pull(number=pr.number)
+ github_pr = repo.get_pull(number=pr['number'])
for comment in github_pr.get_issue_comments():
# First make API call to get the users name
comments.append({
@@ -167,58 +264,43 @@ def github_prs(upstream, config):
'changed': None
})
# Assign the message with the newly formatted comments :)
- final_pr['comments'] = comments
+ pr['comments'] = comments
# Update reporter:
# Search for the user
- reporter = github_client.get_user(pr.user.login)
+ reporter = github_client.get_user(pr['user']['login'])
# Update the reporter field in the message (to match Pagure format)
- final_pr['user'] = {}
if reporter.name:
- final_pr['user']['fullname'] = reporter.name
+ pr['user']['fullname'] = reporter.name
else:
- final_pr['user']['fullname'] = pr.user.login
+ pr['user']['fullname'] = pr['user']['login']
# Update assignee(s):
assignees = []
- for person in pr.assignees:
- assignee = github_client.get_user(person.login)
+ for person in pr.get('assignees', []):
+ assignee = github_client.get_user(person['login'])
assignees.append({'fullname': assignee.name})
# Update the assignee field in the message (to match Pagure format)
- final_pr['assignees'] = assignees
+ pr['assignees'] = assignees
# Update label(s):
- if pr.labels:
+ if pr['labels']:
# loop through all the labels on Github and add them
# to the new label list and then reassign the message
new_label = []
- for label in pr.labels:
+ for label in pr['labels']:
new_label.append(label['name'])
- final_pr['labels'] = new_label
+ pr['labels'] = new_label
# Update milestone:
- if pr.milestone:
- final_pr['milestone'] = pr.milestone.title
-
- # Finish up creating any other mappings
- final_pr['html_url'] = pr.html_url
- final_pr['title'] = pr.title
- final_pr['body'] = pr.body
- final_pr['number'] = pr.number
-
- final_prs.append(final_pr)
+ if pr.get('milestone', []):
+ pr['milestone'] = pr['milestone']['title']
+
+ final_prs.append(pr)
# Build our final list of data and yield
final_prs = list((
i.PR.from_github(upstream, pr, 'open', config) for pr in final_prs
))
for issue in final_prs:
yield issue
-
-
-def get_all_github_prs(upstream, github_client):
- """ Helper function to get all Prs for an upstream repo """
- repo = github_client.get_repo(upstream)
- for issue in repo.get_issues():
- if (issue.pull_request):
- yield issue
\ No newline at end of file
diff --git a/test-requirements.txt b/test-requirements.txt
new file mode 100644
index 0000000..fbd9fc9
--- /dev/null
+++ b/test-requirements.txt
@@ -0,0 +1,6 @@
+python-coveralls
+coverage
+nose
+pytest
+pytest-cov
+mock
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/integration_tests/integration_test.py b/tests/integration_tests/integration_test.py
new file mode 100644
index 0000000..2f54d7e
--- /dev/null
+++ b/tests/integration_tests/integration_test.py
@@ -0,0 +1,152 @@
+"""
+This is a helper program that listens for a UMB trigger, tests, and then deploys Sync2Jira
+"""
+# Built-In Modules
+import os
+import logging
+import sys
+import types
+
+# Local Modules
+from sync2jira.main import main as m
+from jira_values import PAGURE, GITHUB
+from runtime_config import runtime_config
+
+# 3rd Party Modules
+import jira.client
+
+# Global Variables
+URL = os.environ['JIRA_STAGE_URL']
+USERNAME = os.environ['JIRA_USER']
+PASSWORD = os.environ['JIRA_PASS']
+log = logging.getLogger(__name__)
+hdlr = logging.FileHandler('integration_test.log')
+formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
+hdlr.setFormatter(formatter)
+log.addHandler(hdlr)
+log.setLevel(logging.DEBUG)
+
+
+def main():
+ """
+ Main message to listen and react to messages.
+ """
+ log.info("[OS-BUILD] Running sync2jira.main...")
+
+ # Make our JIRA client
+ client = get_jira_client()
+
+ # First init with what we have
+ m(runtime_test=True, runtime_config=runtime_config)
+
+ # Now we need to make sure that Sync2Jira didn't update anything,
+ failed = False
+
+ # Compare to our old values
+ log.info("[OS-BUILD] Comparing values with Pagure...")
+ try:
+ compare_data(client, PAGURE)
+ except Exception as e:
+ failed = True
+ log.info(f"[OS-BUILD] When comparing Pagure something went wrong.\nException {e}")
+
+ log.info("[OS-BUILD] Comparing values with GitHub...")
+ try:
+ compare_data(client, GITHUB)
+ except Exception as e:
+ failed = True
+ log.info(f"[OS-BUILD] When comparing GitHub something went wrong.\nException {e}")
+
+ if failed:
+ log.info("[OS-BUILD] Tests have failed :(")
+ raise Exception()
+ else:
+ log.info("[OS-BUILD] Tests have passed :)")
+
+
+def compare_data(client, data):
+ """
+ Helper function to loop over values and compare to ensure they are the same
+ :param jira.client.JIRA client: JIRA client
+ :param Dict data: Data used to compare against
+    :return: None if all fields match; raises Exception on the first mismatch
+ """
+ # First get our existing JIRA issue
+ jira_ticket = data['JIRA']
+ existing = client.search_issues(f"Key = {jira_ticket}")
+
+ # Throw an error if too many issues were found
+ if len(existing) > 1:
+ raise Exception(f"Too many issues were found with ticket {jira_ticket}")
+
+ existing = existing[0]
+ log.info("TEST - "+existing.fields.summary)
+ # Check Tags
+ if data['tags'] != existing.fields.labels:
+ raise Exception(f"Error when comparing tags for {jira_ticket}\n"
+ f"Expected: {data['tags']}\n"
+ f"Actual: {existing.fields.labels}")
+
+ # Check FixVersion
+ formatted_fixVersion = format_fixVersion(existing.fields.fixVersions)
+
+ if data['fixVersions'] != formatted_fixVersion:
+ raise Exception(f"Error when comparing fixVersions for {jira_ticket}\n"
+ f"Expected: {data['fixVersions']}\n"
+ f"Actual: {formatted_fixVersion}")
+
+ # Check Assignee
+ if not existing.fields.assignee:
+ raise Exception(f"Error when comparing assignee for {jira_ticket}\n"
+ f"Expected: {data['assignee']}\n"
+ f"Actual: {existing.fields.assignee}")
+
+ elif data['assignee'] != existing.fields.assignee.name:
+ raise Exception(f"Error when comparing assignee for {jira_ticket}\n"
+ f"Expected: {data['assignee']}\n"
+ f"Actual: {existing.fields.assignee.name}")
+
+ # Check Title
+ if data['title'] != existing.fields.summary:
+ raise Exception(f"Error when comparing title for {jira_ticket}\n"
+ f"Expected: {data['title']}\n"
+ f"Actual: {existing.fields.summary}")
+
+ # Check Descriptions
+ if data['description'].replace("\n", "").replace(" ", "").replace("\r", "") != existing.fields.description.replace("\n", "").replace(" ", "").replace("\r", ""):
+ raise Exception(f"Error when comparing descriptions for {jira_ticket}\n"
+ f"Expected: {data['description']}\n"
+ f"Actual: {existing.fields.description}")
+
+
+def format_fixVersion(existing):
+ """
+ Helper function to format fixVersions
+ :param jira.version existing: Existing fixVersions
+ :return: Formatted fixVersions
+ :rtype: List
+ """
+ new_list = []
+ for version in existing:
+ new_list.append(version.name)
+ return new_list
+
+
+def get_jira_client():
+ """
+ Helper function to get JIRA client
+ :return: JIRA Client
+ :rtype: jira.client.JIRA
+ """
+ return jira.client.JIRA(**{
+ 'options': {
+ 'server': URL,
+ 'verify': False,
+ },
+ 'basic_auth': (USERNAME, PASSWORD),
+ })
+
+
+if __name__ == '__main__':
+ # Call our main method after parsing out message
+ main()
\ No newline at end of file
diff --git a/tests/integration_tests/jira_values.py b/tests/integration_tests/jira_values.py
new file mode 100644
index 0000000..aabf702
--- /dev/null
+++ b/tests/integration_tests/jira_values.py
@@ -0,0 +1,16 @@
+PAGURE = {
+ 'JIRA': 'FACTORY-6185',
+ 'title': '[Demo_project] Test Issue DO NOT TOUCH',
+ 'description': '[1580140473] Upstream Reporter: Sid Premkumar \n Upstream issue status: Open\n Upstream description: {quote}Some Description{quote} \n Upstream URL: https://pagure.io/Demo_project/issue/34',
+ 'fixVersions': ['FY19 Q2'],
+ 'assignee': 'sid',
+ 'tags': ['tag1', 'tag2', 'tag3']
+}
+GITHUB = {
+ 'JIRA': 'FACTORY-6186',
+ 'title': '[sidpremkumar/Demo_repo] Test Issue DO NOT TOUCH',
+ 'description': '[555670302] Upstream Reporter: Sid Premkumar \n Upstream issue status: Open\nUpstream description: {quote}Some Description{quote} \nUpstream URL: https://github.com/sidpremkumar/Demo_repo/issues/30',
+ 'fixVersions': ['FY19 Q1'],
+ 'assignee': 'sid',
+ 'tags': ['bug'],
+}
diff --git a/tests/integration_tests/runtime_config.py b/tests/integration_tests/runtime_config.py
new file mode 100644
index 0000000..cc19d56
--- /dev/null
+++ b/tests/integration_tests/runtime_config.py
@@ -0,0 +1,63 @@
+import os
+
+runtime_config = {
+ 'sync2jira': {
+ 'jira': {
+ 'pnt-jira': {
+ 'options': {
+ 'server': os.environ['JIRA_STAGE_URL'],
+ 'verify': True,
+ },
+ 'basic_auth': (
+ os.environ['JIRA_USER'],
+ os.environ['JIRA_PASS'],
+ ),
+ },
+ },
+ 'github_token': os.environ['SYNC2JIRA_GITHUB_TOKEN'],
+ 'admins': [{'spremkum', 'spremkum@redhat.com'}, {'rbean', 'rbean@redhat.com'}],
+ 'initialize': True,
+ 'testing': False,
+ 'develop': True,
+ 'confluence_statistics': False,
+
+ # We don't need legacy mode anymore. Not for a long time. Let's
+ # remove it soon.
+ 'legacy_matching': False,
+
+ # Set the default jira to be pnt-jira
+ 'default_jira_instance': 'pnt-jira',
+
+ 'filters': {
+ 'pagure': {
+ },
+ 'github': {
+ },
+ },
+ 'map': {
+ 'pagure': {
+ 'Demo_project': {'project': 'FACTORY', 'component': 'gitbz',
+ 'issue_updates': [{'transition': True},
+ 'description',
+ 'title',
+ {'tags': {'overwrite': True}},
+ {'fixVersion': {'overwrite': True}},
+ {'assignee': {'overwrite': True}},
+ 'url'],
+ 'sync': ['issue']},
+ },
+ 'github': {
+ 'sidpremkumar/Demo_repo': {'project': 'FACTORY', 'component': 'gitbz',
+ 'issue_updates': [{'transition': True},
+ 'description',
+ 'title',
+ {'tags': {'overwrite': True}},
+ {'fixVersion': {'overwrite': True}},
+ {'assignee': {'overwrite': True}},
+ 'url'],
+ 'sync': ['issue']}
+
+ },
+ },
+ }
+}
diff --git a/tests/test_confluence_client.py b/tests/test_confluence_client.py
new file mode 100644
index 0000000..a979555
--- /dev/null
+++ b/tests/test_confluence_client.py
@@ -0,0 +1,306 @@
+import unittest
+
+import mock
+
+try:
+ # Python 3.3 >
+ from unittest.mock import MagicMock # noqa: F401
+except ImportError:
+ from mock import MagicMock # noqa: F401
+
+PATH = 'sync2jira.confluence_client.'
+
+from sync2jira.confluence_client import ConfluenceClient
+
+
+class TestConfluenceClient(unittest.TestCase):
+ """
+ This class tests the confluence_client.py file
+ """
+
+ @mock.patch(PATH + 'ConfluenceClient.find_page')
+ def setUp(self,
+ mock_find_page):
+ mock_find_page.return_value = "mock_page_id"
+ self.confluence_client = ConfluenceClient()
+
+ self.mock_resp_bad = MagicMock()
+ self.mock_resp_bad.ok = False
+
+ def test_update_state_value(self):
+ """
+ This function tests the 'update_stat_value' function
+ """
+ # Call the function
+ self.confluence_client.update_stat_value(True)
+
+ # Assert Everything was called correctly
+ self.assertEqual(self.confluence_client.update_stat, True)
+
+ @mock.patch(PATH + 'ConfluenceClient.get_auth_object')
+ @mock.patch(PATH + 'requests')
+ def test_req_kwargs_basic(self,
+ mock_requests,
+ mock_get_auth_object):
+ """
+ This function tests 'req_kwargs' property with a basic client
+ """
+ # Set up return values
+ mock_get_auth_object.return_value = 'mock_auth_object'
+
+ # Call the function
+ response = self.confluence_client.req_kwargs
+
+ # Assert everything was called correctly
+ mock_requests.get.assert_not_called()
+ mock_get_auth_object.assert_called()
+ self.assertEqual(response, {'auth': 'mock_auth_object'})
+
+ @mock.patch(PATH + 'requests')
+ @mock.patch(PATH + 'ConfluenceClient.req_kwargs')
+ def test_find_page_found(self,
+ mock_req_kwargs,
+ mock_requests):
+ """
+ This function tests the 'find_page' function where we find a page
+ """
+ # Set up return values
+ mock_resp = MagicMock()
+ mock_resp.json.return_value = {'results': [{'id': 'mock_id'}]}
+ mock_requests.get.return_value = mock_resp
+
+ # Call the function
+ response = self.confluence_client.find_page()
+
+ # Assert everything was called correctly
+ mock_requests.get.assert_called_with(
+ "http://mock_confluence_url/rest/api/content/search?cql=title='mock_confluence_page_title' and space=mock_confluence_space")
+ mock_resp.json.assert_called()
+ self.assertEqual(response, 'mock_id')
+
+ @mock.patch(PATH + 'requests')
+ @mock.patch(PATH + 'ConfluenceClient.req_kwargs')
+ def test_find_page_not_found(self,
+ mock_req_kwargs,
+ mock_requests):
+ """
+ This function tests the 'find_page' function where we don't find a page
+ """
+ # Set up return values
+ mock_resp = MagicMock()
+ mock_resp.json.return_value = {'results': []}
+ mock_requests.get.return_value = mock_resp
+
+ # Call the function
+ response = self.confluence_client.find_page()
+
+ # Assert everything was called correctly
+ mock_requests.get.assert_called_with(
+ "http://mock_confluence_url/rest/api/content/search?cql=title='mock_confluence_page_title' and space=mock_confluence_space")
+ mock_resp.json.assert_called()
+ self.assertEqual(response, None)
+
+ @mock.patch(PATH + 'requests')
+ @mock.patch(PATH + 'ConfluenceClient.req_kwargs')
+ def test_find_page_error(self,
+ mock_req_kwargs,
+ mock_requests):
+ """
+ This function tests the 'find_page' function where we get an Error
+ """
+ # Set up return values
+ mock_resp = MagicMock()
+ mock_resp.json.return_value = {'results': []}
+ mock_requests.get.return_value = self.mock_resp_bad
+
+ # Call the function
+ self.confluence_client.find_page()
+
+ # Assert everything was called correctly
+ mock_requests.get.assert_called_with(
+ "http://mock_confluence_url/rest/api/content/search?cql=title='mock_confluence_page_title' and space=mock_confluence_space")
+
+ @mock.patch(PATH + 'requests')
+ @mock.patch(PATH + 'ConfluenceClient.req_kwargs')
+ def test_get_page_info(self,
+ mock_req_kwargs,
+ mock_requests):
+ """
+ This function tests the 'get_page_info' function where we have no Errors
+ """
+ # Set up return values
+ mock_resp = MagicMock()
+ mock_resp.json.return_value = 'mock_json'
+ mock_requests.get.return_value = mock_resp
+
+ # Call the function
+ response = self.confluence_client.get_page_info('mock_page_id')
+
+ # Assert everything was called correctly
+ mock_requests.get.assert_called_with(
+ 'http://mock_confluence_url/rest/api/content/mock_page_id?expand=ancestors,version,body.storage')
+ self.assertEqual(response, 'mock_json')
+
+ @mock.patch(PATH + 'requests')
+ @mock.patch(PATH + 'ConfluenceClient.req_kwargs')
+ def test_get_page_info_error(self,
+ mock_req_kwargs,
+ mock_requests):
+ """
+ This function tests the 'get_page_info' function where we have Errors
+ """
+ # Set up return values
+ mock_resp = MagicMock()
+ mock_resp.json.return_value = 'mock_json'
+ mock_requests.get.return_value = self.mock_resp_bad
+
+ # Call the function
+ self.confluence_client.get_page_info('mock_page_id')
+
+ # Assert everything was called correctly
+ mock_requests.get.assert_called_with(
+ 'http://mock_confluence_url/rest/api/content/mock_page_id?expand=ancestors,version,body.storage')
+
+ @mock.patch(PATH + 'ConfluenceClient.get_page_info')
+ @mock.patch(PATH + 'requests')
+ @mock.patch(PATH + 'ConfluenceClient.req_kwargs')
+ def test_update_page(self,
+ mock_req_kwargs,
+ mock_requests,
+ mock_get_page_info):
+ """
+ This function tests the 'update_page' function where we have no Errors
+ """
+ # Set up return values
+ mock_get_page_info.return_value = {
+ 'version': {'number': 1},
+ 'title': 'mock_title'}
+ mock_resp = MagicMock()
+ mock_resp.ok = True
+ mock_resp.json.return_value = 'mock_json'
+ mock_requests.put.return_value = mock_resp
+
+ # Call the function
+ response = self.confluence_client.update_page(
+ page_id='mock_page_id',
+ html_str='mock_html_str',
+ )
+
+ # Assert everything was called correctly
+ mock_requests.put.assert_called_with(
+ 'http://mock_confluence_url/rest/api/content/mock_page_id',
+ json={'id': 'mock_page_id', 'type': 'page',
+ 'title': 'mock_title', 'version': {'number': 2},
+ 'body': {'storage':
+ {'representation': 'storage', 'value': 'mock_html_str'}}})
+ self.assertEqual(response, 'mock_json')
+
+ @mock.patch(PATH + 'ConfluenceClient.get_page_info')
+ @mock.patch(PATH + 'requests')
+ @mock.patch(PATH + 'ConfluenceClient.req_kwargs')
+ def test_update_page_error(self,
+ mock_req_kwargs,
+ mock_requests,
+ mock_get_page_info):
+ """
+ This function tests the 'update_page' function where we have Errors
+ """
+ # Set up return values
+ mock_get_page_info.return_value = {
+ 'version': {'number': 1},
+ 'title': 'mock_title'}
+ mock_requests.put.return_value = self.mock_resp_bad
+
+ # Call the function
+ self.confluence_client.update_page(
+ page_id='mock_page_id',
+ html_str='mock_html_str',
+ )
+
+ # Assert everything was called correctly
+ mock_requests.put.assert_called_with(
+ 'http://mock_confluence_url/rest/api/content/mock_page_id',
+ json={
+ 'id': 'mock_page_id',
+ 'type': 'page',
+ 'title': 'mock_title',
+ 'version': {'number': 2},
+ 'body':
+ {'storage': {'representation': 'storage', 'value': 'mock_html_str'}}})
+
+ @mock.patch(PATH + 'HTTPBasicAuth')
+ def test_get_auth_object_basic(self,
+ mock_basic,):
+ """
+ This function tests 'get_auth_object' with basic auth
+ """
+ # Set up return values
+ mock_basic.return_value = 'mock_basic_auth'
+
+ # Call the function
+ response = self.confluence_client.get_auth_object()
+
+ # Assert everything was called correctly
+ self.assertEqual(response, 'mock_basic_auth')
+ mock_basic.assert_called_with('mock_confluence_username', 'mock_confluence_password')
+
+ @mock.patch(PATH + 'ConfluenceClient.update_page')
+ @mock.patch(PATH + 'jinja2')
+ @mock.patch(PATH + 'ConfluenceClient.get_page_info')
+ def test_update_stat_page(self,
+ mock_get_page_info,
+ mock_jinja2,
+ mock_update_page):
+ """
+ This function tests 'update_stat_page' function
+ """
+ # Set up return values
+ mock_html = """
+ Created Issues 1<
+ Descriptions 1<
+ Comments 1<
+ Reporters 1<
+ Assignees 1<
+ Status 1<
+ Transitions 1<
+ Titles 1<
+ Tags 1<
+ Fix Version 1<
+ Misc. Fields 1<
+ Total 1<
+ """
+ mock_get_page_info.return_value = {'body': {'storage': {'value': mock_html}}}
+ mock_confluence_data = {
+ 'Created Issues': 10,
+ 'Descriptions': 10,
+ 'Comments': 10,
+ 'Reporters': 10,
+ 'Status': 10,
+ 'Assignees': 10,
+ 'Transitions': 10,
+ 'Title': 10,
+ 'Tags': 10,
+ 'FixVersion': 10,
+ 'Misc. Fields': 10,
+ }
+ mock_templateLoader = MagicMock()
+ mock_templateEnv = MagicMock()
+ mock_template = MagicMock()
+ mock_template.render.return_value = 'mock_render'
+ mock_templateEnv.get_template.return_value = mock_template
+ mock_jinja2.FileSystemLoader.return_value = mock_templateLoader
+ mock_jinja2.Environment.return_value = mock_templateEnv
+
+ # Call the function
+ self.confluence_client.update_stat_page(mock_confluence_data)
+
+ # Assert Everything was called correctly
+ mock_jinja2.FileSystemLoader.assert_called_with(searchpath='usr/local/src/sync2jira/sync2jira/')
+ mock_jinja2.Environment.assert_called_with(loader=mock_templateLoader)
+ mock_templateEnv.get_template.assert_called_with('confluence_stat.jinja')
+ mock_template.render.assert_called_with(confluence_data={
+ 'Created Issues': 11, 'Descriptions': 11, 'Comments': 11,
+ 'Reporters': 11, 'Status': 11, 'Assignees': 11, 'Transitions': 11,
+ 'Title': 11, 'Tags': 11, 'FixVersion': 11, 'Misc. Fields': 11,
+ 'Total': 121, 'Total Time': '0:50:25 (HR:MIN:SEC)'})
+ mock_update_page.assert_called_with('mock_page_id', 'mock_render')
diff --git a/tests/test_downstream_issue.py b/tests/test_downstream_issue.py
new file mode 100644
index 0000000..5eceb2e
--- /dev/null
+++ b/tests/test_downstream_issue.py
@@ -0,0 +1,1778 @@
+import mock
+import unittest
+try:
+    # Python >= 3.3 (unittest.mock is stdlib)
+ from unittest.mock import MagicMock # noqa: F401
+except ImportError:
+ from mock import MagicMock # noqa: F401
+from datetime import datetime
+
+import sync2jira.downstream_issue as d
+from sync2jira.intermediary import Issue
+
+from nose.tools import eq_
+import jira.client
+from jira import JIRAError
+
+PATH = 'sync2jira.downstream_issue.'
+
+
+class TestDownstreamIssue(unittest.TestCase):
+ """
+ This class tests the downstream_issue.py file under sync2jira
+ """
+ def setUp(self):
+ """
+ Setting up the testing environment
+ """
+ # Mock Config dict
+ self.mock_config = {
+ 'sync2jira': {
+ 'default_jira_instance': 'another_jira_instance',
+ 'jira': {
+ 'mock_jira_instance': {'mock_jira': 'mock_jira'},
+ 'another_jira_instance': {'basic_auth': ['mock_user'],
+ 'options': {'server': 'mock_server'}}
+ },
+ 'testing': {},
+ 'legacy_matching': False,
+ 'admins': [{'mock_admin': 'mock_email'}],
+ 'develop': False
+ },
+ }
+
+ # Mock sync2jira.intermediary.Issue
+ self.mock_issue = MagicMock()
+ self.mock_issue.assignee = [{'fullname': 'mock_user'}]
+ self.mock_issue.downstream = {
+ 'project': 'mock_project',
+ 'custom_fields': {'somecustumfield': 'somecustumvalue'},
+ 'type': 'Fix',
+ 'qa-contact': 'dummy@dummy.com',
+ 'epic-link': 'DUMMY-1234',
+ 'EXD-Service': {'guild': 'EXD-Project', 'value': 'EXD-Value'},
+ 'issue_updates': [
+ 'comments',
+ {'tags': {'overwrite': False}},
+ {'fixVersion': {'overwrite': False}},
+ {'assignee': {'overwrite': True}}, 'description', 'title',
+ {'transition': 'CUSTOM TRANSITION'},
+ {'on_close': {"apply_labels": ["closed-upstream"]}}
+ ],
+ 'owner': 'mock_owner'
+ }
+ self.mock_issue.content = 'mock_content'
+ self.mock_issue.reporter = {'fullname': 'mock_user'}
+ self.mock_issue.url = 'mock_url'
+ self.mock_issue.title = 'mock_title'
+ self.mock_issue.comments = 'mock_comments'
+ self.mock_issue.tags = ['tag1', 'tag2']
+ self.mock_issue.fixVersion = ['fixVersion3', 'fixVersion4']
+ self.mock_issue.fixVersion = ['fixVersion3', 'fixVersion4']
+ self.mock_issue.assignee = [{'fullname': 'mock_assignee'}]
+ self.mock_issue.status = 'Open'
+ self.mock_issue.id = '1234'
+
+ # Mock issue updates
+ self.mock_updates = [
+ 'comments',
+ {'tags': {'overwrite': False}},
+ {'fixVersion': {'overwrite': False}},
+ {'assignee': {'overwrite': True}}, 'description', 'title',
+ {'transition': 'CUSTOM TRANSITION'},
+ {'on_close': {"apply_labels": ["closed-upstream"]}}
+ ]
+
+ # Mock Jira transition
+ self.mock_transition = [{
+ 'name': 'custom_closed_status',
+ 'id': 1234
+ }]
+
+ # Mock jira.resources.Issue
+ self.mock_downstream = MagicMock()
+ self.mock_downstream.id = 1234
+ self.mock_downstream.fields.labels = ['tag3', 'tag4']
+ mock_version1 = MagicMock()
+ mock_version1.name = 'fixVersion3'
+ mock_version2 = MagicMock()
+ mock_version2.name = 'fixVersion4'
+ self.mock_downstream.fields.fixVersions = [mock_version1, mock_version2]
+ self.mock_downstream.update.return_value = True
+ self.mock_downstream.fields.description = "This is an existing description"
+
+ # Mock datetime.today()
+ self.mock_today = MagicMock()
+ self.mock_today.strftime.return_value = 'mock_today'
+
+    @mock.patch('jira.client.JIRA')
+    def test_get_jira_client_not_issue(self,
+                                       mock_client):
+        """
+        This tests 'get_jira_client' function where the passed in
+        argument is not an Issue instance
+        """
+        # Call the function: a plain string is not an Issue, so the
+        # helper should raise before ever constructing a JIRA client.
+        with self.assertRaises(Exception):
+            d.get_jira_client(
+                issue='string',
+                config=self.mock_config
+            )
+
+        # Assert everything was called correctly
+        mock_client.assert_not_called()
+
+    @mock.patch('jira.client.JIRA')
+    def test_get_jira_client_not_instance(self,
+                                          mock_client):
+        """
+        This tests 'get_jira_client' function when there is no JIRA
+        instance configured (neither on the issue nor as a default)
+        """
+        # Set up return values: strip both the per-issue instance and
+        # the config-wide default so lookup has nothing to fall back on.
+        self.mock_issue.downstream = {}
+        self.mock_config['sync2jira']['default_jira_instance'] = {}
+
+        # Call the function
+        with self.assertRaises(Exception):
+            d.get_jira_client(
+                issue=self.mock_issue,
+                config=self.mock_config
+            )
+
+        # Assert everything was called correctly
+        mock_client.assert_not_called()
+
+    @mock.patch('jira.client.JIRA')
+    def test_get_jira_client(self,
+                             mock_client):
+        """
+        This tests 'get_jira_client' function where everything goes smoothly
+        """
+        # Set up return values: a real Issue (spec'd MagicMock) that names
+        # an instance present in self.mock_config.
+        mock_issue = MagicMock(spec=Issue)
+        mock_issue.downstream = {'jira_instance': 'mock_jira_instance'}
+        mock_client.return_value = 'Successful call!'
+
+        # Call the function
+
+        response = d.get_jira_client(
+            issue=mock_issue,
+            config=self.mock_config
+        )
+
+        # Assert everything was called correctly: the configured instance
+        # dict is expanded into keyword arguments for jira.client.JIRA.
+        mock_client.assert_called_with(mock_jira='mock_jira')
+        self.assertEqual('Successful call!', response)
+
+    @mock.patch('jira.client.JIRA')
+    def test_get_existing_legacy(self, client):
+        """
+        This tests '_get_existing_jira_issue_legacy' function
+        """
+        # Minimal stub standing in for an upstream issue.
+        class MockIssue(object):
+            downstream = {'key': 'value'}
+            url = 'wat'
+        issue = MockIssue()
+        config = self.mock_config
+        # Ensure that we get results back from the jira client.
+        target1 = "target1"
+        client.return_value.search_issues = mock.MagicMock(return_value=[target1])
+        result = d._get_existing_jira_issue_legacy(jira.client.JIRA(), issue, config)
+        eq_(result, target1)
+
+        # The legacy matcher builds a JQL query from the issue URL plus
+        # every key/value in issue.downstream.
+        client.return_value.search_issues.assert_called_once_with(
+            "'External issue URL'='wat' AND 'key'='value' AND "
+            "(resolution is null OR resolution = Duplicate)",
+        )
+
+    @mock.patch('jira.client.JIRA')
+    def test_get_existing_newstyle(self, client):
+        """
+        This tests '_get_existing_jira_issue' (new-style matching via
+        remote links rather than the legacy JQL fields)
+        """
+        config = self.mock_config
+
+        class MockIssue(object):
+            downstream = {'key': 'value'}
+            title = 'A title, a title...'
+            url = 'http://threebean.org'
+
+
+        issue = MockIssue()
+        mock_results_of_query = MagicMock()
+        mock_results_of_query.fields.summary = 'A title, a title...'
+
+        client.return_value.search_issues.return_value = [mock_results_of_query]
+        result = d._get_existing_jira_issue(jira.client.JIRA(), issue, config)
+        # Ensure that we get the mock_result_of_query as a result
+        self.assertEqual(result, mock_results_of_query)
+
+        # New-style matching searches by remote-link title and URL.
+        client.return_value.search_issues.assert_called_once_with(
+            'issueFunction in linkedIssuesOfRemote("Upstream issue") and '
+            'issueFunction in linkedIssuesOfRemote("http://threebean.org")'
+        )
+
+    @mock.patch('jira.client.JIRA')
+    def test_upgrade_oldstyle_jira_issue(self, client):
+        """
+        This tests '_upgrade_jira_issue': a legacy-matched downstream
+        issue should gain a new-style remote link to its upstream issue.
+        """
+        config = self.mock_config
+
+        class MockIssue(object):
+            downstream = {'key': 'value'}
+            title = 'A title, a title...'
+            url = 'http://threebean.org'
+
+        downstream = mock.MagicMock()
+        issue = MockIssue()
+        client_obj = mock.MagicMock()
+        client.return_value = client_obj
+        d._upgrade_jira_issue(jira.client.JIRA(), downstream, issue, config)
+
+        # The remote link payload the upgrade should attach.
+        remote = {
+            'url': 'http://threebean.org',
+            'title': 'Upstream issue',
+        }
+        client_obj.add_remote_link.assert_called_once_with(downstream.id, remote)
+
+
+ @mock.patch('jira.client.JIRA')
+ def test_assign_user(self, mock_client):
+ """
+ Test 'assign_user' function where remove_all flag is False
+ """
+ # Set up return values
+ mock_user = MagicMock()
+ mock_user.displayName = 'mock_assignee'
+ mock_user.key = 'mock_user_key'
+ mock_client.search_assignable_users_for_issues.return_value = [mock_user]
+ mock_client.assign_issue.return_value = True
+
+ # Call the assign user function
+ d.assign_user(
+ issue=self.mock_issue,
+ downstream=self.mock_downstream,
+ client=mock_client
+ )
+
+ # Assert that all calls mocked were called properly
+ self.mock_downstream.update({'assignee': {'name': 1234}})
+ mock_client.search_assignable_users_for_issues.assert_called_with(
+ 'mock_assignee',
+ project='mock_project'
+ )
+
+    @mock.patch('jira.client.JIRA')
+    def test_assign_user_with_owner(self, mock_client):
+        """
+        Test 'assign_user' function where no assignable user matches the
+        upstream assignee, so assignment falls back to the configured
+        'owner' from issue.downstream.
+        """
+        # Set up return values: empty search result forces the fallback.
+        mock_user = MagicMock()
+        mock_user.displayName = 'mock_assignee'
+        mock_user.key = 'mock_user_key'
+        mock_client.search_assignable_users_for_issues.return_value = []
+        mock_client.assign_issue.return_value = True
+
+        # Call the assign user function
+        d.assign_user(
+            issue=self.mock_issue,
+            downstream=self.mock_downstream,
+            client=mock_client
+        )
+
+        # Assert that all calls mocked were called properly
+        mock_client.assign_issue.assert_called_with(1234, 'mock_owner')
+        mock_client.search_assignable_users_for_issues.assert_called_with(
+            'mock_assignee',
+            project='mock_project'
+        )
+
+    @mock.patch('jira.client.JIRA')
+    def test_assign_user_without_owner(self, mock_client):
+        """
+        Test 'assign_user' function where no assignable user matches and
+        no 'owner' fallback is configured: nothing should be assigned.
+        """
+        # Set up return values
+        mock_user = MagicMock()
+        mock_user.displayName = 'mock_assignee'
+        mock_user.key = 'mock_user_key'
+        mock_client.search_assignable_users_for_issues.return_value = []
+        mock_client.assign_issue.return_value = True
+        # Remove the fallback owner from the fixture (setUp rebuilds it
+        # for the next test).
+        self.mock_issue.downstream.pop('owner')
+
+        # Call the assign user function
+        d.assign_user(
+            issue=self.mock_issue,
+            downstream=self.mock_downstream,
+            client=mock_client
+        )
+
+        # Assert that all calls mocked were called properly
+        mock_client.assign_issue.assert_not_called()
+        mock_client.search_assignable_users_for_issues.assert_called_with(
+            'mock_assignee',
+            project='mock_project'
+        )
+
+    @mock.patch('jira.client.JIRA')
+    def test_assign_user_remove_all(self, mock_client):
+        """
+        Test 'assign_user' function where remove_all flag is True:
+        the assignee is cleared and no lookup/assignment is attempted.
+        """
+        # Call the assign user function
+        d.assign_user(
+            issue=self.mock_issue,
+            downstream=self.mock_downstream,
+            client=mock_client,
+            remove_all=True
+        )
+
+        # Assert that all calls mocked were called properly
+        self.mock_downstream.update.assert_called_with(assignee={'name': ''})
+        mock_client.assign_issue.assert_not_called()
+        mock_client.search_assignable_users_for_issues.assert_not_called()
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch(PATH + '_update_jira_issue')
+    @mock.patch(PATH + 'attach_link')
+    @mock.patch('jira.client.JIRA')
+    def test_create_jira_issue(self,
+                               mock_client,
+                               mock_attach_link,
+                               mock_update_jira_issue,
+                               mock_confluence_client):
+        """
+        Tests '_create_jira_issue' function: creates the downstream issue,
+        attaches the upstream remote link, sets the custom fields
+        (epic-link, qa-contact, EXD-Service), and reports stats.
+        """
+        # Set up return values; fields() maps human field names to the
+        # customfield_* ids used in update() calls below.
+        mock_client.create_issue.return_value = self.mock_downstream
+        mock_client.fields.return_value = [
+            {'name': 'Epic Link', 'id': 'customfield_1'},
+            {'name': 'QA Contact', 'id': 'customfield_2'},
+            {'name': 'EXD-Service', 'id': 'customfield_3'},
+        ]
+        mock_confluence_client.update_stat = True
+
+        # Call the function
+        response = d._create_jira_issue(
+            client=mock_client,
+            issue=self.mock_issue,
+            config=self.mock_config
+        )
+
+        # Assert everything was called correctly
+        mock_client.create_issue.assert_called_with(
+            issuetype={'name': 'Fix'},
+            project={'key': 'mock_project'},
+            somecustumfield='somecustumvalue',
+            description='[1234] Upstream Reporter: mock_user \n Upstream issue status: Open\nUpstream description: {quote}mock_content{quote}',
+            summary='mock_title'
+        )
+        mock_attach_link.assert_called_with(
+            mock_client,
+            self.mock_downstream,
+            {
+                'url': 'mock_url',
+                'title': 'Upstream issue'
+            }
+        )
+        mock_update_jira_issue.assert_called_with(
+            self.mock_downstream,
+            self.mock_issue,
+            mock_client
+        )
+        self.mock_downstream.update.assert_any_call({'customfield_1': 'DUMMY-1234'})
+        self.mock_downstream.update.assert_any_call({'customfield_2': 'dummy@dummy.com'})
+        self.mock_downstream.update.assert_any_call(
+            {"customfield_3": {"value": "EXD-Project", "child": {"value": "EXD-Value"}}})
+        self.assertEqual(response, self.mock_downstream)
+        # No failures occurred, so no explanatory comment is added.
+        mock_client.add_comment.assert_not_called()
+        mock_confluence_client.update_stat_page.assert_called_with(
+            {'Misc. Fields': 3, 'Created Issues': 1, 'Descriptions': 1, 'Status': 1, 'Reporters': 1}
+        )
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch(PATH + '_update_jira_issue')
+    @mock.patch(PATH + 'attach_link')
+    @mock.patch('jira.client.JIRA')
+    def test_create_jira_issue_failed_epic_link(self,
+                                                mock_client,
+                                                mock_attach_link,
+                                                mock_update_jira_issue,
+                                                mock_confluence_client):
+        """
+        Tests '_create_jira_issue' function where we fail updating the epic link
+        (the first update() raises JIRAError; a comment is added instead)
+        """
+        # Set up return values
+        mock_client.create_issue.return_value = self.mock_downstream
+        mock_client.fields.return_value = [
+            {'name': 'Epic Link', 'id': 'customfield_1'},
+            {'name': 'QA Contact', 'id': 'customfield_2'},
+            {'name': 'EXD-Service', 'id': 'customfield_3'},
+        ]
+        # First update() (epic link) fails; the remaining two succeed.
+        self.mock_downstream.update.side_effect = [JIRAError, 'success', 'success']
+        mock_confluence_client.update_stat = True
+
+        # Call the function
+        response = d._create_jira_issue(
+            client=mock_client,
+            issue=self.mock_issue,
+            config=self.mock_config
+        )
+
+        # Assert everything was called correctly
+        mock_client.create_issue.assert_called_with(
+            issuetype={'name': 'Fix'},
+            project={'key': 'mock_project'},
+            somecustumfield='somecustumvalue',
+            description='[1234] Upstream Reporter: mock_user \n Upstream issue status: Open\nUpstream description: {quote}mock_content{quote}',
+            summary='mock_title'
+        )
+        mock_attach_link.assert_called_with(
+            mock_client,
+            self.mock_downstream,
+            {
+                'url': 'mock_url',
+                'title': 'Upstream issue'
+            }
+        )
+        mock_update_jira_issue.assert_called_with(
+            self.mock_downstream,
+            self.mock_issue,
+            mock_client
+        )
+        self.mock_downstream.update.assert_any_call({'customfield_1': 'DUMMY-1234'})
+        self.mock_downstream.update.assert_any_call(
+            {'customfield_2': 'dummy@dummy.com'})
+        self.mock_downstream.update.assert_any_call(
+            {"customfield_3": {"value": "EXD-Project", "child": {"value": "EXD-Value"}}})
+        self.assertEqual(response, self.mock_downstream)
+        # NOTE(review): the f-prefix below is unnecessary (no placeholders).
+        mock_client.add_comment.assert_called_with(self.mock_downstream, f"Error adding Epic-Link: DUMMY-1234")
+        mock_confluence_client.update_stat_page.assert_called_with(
+            {'Misc. Fields': 3, 'Created Issues': 1, 'Descriptions': 1, 'Status': 1, 'Reporters': 1})
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch(PATH + '_update_jira_issue')
+    @mock.patch(PATH + 'attach_link')
+    @mock.patch('jira.client.JIRA')
+    def test_create_jira_issue_failed_exd_service(self,
+                                                  mock_client,
+                                                  mock_attach_link,
+                                                  mock_update_jira_issue,
+                                                  mock_confluence_client):
+        """
+        Tests '_create_jira_issue' function where we fail updating the EXD-Service field
+        (the third update() raises JIRAError; a comment is added instead)
+        """
+        # Set up return values
+        mock_client.create_issue.return_value = self.mock_downstream
+        mock_client.fields.return_value = [
+            {'name': 'Epic Link', 'id': 'customfield_1'},
+            {'name': 'QA Contact', 'id': 'customfield_2'},
+            {'name': 'EXD-Service', 'id': 'customfield_3'},
+        ]
+        # First two updates succeed; the EXD-Service update fails.
+        self.mock_downstream.update.side_effect = ['success', 'success', JIRAError]
+        mock_confluence_client.update_stat = True
+
+        # Call the function
+        response = d._create_jira_issue(
+            client=mock_client,
+            issue=self.mock_issue,
+            config=self.mock_config
+        )
+
+        # Assert everything was called correctly
+        mock_client.create_issue.assert_called_with(
+            issuetype={'name': 'Fix'},
+            project={'key': 'mock_project'},
+            somecustumfield='somecustumvalue',
+            description='[1234] Upstream Reporter: mock_user \n Upstream issue status: Open\nUpstream description: {quote}mock_content{quote}',
+            summary='mock_title'
+        )
+        mock_attach_link.assert_called_with(
+            mock_client,
+            self.mock_downstream,
+            {
+                'url': 'mock_url',
+                'title': 'Upstream issue'
+            }
+        )
+        mock_update_jira_issue.assert_called_with(
+            self.mock_downstream,
+            self.mock_issue,
+            mock_client
+        )
+        self.mock_downstream.update.assert_any_call({'customfield_1': 'DUMMY-1234'})
+        self.mock_downstream.update.assert_any_call(
+            {'customfield_2': 'dummy@dummy.com'})
+        self.mock_downstream.update.assert_any_call(
+            {"customfield_3": {"value": "EXD-Project", "child": {"value": "EXD-Value"}}})
+        self.assertEqual(response, self.mock_downstream)
+        mock_client.add_comment.assert_called_with(self.mock_downstream,
+                                                   f"Error adding EXD-Service field.\n"
+                                                   f"Project: {self.mock_issue.downstream['EXD-Service']['guild']}\n"
+                                                   f"Value: {self.mock_issue.downstream['EXD-Service']['value']}")
+        mock_confluence_client.update_stat_page.assert_called_with(
+            {'Misc. Fields': 3, 'Created Issues': 1, 'Descriptions': 1, 'Status': 1, 'Reporters': 1})
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch(PATH + '_update_jira_issue')
+    @mock.patch(PATH + 'attach_link')
+    @mock.patch('jira.client.JIRA')
+    def test_create_jira_issue_no_updates(self,
+                                          mock_client,
+                                          mock_attach_link,
+                                          mock_update_jira_issue,
+                                          mock_confluence_client):
+        """
+        Tests '_create_jira_issue' function where we have
+        no updates: the description carries only the reporter prefix and
+        no status/description sections.
+        """
+        # Set up return values
+        mock_client.create_issue.return_value = self.mock_downstream
+        self.mock_issue.downstream['issue_updates'] = []
+        mock_confluence_client.update_stat = True
+
+        # Call the function
+        response = d._create_jira_issue(
+            client=mock_client,
+            issue=self.mock_issue,
+            config=self.mock_config
+        )
+
+        # Assert everything was called correctly
+        mock_client.create_issue.assert_called_with(
+            issuetype={'name': 'Fix'},
+            project={'key': 'mock_project'},
+            somecustumfield='somecustumvalue',
+            description='[1234] Upstream Reporter: mock_user \n ',
+            summary='mock_title'
+        )
+        mock_attach_link.assert_called_with(
+            mock_client,
+            self.mock_downstream,
+            {
+                'url': 'mock_url',
+                'title': 'Upstream issue'
+            }
+        )
+        mock_update_jira_issue.assert_called_with(
+            self.mock_downstream,
+            self.mock_issue,
+            mock_client
+        )
+        self.assertEqual(response, self.mock_downstream)
+        mock_client.add_comment.assert_not_called()
+        mock_confluence_client.update_stat_page.assert_called_with({'Misc. Fields': 1, 'Created Issues': 1, 'Reporters': 1})
+
+
+    @mock.patch(PATH + 'get_jira_client')
+    @mock.patch(PATH + '_get_existing_jira_issue')
+    @mock.patch(PATH + '_update_jira_issue')
+    @mock.patch(PATH + '_create_jira_issue')
+    @mock.patch('jira.client.JIRA')
+    @mock.patch(PATH + '_get_existing_jira_issue_legacy')
+    @mock.patch(PATH + 'check_jira_status')
+    def test_sync_with_jira_matching(self,
+                                     mock_check_jira_status,
+                                     mock_existing_jira_issue_legacy,
+                                     mock_client,
+                                     mock_create_jira_issue,
+                                     mock_update_jira_issue,
+                                     mock_existing_jira_issue,
+                                     mock_get_jira_client):
+        """
+        Tests 'sync_with_jira' function where we do find a matching issue
+        This assumes we're not using the legacy matching anymore
+        """
+        # Set up return values: an existing downstream match is found and
+        # JIRA is reported healthy.
+        mock_get_jira_client.return_value = mock_client
+        mock_existing_jira_issue.return_value = self.mock_downstream
+        mock_check_jira_status.return_value = True
+
+        # Call the function
+        d.sync_with_jira(
+            issue=self.mock_issue,
+            config=self.mock_config
+        )
+
+        # Assert all calls were made correctly: existing match -> update,
+        # never create.
+        mock_get_jira_client.assert_called_with(self.mock_issue, self.mock_config)
+        mock_update_jira_issue.assert_called_with(self.mock_downstream, self.mock_issue, mock_client)
+        mock_create_jira_issue.assert_not_called()
+        mock_existing_jira_issue_legacy.assert_not_called()
+
+    @mock.patch(PATH + 'get_jira_client')
+    @mock.patch(PATH + '_get_existing_jira_issue')
+    @mock.patch(PATH + '_update_jira_issue')
+    @mock.patch(PATH + '_create_jira_issue')
+    @mock.patch('jira.client.JIRA')
+    @mock.patch(PATH + '_get_existing_jira_issue_legacy')
+    @mock.patch(PATH + 'check_jira_status')
+    def test_sync_with_jira_down(self,
+                                 mock_check_jira_status,
+                                 mock_existing_jira_issue_legacy,
+                                 mock_client,
+                                 mock_create_jira_issue,
+                                 mock_update_jira_issue,
+                                 mock_existing_jira_issue,
+                                 mock_get_jira_client):
+        """
+        Tests 'sync_with_jira' function where the JIRA scriptrunner is down:
+        the sync must abort with JIRAError before touching any issue.
+        """
+        # Set up return values
+        mock_get_jira_client.return_value = mock_client
+        mock_existing_jira_issue.return_value = self.mock_downstream
+        mock_check_jira_status.return_value = False
+
+        # Call the function
+        with self.assertRaises(JIRAError):
+            d.sync_with_jira(
+                issue=self.mock_issue,
+                config=self.mock_config
+            )
+
+        # Assert all calls were made correctly
+        mock_get_jira_client.assert_called_with(self.mock_issue, self.mock_config)
+        mock_update_jira_issue.assert_not_called()
+        mock_create_jira_issue.assert_not_called()
+        mock_existing_jira_issue_legacy.assert_not_called()
+
+    @mock.patch(PATH + 'get_jira_client')
+    @mock.patch(PATH + '_get_existing_jira_issue')
+    @mock.patch(PATH + '_update_jira_issue')
+    @mock.patch(PATH + '_create_jira_issue')
+    @mock.patch('jira.client.JIRA')
+    @mock.patch(PATH + '_get_existing_jira_issue_legacy')
+    @mock.patch(PATH + 'check_jira_status')
+    def test_sync_with_jira_no_matching(self,
+                                        mock_check_jira_status,
+                                        mock_existing_jira_issue_legacy,
+                                        mock_client,
+                                        mock_create_jira_issue,
+                                        mock_update_jira_issue,
+                                        mock_existing_jira_issue,
+                                        mock_get_jira_client):
+        """
+        Tests 'sync_with_jira' function where we do NOT find a matching issue
+        This assumes we're not using the legacy matching anymore
+        """
+        # Set up return values: no existing downstream match.
+        mock_get_jira_client.return_value = mock_client
+        mock_existing_jira_issue.return_value = None
+        mock_check_jira_status.return_value = True
+
+        # Call the function
+        d.sync_with_jira(
+            issue=self.mock_issue,
+            config=self.mock_config
+        )
+
+        # Assert all calls were made correctly: no match -> create,
+        # never update.
+        mock_get_jira_client.assert_called_with(self.mock_issue, self.mock_config)
+        mock_update_jira_issue.assert_not_called()
+        mock_create_jira_issue.assert_called_with(mock_client, self.mock_issue, self.mock_config)
+        mock_existing_jira_issue_legacy.assert_not_called()
+
+    @mock.patch(PATH + '_update_title')
+    @mock.patch(PATH + '_update_description')
+    @mock.patch(PATH + '_update_comments')
+    @mock.patch(PATH + '_update_tags')
+    @mock.patch(PATH + '_update_fixVersion')
+    @mock.patch(PATH + '_update_transition')
+    @mock.patch(PATH + '_update_assignee')
+    @mock.patch(PATH + '_update_on_close')
+    @mock.patch('jira.client.JIRA')
+    def test_update_jira_issue(self,
+                               mock_client,
+                               mock_update_on_close,
+                               mock_update_assignee,
+                               mock_update_transition,
+                               mock_update_fixVersion,
+                               mock_update_tags,
+                               mock_update_comments,
+                               mock_update_description,
+                               mock_update_title):
+        """
+        This tests '_update_jira_issue' function: every update listed in
+        the fixture's issue_updates dispatches to its _update_* helper.
+        """
+        # Call the function
+        d._update_jira_issue(
+            existing=self.mock_downstream,
+            issue=self.mock_issue,
+            client=mock_client
+        )
+
+        # Assert all calls were made correctly
+        mock_update_comments.assert_called_with(
+            mock_client,
+            self.mock_downstream,
+            self.mock_issue
+        )
+        mock_update_tags.assert_called_with(
+            self.mock_updates,
+            self.mock_downstream,
+            self.mock_issue
+        )
+        mock_update_fixVersion.assert_called_with(
+            self.mock_updates,
+            self.mock_downstream,
+            self.mock_issue,
+            mock_client,
+        )
+        mock_update_description.assert_called_with(
+            self.mock_downstream,
+            self.mock_issue
+        )
+        mock_update_title.assert_called_with(
+            self.mock_issue,
+            self.mock_downstream
+        )
+        mock_update_transition.assert_called_with(
+            mock_client,
+            self.mock_downstream,
+            self.mock_issue
+        )
+        mock_update_on_close.assert_called_once()
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch('jira.client.JIRA')
+    def test_update_transition_JIRAError(self,
+                                         mock_client,
+                                         mock_confluence_client):
+        """
+        This function tests the '_update_transition' function where 'Upstream issue status'
+        is not in existing.fields.description and transitioning the issue throws an error
+        """
+        # Set up return values
+        self.mock_issue.status = 'Closed'
+        self.mock_downstream.fields.description = ''
+        mock_client.transitions.return_value = [{'name': 'CUSTOM TRANSITION', 'id': '1234'}]
+        mock_client.transition_issue.side_effect = JIRAError
+        mock_confluence_client.update_stat = True
+
+        # Call the function (the JIRAError from transition_issue is
+        # expected to be caught inside _update_transition).
+        d._update_transition(
+            client=mock_client,
+            existing=self.mock_downstream,
+            issue=self.mock_issue
+        )
+
+        # Assert all calls were made correctly
+        self.mock_downstream.update.assert_called_with({'description': 'Upstream issue status: Closed\n'})
+        mock_client.transitions.assert_called_with(self.mock_downstream)
+        mock_client.transition_issue.assert_called_with(self.mock_downstream, 1234)
+        mock_confluence_client.update_stat_page.assert_called_with({'Status': 1})
+
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch('jira.client.JIRA')
+    def test_update_transition_not_found(self,
+                                         mock_client,
+                                         mock_confluence_client):
+        """
+        This function tests the '_update_transition' function where 'Upstream issue status'
+        is not in existing.fields.description and we can't find the appropriate closed status
+        """
+        # Set up return values
+        self.mock_issue.status = 'Closed'
+        self.mock_issue.downstream['transition'] = 'bad_transition'
+        self.mock_downstream.fields.description = ''
+        mock_client.transitions.return_value = [{'name': 'CUSTOM TRANSITION', 'id': '1234'}]
+        mock_confluence_client.update_stat = True
+
+        # Call the function
+        d._update_transition(
+            client=mock_client,
+            existing=self.mock_downstream,
+            issue=self.mock_issue
+        )
+
+        # Assert all calls were made correctly
+        self.mock_downstream.update.assert_called_with({'description': 'Upstream issue status: Closed\n'})
+        mock_client.transitions.assert_called_with(self.mock_downstream)
+        # NOTE(review): despite the "can't find the closed status" scenario,
+        # this asserts a transition to id 1234 still happens — presumably a
+        # fallback in _update_transition; confirm against production code.
+        mock_client.transition_issue.assert_called_with(self.mock_downstream, 1234)
+        mock_confluence_client.update_stat_page.assert_called_with({'Transition': 1})
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch('jira.client.JIRA')
+    def test_update_transition_successful(self,
+                                          mock_client,
+                                          mock_confluence_client):
+        """
+        This function tests the '_update_transition' function where everything goes smoothly!
+        """
+        # Set up return values: the description already contains an
+        # 'Upstream issue status' line, so it is rewritten in place.
+        self.mock_issue.status = 'Closed'
+        self.mock_downstream.fields.description = '[test] Upstream issue status: Open'
+        mock_client.transitions.return_value = [{'name': 'CUSTOM TRANSITION', 'id': '1234'}]
+        mock_confluence_client.update_stat = True
+
+        # Call the function
+        d._update_transition(
+            client=mock_client,
+            existing=self.mock_downstream,
+            issue=self.mock_issue
+        )
+
+        # Assert all calls were made correctly
+        self.mock_downstream.update.assert_called_with({'description': 'Upstream issue status: Closed'})
+        mock_client.transitions.assert_called_with(self.mock_downstream)
+        mock_client.transition_issue.assert_called_with(self.mock_downstream, 1234)
+        mock_confluence_client.update_stat_page.assert_called_with({'Transition': 1})
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch(PATH + '_comment_format')
+    @mock.patch(PATH + '_comment_matching')
+    @mock.patch('jira.client.JIRA')
+    def test_update_comments(self,
+                             mock_client,
+                             mock_comment_matching,
+                             mock_comment_format,
+                             mock_confluence_client):
+        """
+        This function tests the '_update_comments' function: unmatched
+        upstream comments are formatted and added downstream.
+        """
+        # Set up return values
+        mock_client.comments.return_value = 'mock_comments'
+        mock_comment_matching.return_value = ['mock_comments_d']
+        mock_comment_format.return_value = 'mock_comment_body'
+        mock_confluence_client.update_stat = True
+
+        # Call the function
+        d._update_comments(
+            client=mock_client,
+            existing=self.mock_downstream,
+            issue=self.mock_issue
+        )
+
+        # Assert all calls were made correctly
+        mock_client.comments.assert_called_with(self.mock_downstream)
+        mock_comment_matching.assert_called_with(self.mock_issue.comments, 'mock_comments')
+        mock_comment_format.assert_called_with('mock_comments_d')
+        mock_client.add_comment.assert_called_with(self.mock_downstream, 'mock_comment_body')
+        mock_confluence_client.update_stat_page.assert_called_with({'Comments': 1})
+
+ def test_update_fixVersion_JIRAError(self):
+ """
+ This function tests the 'update_fixVersion' function where updating the downstream
+ issue throws an error
+ """
+ # Set up return values
+ self.mock_downstream.update.side_effect = JIRAError
+ self.mock_downstream.fields.fixVersions = []
+ mock_client = MagicMock()
+
+ # Call the function
+ d._update_fixVersion(
+ updates=self.mock_updates,
+ existing=self.mock_downstream,
+ issue=self.mock_issue,
+ client=mock_client,
+ )
+ # Assert all calls were made correctly
+ self.mock_downstream.update.assert_called_with(
+ {'fixVersions': [{'name': 'fixVersion3'}, {'name': 'fixVersion4'}]})
+ mock_client.add_comment(self.mock_downstream, f"Error updating fixVersion: {self.mock_issue.fixVersion}")
+
+
+    def test_update_fixVersion_no_api_call(self):
+        """
+        This function tests the '_update_fixVersion' function where existing
+        fixVersions are the same as the upstream ones
+        and thus no API call should be made
+        """
+        # Set up return values (side_effect would raise if update were
+        # called — it must not be).
+        self.mock_downstream.update.side_effect = JIRAError
+        mock_client = MagicMock()
+
+        # Call the function
+        d._update_fixVersion(
+            updates=self.mock_updates,
+            existing=self.mock_downstream,
+            issue=self.mock_issue,
+            client=mock_client,
+        )
+        # Assert all calls were made correctly
+        self.mock_downstream.update.assert_not_called()
+        mock_client.add_comment.assert_not_called()
+
+    @mock.patch(PATH + 'confluence_client')
+    def test_update_fixVersion_successful(self,
+                                          mock_confluence_client):
+        """
+        This function tests the '_update_fixVersion' function where everything goes smoothly!
+        """
+        # Set up return values: downstream has no fixVersions yet, so both
+        # upstream versions get added.
+        self.mock_downstream.fields.fixVersions = []
+        mock_client = MagicMock()
+        mock_confluence_client.update_stat = True
+
+        # Call the function
+        d._update_fixVersion(
+            updates=self.mock_updates,
+            existing=self.mock_downstream,
+            issue=self.mock_issue,
+            client=mock_client,
+        )
+        # Assert all calls were made correctly
+        self.mock_downstream.update.assert_called_with(
+            {'fixVersions': [{'name': 'fixVersion3'}, {'name': 'fixVersion4'}]})
+        mock_client.add_comment.assert_not_called()
+        mock_confluence_client.update_stat_page.assert_called_with({'FixVersion': 2})
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch(PATH + 'assign_user')
+    @mock.patch('jira.client.JIRA')
+    def test_update_assignee_assignee(self,
+                                      mock_client,
+                                      mock_assign_user,
+                                      mock_confluence_client):
+        """
+        This function tests the '_update_assignee' function where issue.assignee exists
+        and overwrite is enabled
+        """
+        # Call the function
+        d._update_assignee(
+            client=mock_client,
+            existing=self.mock_downstream,
+            issue=self.mock_issue,
+            updates=[{'assignee': {'overwrite': True}}]
+        )
+
+        # Assert all calls were made correctly
+        mock_assign_user.assert_called_with(
+            mock_client,
+            self.mock_issue,
+            self.mock_downstream
+        )
+        mock_confluence_client.update_stat_page.assert_called_with({'Assignee': 1})
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch(PATH + 'assign_user')
+    @mock.patch('jira.client.JIRA')
+    def test_update_assignee_no_assignee(self,
+                                         mock_client,
+                                         mock_assign_user,
+                                         mock_confluence_client):
+        """
+        This function tests the '_update_assignee' function where issue.assignee does not exist:
+        with overwrite enabled the downstream assignee should be removed.
+        """
+        # Set up return values
+        self.mock_issue.assignee = None
+
+        # Call the function
+        d._update_assignee(
+            client=mock_client,
+            existing=self.mock_downstream,
+            issue=self.mock_issue,
+            updates=[{'assignee': {'overwrite': True}}]
+        )
+
+        # Assert all calls were made correctly
+        mock_assign_user.assert_called_with(
+            mock_client,
+            self.mock_issue,
+            self.mock_downstream,
+            remove_all=True
+        )
+        mock_confluence_client.update_stat_page.assert_called_with({'Assignee': 1})
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch(PATH + 'assign_user')
+    @mock.patch('jira.client.JIRA')
+    def test_update_assignee_no_overwrite(self,
+                                          mock_client,
+                                          mock_assign_user,
+                                          mock_confluence_client):
+        """
+        This function tests the '_update_assignee' function where overwrite is false:
+        assignment only happens because the downstream assignee is unset.
+        """
+        # Set up return values
+        self.mock_downstream.fields.assignee = None
+        mock_confluence_client.update_stat = True
+
+        # Call the function
+        d._update_assignee(
+            client=mock_client,
+            existing=self.mock_downstream,
+            issue=self.mock_issue,
+            updates=[{'assignee': {'overwrite': False}}]
+        )
+
+        # Assert all calls were made correctly
+        mock_assign_user.assert_called_with(
+            mock_client,
+            self.mock_issue,
+            self.mock_downstream
+        )
+        mock_confluence_client.update_stat_page.assert_called_with({'Assignee': 1})
+
+
+    @mock.patch(PATH + 'confluence_client')
+    @mock.patch(PATH + 'verify_tags')
+    @mock.patch(PATH + '_label_matching')
+    def test_update_tags(self,
+                         mock_label_matching,
+                         mock_verify_tags,
+                         mock_confluence_client):
+        """
+        This function tests the '_update_tags' function: matched labels are
+        verified and written to the downstream issue.
+        """
+        # Set up return values
+        mock_label_matching.return_value = 'mock_updated_labels'
+        mock_verify_tags.return_value = ['mock_verified_tags']
+        mock_confluence_client.update_stat = True
+
+        # Call the function
+        d._update_tags(
+            updates=self.mock_updates,
+            existing=self.mock_downstream,
+            issue=self.mock_issue
+        )
+
+        # Assert all calls were made correctly
+        mock_label_matching.assert_called_with(
+            self.mock_issue.tags,
+            self.mock_downstream.fields.labels
+        )
+        mock_verify_tags.assert_called_with('mock_updated_labels')
+        self.mock_downstream.update.assert_called_with({'labels': ['mock_verified_tags']})
+        mock_confluence_client.update_stat_page.assert_called_with({'Tags': 1})
+
+ @mock.patch(PATH + 'verify_tags')
+ @mock.patch(PATH + '_label_matching')
+ def test_update_tags_no_api_call(self,
+ mock_label_matching,
+ mock_verify_tags):
+ """
+ This function tests the '_update_tags' function where the existing tags are the same
+ as the new ones
+ """
+ # Set up return values
+ mock_label_matching.return_value = 'mock_updated_labels'
+ mock_verify_tags.return_value = ['tag3', 'tag4']
+
+ # Call the function
+ d._update_tags(
+ updates=self.mock_updates,
+ existing=self.mock_downstream,
+ issue=self.mock_issue
+ )
+
+ # Assert all calls were made correctly
+ mock_label_matching.assert_called_with(
+ self.mock_issue.tags,
+ self.mock_downstream.fields.labels
+ )
+ mock_verify_tags.assert_called_with('mock_updated_labels')
+ self.mock_downstream.update.assert_not_called()
+
+ @mock.patch(PATH + 'confluence_client')
+ def test_update_description_update(self,
+ mock_confluence_client):
+ """
+ This function tests '_update_description' where we just have to update the contents of the description
+ """
+ # Set up return values
+ self.mock_downstream.fields.description = 'Upstream description: {quote} test {quote}'
+ mock_confluence_client.update_stat = True
+
+ # Call the function
+ d._update_description(
+ existing=self.mock_downstream,
+ issue=self.mock_issue
+ )
+
+ # Assert all calls were made correctly
+ self.mock_downstream.update.assert_called_with(
+ {'description': 'Upstream description: {quote}mock_content{quote}'})
+ mock_confluence_client.update_stat_page.assert_called_with({'Description': 1})
+
+ @mock.patch(PATH + 'confluence_client')
+ def test_update_description_add_field(self,
+ mock_confluence_client):
+ """
+ This function tests '_update_description' where we just have to add a description field
+ """
+ # Set up return values
+ self.mock_downstream.fields.description = '[123] Upstream Reporter: mock_user \n' \
+ 'Upstream description: {quote} test {quote}'
+
+ # Call the function
+ d._update_description(
+ existing=self.mock_downstream,
+ issue=self.mock_issue
+ )
+
+ # Assert all calls were made correctly
+ self.mock_downstream.update.assert_called_with(
+ {'description': '[123] Upstream Reporter: mock_user \n'
+ 'Upstream description: {quote}mock_content{quote}'})
+ mock_confluence_client.update_stat_page.assert_called_with({'Description': 1})
+
+ @mock.patch(PATH + 'confluence_client')
+ @mock.patch(PATH + 'datetime')
+ def test_update_description_add_reporter(self,
+ mock_datetime,
+ mock_confluence_client):
+ """
+ This function tests '_update_description' where we have to add a description and upstream reporter field
+ """
+ # Set up return values
+ self.mock_downstream.fields.description = '[123] Upstream issue status: Open\n'
+ self.mock_issue.status = 'Open'
+ self.mock_issue.id = '123'
+ self.mock_issue.reporter = {'fullname': 'mock_user'}
+ mock_datetime.today.return_value = self.mock_today
+ mock_confluence_client.update_stat = True
+
+ # Call the function
+ d._update_description(
+ existing=self.mock_downstream,
+ issue=self.mock_issue
+ )
+ # Assert all calls were made correctly
+ self.mock_downstream.update.assert_called_with(
+ {'description': '[mock_today] Upstream issue status: Open\n[123]'
+ ' Upstream Reporter: mock_user\nUpstream description:'
+ ' {quote}mock_content{quote}\n'})
+ mock_confluence_client.update_stat_page.assert_called_with({'Description': 1})
+
+ @mock.patch(PATH + 'confluence_client')
+ def test_update_description_add_reporter_no_status(self,
+ mock_confluence_client):
+ """
+ This function tests '_update_description' where we have to add reporter and description without status
+ """
+ # Set up return values
+ self.mock_downstream.fields.description = ''
+ mock_confluence_client.update_stat = True
+
+ # Call the function
+ d._update_description(
+ existing=self.mock_downstream,
+ issue=self.mock_issue
+ )
+
+ # Assert all calls were made correctly
+ self.mock_downstream.update.assert_called_with(
+ {'description': '[1234] Upstream Reporter: mock_user \n'
+ 'Upstream description: {quote}mock_content{quote} \n '})
+ mock_confluence_client.update_stat_page.assert_called_with({'Description': 1})
+
+ @mock.patch(PATH + 'confluence_client')
+ @mock.patch(PATH + 'datetime')
+ def test_update_description_add_description(self,
+ mock_datetime,
+ mock_confluence_client):
+ """
+ This function tests '_update_description' where we have a reporter and status already
+ """
+ # Set up return values
+ self.mock_downstream.fields.description = '[123] Upstream issue status: Open\n' \
+ '[123] Upstream Reporter: mock_user\n'
+ self.mock_issue.status = 'Open'
+ self.mock_issue.id = '123'
+ self.mock_issue.reporter = {'fullname': 'mock_user'}
+ mock_datetime.today.return_value = self.mock_today
+
+ # Call the function
+ d._update_description(
+ existing=self.mock_downstream,
+ issue=self.mock_issue
+ )
+
+ # Assert all calls were made correctly
+ self.mock_downstream.update.assert_called_with(
+ {'description': '[mock_today] Upstream issue status: Open\n'
+ '[123] Upstream Reporter: mock_user\n'
+ 'Upstream description: {quote}mock_content{quote}\n'})
+ mock_confluence_client.update_stat_page.assert_called_with({'Description': 1})
+
def test_verify_tags(self):
    """
    Exercise 'verify_tags': spaces inside a tag are normalized to underscores.
    """
    # Call the function under test with a single space-containing tag
    sanitized = d.verify_tags(tags=['this is a tag'])

    # The tag must come back with underscores instead of spaces
    self.assertEqual(sanitized, ['this_is_a_tag'])
+
+ @mock.patch(PATH + 'get_jira_client')
+ @mock.patch(PATH + '_matching_jira_issue_query')
+ @mock.patch(PATH + '_close_as_duplicate')
+ @mock.patch('jira.client.JIRA')
+ @mock.patch(PATH + 'check_jira_status')
+ def test_close_duplicates_no_matching(self,
+ mock_check_jira_status,
+ mock_client,
+ mock_close_as_duplicate,
+ mock_matching_jira_issue_query,
+ mock_get_jira_client):
+ """
+ This tests 'close_duplicates' function where len(results) <= 1
+ """
+ # Set up return values
+ mock_get_jira_client.return_value = mock_client
+ mock_matching_jira_issue_query.return_value = ['only_one_response']
+ mock_check_jira_status.return_value = True
+
+ # Call the function
+ response = d.close_duplicates(
+ issue=self.mock_issue,
+ config=self.mock_config
+ )
+
+ # Assert everything was called correctly
+ mock_get_jira_client.assert_called_with(self.mock_issue, self.mock_config)
+ mock_matching_jira_issue_query.assert_called_with(
+ mock_client,
+ self.mock_issue,
+ self.mock_config,
+ free=True
+ )
+ mock_close_as_duplicate.assert_not_called()
+ self.assertEqual(None, response)
+
+ @mock.patch(PATH + 'get_jira_client')
+ @mock.patch(PATH + '_matching_jira_issue_query')
+ @mock.patch(PATH + '_close_as_duplicate')
+ @mock.patch('jira.client.JIRA')
+ @mock.patch(PATH + 'check_jira_status')
+ def test_close_duplicates(self,
+ mock_check_jira_status,
+ mock_client,
+ mock_close_as_duplicate,
+ mock_matching_jira_issue_query,
+ mock_get_jira_client):
+ """
+ This tests 'close_duplicates' function where len(results) > 1
+ """
+ # Set up return values
+ mock_get_jira_client.return_value = mock_client
+ mock_item = MagicMock()
+ mock_item.fields.created = 1
+ mock_matching_jira_issue_query.return_value = [mock_item, mock_item, mock_item]
+ mock_check_jira_status.return_value = True
+
+ # Call the function
+ response = d.close_duplicates(
+ issue=self.mock_issue,
+ config=self.mock_config
+ )
+
+ # Assert everything was called correctly
+ mock_get_jira_client.assert_called_with(self.mock_issue, self.mock_config)
+ mock_matching_jira_issue_query.assert_called_with(
+ mock_client,
+ self.mock_issue,
+ self.mock_config,
+ free=True
+ )
+ mock_close_as_duplicate.assert_called_with(
+ mock_client,
+ mock_item,
+ mock_item,
+ self.mock_config
+ )
+ self.assertEqual(None, response)
+
+ @mock.patch('jira.client.JIRA')
+ def test_close_as_duplicate_errors(self,
+ mock_client):
+ """
+ This tests '_close_as_duplicate' function where client.transition_issue throws an exception
+ """
+ # Set up return values
+ class HTTPExceptionHelper():
+ text = "Field 'resolution' cannot be set"
+
+ class HTTPException(Exception):
+ response = HTTPExceptionHelper
+
+ mock_duplicate = MagicMock()
+ mock_duplicate.permalink.return_value = 'mock_url'
+ mock_duplicate.key = 'mock_key'
+ mock_keeper = MagicMock()
+ mock_keeper.key = 'mock_key'
+ mock_keeper.permalink.return_value = 'mock_url'
+ mock_client.transitions.return_value = [{'name': 'Dropped', 'id': '1234'}]
+ mock_client.comments.return_value = []
+ mock_client.transition_issue.side_effect = HTTPException
+
+ # Call the function
+ d._close_as_duplicate(
+ client=mock_client,
+ duplicate=mock_duplicate,
+ keeper=mock_keeper,
+ config=self.mock_config
+ )
+
+ # Assert everything was called correctly
+ mock_client.comments.assert_any_call(mock_keeper)
+ mock_client.comments.assert_any_call(mock_duplicate)
+ mock_client.transitions.assert_called_with(mock_duplicate)
+ mock_client.add_comment.assert_any_call(mock_duplicate, 'Marking as duplicate of mock_key')
+ mock_client.add_comment.assert_any_call(mock_keeper, 'mock_key is a duplicate.')
+ mock_client.transition_issue.assert_any_call(
+ mock_duplicate,
+ '1234',
+ resolution={'name': 'Duplicate'}
+ )
+ mock_client.transition_issue.assert_any_call(
+ mock_duplicate,
+ '1234'
+ )
+
+ @mock.patch('jira.client.JIRA')
+ def test_close_as_duplicate(self,
+ mock_client):
+ """
+ This tests '_close_as_duplicate' function where everything goes smoothly
+ """
+ # Set up return values
+ mock_duplicate = MagicMock()
+ mock_duplicate.permalink.return_value = 'mock_url'
+ mock_duplicate.key = 'mock_key'
+ mock_keeper = MagicMock()
+ mock_keeper.key = 'mock_key'
+ mock_keeper.permalink.return_value = 'mock_url'
+ mock_client.transitions.return_value = [{'name': 'Dropped', 'id': '1234'}]
+ mock_client.comments.return_value = []
+
+ # Call the function
+ d._close_as_duplicate(
+ client=mock_client,
+ duplicate=mock_duplicate,
+ keeper=mock_keeper,
+ config=self.mock_config
+ )
+
+ # Assert everything was called correctly
+ mock_client.comments.assert_any_call(mock_keeper)
+ mock_client.comments.assert_any_call(mock_duplicate)
+ mock_client.transitions.assert_called_with(mock_duplicate)
+ mock_client.add_comment.assert_any_call(mock_duplicate, 'Marking as duplicate of mock_key')
+ mock_client.add_comment.assert_any_call(mock_keeper, 'mock_key is a duplicate.')
+ mock_client.transition_issue.assert_called_with(
+ mock_duplicate,
+ '1234',
+ resolution={'name': 'Duplicate'}
+ )
+
+ @mock.patch(PATH + 'alert_user_of_duplicate_issues')
+ @mock.patch(PATH + 'find_username')
+ @mock.patch(PATH + 'check_comments_for_duplicate')
+ @mock.patch('jira.client.JIRA')
+ def test_matching_jira_issue_query(self,
+ mock_client,
+ mock_check_comments_for_duplicates,
+ mock_find_username,
+ mock_alert_user_of_duplicate_issues):
+ """
+ This tests '_matching_jira_query' function
+ """
+ # Set up return values
+ mock_downstream_issue = MagicMock()
+ self.mock_issue.upstream_title = 'mock_upstream_title'
+ mock_downstream_issue.fields.description = self.mock_issue.id
+ bad_downstream_issue = MagicMock()
+ bad_downstream_issue.fields.description = 'bad'
+ bad_downstream_issue.fields.summary = 'bad'
+ mock_client.search_issues.return_value = [mock_downstream_issue, bad_downstream_issue]
+ mock_check_comments_for_duplicates.return_value = True
+ mock_find_username.return_value = 'mock_username'
+ mock_alert_user_of_duplicate_issues.return_value = True
+
+ # Call the function
+ response = d._matching_jira_issue_query(
+ client=mock_client,
+ issue=self.mock_issue,
+ config=self.mock_config
+ )
+
+ # Assert everything was called correctly
+ self.assertEqual(response, [mock_downstream_issue])
+ mock_alert_user_of_duplicate_issues.assert_called_with(
+ self.mock_issue,
+ [mock_downstream_issue],
+ mock_client.search_issues.return_value,
+ self.mock_config,
+ mock_client
+ )
+ mock_client.search_issues.assert_called_with(
+ 'issueFunction in linkedIssuesOfRemote("Upstream issue")'
+ ' and issueFunction in linkedIssuesOfRemote("mock_url")')
+ mock_check_comments_for_duplicates.assert_called_with(
+ mock_client,
+ mock_downstream_issue,
+ 'mock_username'
+ )
+ mock_find_username.assert_called_with(
+ self.mock_issue,
+ self.mock_config
+ )
+
@mock.patch(PATH + 'jinja2')
@mock.patch(PATH + 'send_mail')
@mock.patch('jira.client.JIRA')
def test_alert_user(self,
                    mock_client,
                    mock_mailer,
                    mock_jinja):
    """
    This tests the 'alert_user_of_duplicate_issues' function: it should
    render the duplicate-issue email template and send the mail.
    """
    # Set up return values
    mock_downstream_issue = MagicMock()
    mock_downstream_issue.key = 'mock_key'
    bad_downstream_issue = MagicMock()
    bad_downstream_issue.key = 'mock_key'
    bad_downstream_issue.fields.status.name = 'To Do'
    mock_results_of_query = [mock_downstream_issue, bad_downstream_issue]
    mock_search_user_result = MagicMock()
    mock_search_user_result.displayName = 'mock_name'
    mock_search_user_result.emailAddress = 'mock_email'
    mock_client.search_users.return_value = [mock_search_user_result]
    mock_template = MagicMock(name='template')
    mock_template.render.return_value = 'mock_html_text'
    mock_template_env = MagicMock(name='templateEnv')
    mock_template_env.get_template.return_value = mock_template
    mock_jinja.Environment.return_value = mock_template_env

    # Call the function
    d.alert_user_of_duplicate_issues(
        issue=self.mock_issue,
        final_result=[mock_downstream_issue],
        results_of_query=mock_results_of_query,
        config=self.mock_config,
        client=mock_client
    )

    # Assert everything was called correctly
    mock_client.search_users.assert_any_call('mock_owner')
    mock_client.search_users.assert_any_call('mock_admin')
    mock_template.render.assert_called_with(
        admins=[{'name': 'mock_name', 'email': 'mock_email'}],
        duplicate_issues=[{'url': 'mock_server/browse/mock_key', 'title': 'mock_key'}],
        issue=self.mock_issue,
        selected_issue={'url': 'mock_server/browse/mock_key', 'title': 'mock_key'},
        user={'name': 'mock_name', 'email': 'mock_email'})
    # BUG FIX: this previously read "mock_mailer().send.asset_called_with('test')".
    # MagicMock silently auto-creates the misspelled 'asset_called_with'
    # attribute, so the line asserted nothing. Verify that the patched
    # send_mail was actually invoked.
    mock_mailer.assert_called()
+
@mock.patch(PATH + 'jinja2')
@mock.patch(PATH + 'send_mail')
@mock.patch('jira.client.JIRA')
def test_alert_user_multiple_users(self,
                                   mock_client,
                                   mock_mailer,
                                   mock_jinja):
    """
    This tests the 'alert_user_of_duplicate_issues' function where searching
    JIRA returns multiple users: only the user whose key matches the owner
    should end up in the rendered template.
    """
    # Set up return values
    mock_downstream_issue = MagicMock()
    mock_downstream_issue.key = 'mock_key'
    bad_downstream_issue = MagicMock()
    bad_downstream_issue.key = 'mock_key'
    bad_downstream_issue.fields.status.name = 'To Do'
    mock_results_of_query = [mock_downstream_issue, bad_downstream_issue]
    # Two matches: the first has a non-owner key and must be filtered out
    mock_search_user_result1 = MagicMock()
    mock_search_user_result1.displayName = 'bad_name'
    mock_search_user_result1.emailAddress = 'bad_email'
    mock_search_user_result1.key = 'bad_owner'
    mock_search_user_result2 = MagicMock()
    mock_search_user_result2.displayName = 'mock_name'
    mock_search_user_result2.emailAddress = 'mock_email'
    mock_search_user_result2.key = 'mock_owner'
    mock_client.search_users.return_value = [mock_search_user_result1, mock_search_user_result2]
    mock_template = MagicMock(name='template')
    mock_template.render.return_value = 'mock_html_text'
    mock_template_env = MagicMock(name='templateEnv')
    mock_template_env.get_template.return_value = mock_template
    mock_jinja.Environment.return_value = mock_template_env

    # Call the function
    d.alert_user_of_duplicate_issues(
        issue=self.mock_issue,
        final_result=[mock_downstream_issue],
        results_of_query=mock_results_of_query,
        config=self.mock_config,
        client=mock_client
    )

    # Assert everything was called correctly
    mock_client.search_users.assert_any_call('mock_owner')
    mock_client.search_users.assert_any_call('mock_admin')
    mock_template.render.assert_called_with(
        admins=[{'name': 'mock_name', 'email': 'mock_email'}],
        duplicate_issues=[{'url': 'mock_server/browse/mock_key', 'title': 'mock_key'}],
        issue=self.mock_issue,
        selected_issue={'url': 'mock_server/browse/mock_key', 'title': 'mock_key'},
        user={'name': 'mock_name', 'email': 'mock_email'})
    # BUG FIX: was the misspelled no-op "asset_called_with" on an
    # auto-created mock attribute; assert the patched send_mail was invoked.
    mock_mailer.assert_called()
+
def test_find_username(self):
    """
    Exercise 'find_username': it should resolve the JIRA username
    from the issue/config pair built in setUp.
    """
    username = d.find_username(self.mock_issue, self.mock_config)
    self.assertEqual(username, 'mock_user')
+
+ @mock.patch('jira.client.JIRA')
+ def test_check_comments_for_duplicates(self,
+ mock_client):
+ """
+ Tests 'check_comments_for_duplicates' function
+ """
+ # Set up return values
+ mock_comment = MagicMock()
+ mock_comment.body = 'Marking as duplicate of TEST-1234'
+ mock_comment.author.name = 'mock_user'
+ mock_client.comments.return_value = [mock_comment]
+ mock_client.issue.return_value = 'Successful Call!'
+
+ # Call the function
+ response = d.check_comments_for_duplicate(
+ client=mock_client,
+ result=self.mock_downstream,
+ username='mock_user'
+ )
+
+ # Assert everything was called correctly
+ self.assertEqual(response, 'Successful Call!')
+ mock_client.comments.assert_called_with(self.mock_downstream)
+ mock_client.issue.assert_called_with('TEST-1234')
+
+ @mock.patch(PATH + '_comment_format')
+ @mock.patch(PATH + '_comment_format_legacy')
+ def test_find_comment_in_jira_legacy(self,
+ mock_comment_format_legacy,
+ mock_comment_format):
+ """
+ This function tests '_find_comment_in_jira' where we find a legacy comment
+ """
+ # Set up return values
+ mock_comment_format.return_value = 'mock_comment_body'
+ mock_comment_format_legacy.return_value = 'mock_legacy_comment_body'
+ mock_jira_comment = MagicMock()
+ mock_jira_comment.raw = {'body': 'mock_legacy_comment_body'}
+ mock_comment = {
+ 'id': '12345',
+ 'date_created': datetime(2019, 8, 8)
+ }
+
+ # Call the function
+ response = d._find_comment_in_jira(mock_comment, [mock_jira_comment])
+
+ # Assert everything was called correctly
+ mock_comment_format_legacy.assert_called_with(mock_comment)
+ mock_comment_format.assert_called_with(mock_comment)
+ self.assertEqual(response, mock_jira_comment)
+
+ @mock.patch(PATH + '_comment_format')
+ @mock.patch(PATH + '_comment_format_legacy')
+ def test_find_comment_in_jira_id(self,
+ mock_comment_format_legacy,
+ mock_comment_format):
+ """
+ This function tests '_find_comment_in_jira' where we match an ID
+ """
+ # Set up return values
+ mock_comment_format.return_value = 'mock_comment_body'
+ mock_comment_format_legacy.return_value = 'mock_legacy_comment_body'
+ mock_jira_comment = MagicMock()
+ mock_jira_comment.raw = {'body': '12345'}
+ mock_comment = {
+ 'id': '12345',
+ 'date_created': datetime(2019, 8, 8)
+ }
+
+ # Call the function
+ response = d._find_comment_in_jira(mock_comment, [mock_jira_comment])
+
+ # Assert everything was called correctly
+ mock_comment_format_legacy.assert_called_with(mock_comment)
+ mock_comment_format.assert_called_with(mock_comment)
+ self.assertEqual(response, mock_jira_comment)
+
+ @mock.patch(PATH + '_comment_format')
+ @mock.patch(PATH + '_comment_format_legacy')
+ def test_find_comment_in_jira_old_comment(self,
+ mock_comment_format_legacy,
+ mock_comment_format):
+ """
+ This function tests '_find_comment_in_jira' where we find a old comment
+ """
+ # Set up return values
+ mock_comment_format.return_value = 'mock_comment_body'
+ mock_comment_format_legacy.return_value = 'mock_legacy_comment_body'
+ mock_jira_comment = MagicMock()
+ mock_jira_comment.raw = {'body': 'old_comment'}
+ mock_comment = {
+ 'id': '12345',
+ 'date_created': datetime(2019, 1, 1)
+ }
+
+ # Call the function
+ response = d._find_comment_in_jira(mock_comment, [mock_jira_comment])
+
+ # Assert everything was called correctly
+ mock_comment_format_legacy.assert_called_with(mock_comment)
+ mock_comment_format.assert_called_with(mock_comment)
+ self.assertEqual(response, mock_jira_comment)
+
+ @mock.patch(PATH + '_comment_format')
+ @mock.patch(PATH + '_comment_format_legacy')
+ def test_find_comment_in_jira_none(self,
+ mock_comment_format_legacy,
+ mock_comment_format):
+ """
+ This function tests '_find_comment_in_jira' where we return None
+ """
+ # Set up return values
+ mock_comment_format.return_value = 'mock_comment_body'
+ mock_comment_format_legacy.return_value = 'mock_legacy_comment_body'
+ mock_comment = {
+ 'id': '12345',
+ 'date_created': datetime(2019, 1, 1)
+ }
+
+ # Call the function
+ response = d._find_comment_in_jira(mock_comment, [])
+
+ # Assert everything was called correctly
+ mock_comment_format_legacy.assert_called_with(mock_comment)
+ mock_comment_format.assert_called_with(mock_comment)
+ self.assertEqual(response, None)
+
def test_check_jira_status_false(self):
    """
    'check_jira_status' should report False when the sanity query
    returns no issues.
    """
    # A client whose sanity query comes back empty
    jira_client = MagicMock()
    jira_client.search_issues.return_value = []

    status_ok = d.check_jira_status(jira_client)

    # The canonical sanity query must have been issued, and the
    # empty result must map to False
    jira_client.search_issues.assert_called_with("issueFunction in linkedIssuesOfRemote('*')")
    self.assertEqual(status_ok, False)
+
def test_check_jira_status_true(self):
    """
    This function tests 'check_jira_status' where we return true
    """
    # Set up return values: a non-empty query result means JIRA is healthy
    mock_jira_client = MagicMock()
    mock_jira_client.search_issues.return_value = ['some', 'values']

    # Call the function
    response = d.check_jira_status(mock_jira_client)

    # Assert everything was called correctly
    self.assertEqual(response, True)
    mock_jira_client.search_issues.assert_called_with("issueFunction in linkedIssuesOfRemote('*')")
+
def test_update_url_no_update(self):
    """
    '_update_url' must be a no-op when the downstream description
    already contains the upstream URL.
    """
    # Description already holds the upstream URL
    self.mock_downstream.fields.description = self.mock_issue.url

    d._update_url(self.mock_downstream, self.mock_issue)

    # No API update should have been issued
    self.mock_downstream.update.assert_not_called()
+
+ @mock.patch(PATH + 'confluence_client')
+ def test_update_url_update(self,
+ mock_confluence_client):
+ """
+ This function tests '_update_url' where we already have the URL
+ """
+ # Set up return values
+ mock_confluence_client.update_stat = True
+ self.mock_downstream.fields.description = ""
+
+ # Call the function
+ d._update_url(self.mock_downstream, self.mock_issue)
+
+ # Assert everything was called correctly
+ self.mock_downstream.update.assert_called_with(
+ {'description':
+ f"\nUpstream URL: {self.mock_issue.url}\n"})
+
+ @mock.patch(PATH + 'confluence_client')
+ def test_update_on_close_update(self,
+ mock_confluence_client):
+ """
+ This function tests '_update_on_close' where there is an
+ "apply_labels" configuration, and labels need to be updated.
+ """
+ # Set up return values
+ mock_confluence_client.update_stat = True
+ self.mock_downstream.fields.description = ""
+ self.mock_issue.status = 'Closed'
+ updates = [{"on_close": {"apply_labels": ["closed-upstream"]}}]
+
+ # Call the function
+ d._update_on_close(self.mock_downstream, self.mock_issue, updates)
+
+ # Assert everything was called correctly
+ self.mock_downstream.update.assert_called_with(
+ {'labels':
+ ["closed-upstream", "tag3", "tag4"]})
+
def test_update_on_close_no_change(self):
    """
    '_update_on_close' with an "apply_labels" configuration must not
    call the API when the label is already on the downstream issue.
    """
    self.mock_issue.status = 'Closed'
    # 'tag4' is already present on the downstream issue from setUp
    close_updates = [{"on_close": {"apply_labels": ["tag4"]}}]

    d._update_on_close(self.mock_downstream, self.mock_issue, close_updates)

    self.mock_downstream.update.assert_not_called()
+
def test_update_on_close_no_action(self):
    """
    '_update_on_close' must not call the API when the close
    configuration carries no "apply_labels" action.
    """
    self.mock_issue.status = 'Closed'
    # An on_close block with an unrelated action only
    close_updates = [{"on_close": {"some_other_action": None}}]

    d._update_on_close(self.mock_downstream, self.mock_issue, close_updates)

    self.mock_downstream.update.assert_not_called()
+
def test_update_on_close_no_config(self):
    """
    '_update_on_close' must not call the API when the updates list
    contains no close-event configuration at all.
    """
    self.mock_issue.status = 'Closed'
    # Plain field-sync entries, no "on_close" key anywhere
    close_updates = ["description"]

    d._update_on_close(self.mock_downstream, self.mock_issue, close_updates)

    self.mock_downstream.update.assert_not_called()
diff --git a/tests/test_downstream_pr.py b/tests/test_downstream_pr.py
new file mode 100644
index 0000000..7e88273
--- /dev/null
+++ b/tests/test_downstream_pr.py
@@ -0,0 +1,337 @@
+import unittest
+import mock
+try:
+ # Python 3.3 >
+ from unittest.mock import MagicMock # noqa: F401
+except ImportError:
+ from mock import MagicMock # noqa: F401
+
+import sync2jira.downstream_pr as d
+
+PATH = 'sync2jira.downstream_pr.'
+
+
+class TestDownstreamPR(unittest.TestCase):
+ """
+ This class tests the downstream_pr.py file under sync2jira
+ """
+
+ def setUp(self):
+ """
+ Setting up the testing environment
+ """
+ self.mock_pr = MagicMock()
+ self.mock_pr.jira_key = 'JIRA-1234'
+ self.mock_pr.suffix = 'mock_suffix'
+ self.mock_pr.title = 'mock_title'
+ self.mock_pr.url = 'mock_url'
+ self.mock_pr.reporter = 'mock_reporter'
+ self.mock_pr.downstream = {'pr_updates': [
+ {'merge_transition': 'CUSTOM_TRANSITION1'},
+ {'link_transition': 'CUSTOM_TRANSITION2'},
+ ]}
+
+ self.mock_config = {
+ 'sync2jira': {
+ 'default_jira_instance': 'another_jira_instance',
+ 'jira': {
+ 'mock_jira_instance': {'mock_jira': 'mock_jira'},
+ 'another_jira_instance': {'basic_auth': ['mock_user'],
+ 'options': {'server': 'mock_server'}}
+ },
+ 'testing': False,
+ 'legacy_matching': False,
+ 'admins': [{'mock_admin': 'mock_email'}],
+ 'develop': False
+ },
+ }
+
+ self.mock_client = MagicMock()
+ mock_user = MagicMock()
+ mock_user.displayName = 'mock_reporter'
+ mock_user.key = 'mock_key'
+ self.mock_client.search_users.return_value = [mock_user]
+ self.mock_client.search_issues.return_value = ['mock_existing']
+
+ self.mock_existing = MagicMock()
+
@mock.patch(PATH + 'update_jira_issue')
@mock.patch(PATH + "d_issue")
@mock.patch(PATH + "update_transition")
def test_sync_with_jira_link(self,
                             mock_update_transition,
                             mock_d_issue,
                             mock_update_jira_issue):
    """
    This function tests 'sync_with_jira' for a link event (non-merged
    suffix): the matching JIRA issue is updated and the configured
    'link_transition' is applied.
    """
    # Set up return values
    mock_d_issue.get_jira_client.return_value = self.mock_client

    # Call the function
    d.sync_with_jira(self.mock_pr, self.mock_config)

    # Assert everything was called correctly
    mock_update_jira_issue.assert_called_with('mock_existing', self.mock_pr, self.mock_client)
    self.mock_client.search_issues.assert_called_with('Key = JIRA-1234')
    mock_d_issue.get_jira_client.assert_called_with(self.mock_pr, self.mock_config)
    # BUG FIX: this previously read
    # "mock_update_transition.mock.asset_called_with(...)" — both '.mock'
    # and the 'asset_' typo are auto-created MagicMock attributes, so the
    # line verified nothing. Assert the transition call for real.
    # NOTE(review): expected args mirror the original author's intent —
    # confirm against downstream_pr.sync_with_jira if this starts failing.
    mock_update_transition.assert_called_with(
        self.mock_client, 'mock_existing', self.mock_pr, 'link_transition')
+
@mock.patch(PATH + 'update_jira_issue')
@mock.patch(PATH + "d_issue")
@mock.patch(PATH + "update_transition")
def test_sync_with_jira_merged(self,
                               mock_update_transition,
                               mock_d_issue,
                               mock_update_jira_issue):
    """
    This function tests 'sync_with_jira' for a merged PR: the matching
    JIRA issue is updated and the configured merged transition is applied.
    """
    # Set up return values
    mock_client = MagicMock()
    mock_client.search_issues.return_value = ['mock_existing']
    mock_d_issue.get_jira_client.return_value = mock_client
    self.mock_pr.suffix = 'merged'

    # Call the function
    d.sync_with_jira(self.mock_pr, self.mock_config)

    # Assert everything was called correctly
    mock_update_jira_issue.assert_called_with('mock_existing', self.mock_pr, mock_client)
    mock_client.search_issues.assert_called_with('Key = JIRA-1234')
    mock_d_issue.get_jira_client.assert_called_with(self.mock_pr, self.mock_config)
    # BUG FIX: was the no-op "mock_update_transition.mock.asset_called_with"
    # (auto-created mock attributes); assert the transition call for real.
    # NOTE(review): args mirror the original author's intent — confirm the
    # transition key string against downstream_pr.sync_with_jira.
    mock_update_transition.assert_called_with(
        mock_client, 'mock_existing', self.mock_pr, 'merged_transition')
+
+ @mock.patch(PATH + 'update_jira_issue')
+ @mock.patch(PATH + "d_issue")
+ def test_sync_with_jira_no_issues_found(self,
+ mock_d_issue,
+ mock_update_jira_issue):
+ """
+ This function tests 'sync_with_jira' where no issues are found
+ """
+ # Set up return values
+ self.mock_client.search_issues.return_value = []
+ mock_d_issue.get_jira_client.return_value = self.mock_client
+
+ # Call the function
+ d.sync_with_jira(self.mock_pr, self.mock_config)
+
+ # Assert everything was called correctly
+ mock_update_jira_issue.assert_not_called()
+ self.mock_client.search_issues.assert_called_with('Key = JIRA-1234')
+ mock_d_issue.get_jira_client.assert_called_with(self.mock_pr, self.mock_config)
+
+ @mock.patch(PATH + 'update_jira_issue')
+ @mock.patch(PATH + "d_issue")
+ def test_sync_with_jira_testing(self,
+ mock_d_issue,
+ mock_update_jira_issue):
+ """
+ This function tests 'sync_with_jira' where no issues are found
+ """
+ # Set up return values
+ mock_client = MagicMock()
+ mock_client.search_issues.return_value = []
+ self.mock_config['sync2jira']['testing'] = True
+ mock_d_issue.get_jira_client.return_value = mock_client
+
+ # Call the function
+ d.sync_with_jira(self.mock_pr, self.mock_config)
+
+ # Assert everything was called correctly
+ mock_update_jira_issue.assert_not_called()
+ mock_client.search_issues.assert_not_called()
+ mock_d_issue.get_jira_client.assert_not_called()
+
+ @mock.patch(PATH + 'confluence_client')
+ @mock.patch(PATH + 'comment_exists')
+ @mock.patch(PATH + 'format_comment')
+ @mock.patch(PATH + 'd_issue.attach_link')
+ @mock.patch(PATH + 'issue_link_exists')
+ def test_update_jira_issue_link(self,
+ mock_issue_link_exists,
+ mock_attach_link,
+ mock_format_comment,
+ mock_comment_exists,
+ mock_confluence_client):
+ """
+ This function tests 'update_jira_issue'
+ """
+ # Set up return values
+ mock_format_comment.return_value = 'mock_formatted_comment'
+ mock_comment_exists.return_value = False
+ mock_issue_link_exists.return_value = False
+ mock_confluence_client.update_stat = True
+
+ # Call the function
+ d.update_jira_issue('mock_existing', self.mock_pr, self.mock_client)
+
+ # Assert everything was called correctly
+ self.mock_client.add_comment.assert_called_with('mock_existing', 'mock_formatted_comment')
+ mock_format_comment.assert_called_with(self.mock_pr, self.mock_pr.suffix, self.mock_client)
+ mock_comment_exists.assert_called_with(self.mock_client, 'mock_existing', 'mock_formatted_comment')
+ mock_confluence_client.update_stat_page.assert_called_with({'Comments': 1})
+ mock_attach_link.assert_called_with(self.mock_client, 'mock_existing', {'url': 'mock_url', 'title': '[PR] mock_title'})
+
def test_issue_link_exists_false(self):
    """
    'issue_link_exists' returns False when none of the issue's remote
    links point at the PR's URL.
    """
    # One remote link, pointing somewhere else
    remote_link = MagicMock()
    remote_link.object.url = 'bad_url'
    self.mock_client.remote_links.return_value = [remote_link]

    link_found = d.issue_link_exists(self.mock_client, self.mock_existing, self.mock_pr)

    self.mock_client.remote_links.assert_called_with(self.mock_existing)
    self.assertEqual(link_found, False)
+
def test_issue_link_exists_true(self):
    """
    'issue_link_exists' returns True when a remote link already points
    at the PR's URL.
    """
    # One remote link, matching the PR URL exactly
    remote_link = MagicMock()
    remote_link.object.url = self.mock_pr.url
    self.mock_client.remote_links.return_value = [remote_link]

    link_found = d.issue_link_exists(self.mock_client, self.mock_existing, self.mock_pr)

    self.mock_client.remote_links.assert_called_with(self.mock_existing)
    self.assertEqual(link_found, True)
+
+ @mock.patch(PATH + 'format_comment')
+ @mock.patch(PATH + 'comment_exists')
+ @mock.patch(PATH + 'd_issue.attach_link')
+ @mock.patch(PATH + 'issue_link_exists')
+ def test_update_jira_issue_exists(self,
+ mock_issue_link_exists,
+ mock_attach_link,
+ mock_comment_exists,
+ mock_format_comment,
+ ):
+ """
+ This function tests 'update_jira_issue' where the comment already exists
+ """
+ # Set up return values
+ mock_format_comment.return_value = 'mock_formatted_comment'
+ mock_comment_exists.return_value = True
+ mock_issue_link_exists.return_value = True
+
+ # Call the function
+ d.update_jira_issue('mock_existing', self.mock_pr, self.mock_client)
+
+ # Assert everything was called correctly
+ self.mock_client.add_comment.assert_not_called()
+ mock_format_comment.assert_called_with(self.mock_pr, self.mock_pr.suffix, self.mock_client)
+ mock_comment_exists.assert_called_with(self.mock_client, 'mock_existing', 'mock_formatted_comment')
+ mock_attach_link.assert_not_called()
+ mock_issue_link_exists.assert_called_with(self.mock_client, 'mock_existing', self.mock_pr)
+
+
+ def test_comment_exists_false(self):
+ """
+        This function tests 'comment_exists' where the comment does not exist
+ """
+ # Set up return values
+ mock_comment = MagicMock()
+ mock_comment.body = 'not_mock_new_comment'
+ self.mock_client.comments.return_value = [mock_comment]
+
+ # Call the function
+ response = d.comment_exists(self.mock_client, 'mock_existing', 'mock_new_comment')
+
+ # Assert Everything was called correctly
+ self.mock_client.comments.assert_called_with('mock_existing')
+ self.assertEqual(response, False)
+
+ def test_comment_exists_true(self):
+ """
+ This function tests 'comment_exists' where the comment exists
+ """
+ # Set up return values
+ mock_comment = MagicMock()
+ mock_comment.body = 'mock_new_comment'
+ self.mock_client.comments.return_value = [mock_comment]
+
+ # Call the function
+ response = d.comment_exists(self.mock_client, 'mock_existing', 'mock_new_comment')
+
+ # Assert Everything was called correctly
+ self.mock_client.comments.assert_called_with('mock_existing')
+ self.assertEqual(response, True)
+
+ def test_format_comment_closed(self):
+ """
+ This function tests 'format_comment' where the PR is closed
+ """
+ # Call the function
+ response = d.format_comment(self.mock_pr, 'closed', self.mock_client)
+
+ # Assert Everything was called correctly
+ self.assertEqual(response, "Merge request [mock_title| mock_url] was closed.")
+
+ def test_format_comment_reopened(self):
+ """
+ This function tests 'format_comment' where the PR is reopened
+ """
+ # Call the function
+ response = d.format_comment(self.mock_pr, 'reopened', self.mock_client)
+
+ # Assert Everything was called correctly
+ self.assertEqual(response, "Merge request [mock_title| mock_url] was reopened.")
+
+ def test_format_comment_merged(self):
+ """
+ This function tests 'format_comment' where the PR is merged
+ """
+ # Call the function
+ response = d.format_comment(self.mock_pr, 'merged', self.mock_client)
+
+ # Assert Everything was called correctly
+ self.assertEqual(response, "Merge request [mock_title| mock_url] was merged!")
+
+ def test_format_comment_open(self):
+ """
+ This function tests 'format_comment' where the PR is open
+ """
+ # Call the function
+ response = d.format_comment(self.mock_pr, 'open', self.mock_client)
+
+ # Assert Everything was called correctly
+ self.assertEqual(response, "[~mock_key] mentioned this issue in merge request [mock_title| mock_url].")
+
+
+ def test_format_comment_open_no_user_found(self):
+ """
+ This function tests 'format_comment' where the PR is open and search_users returns nothing
+ """
+ # Set up return values
+ self.mock_client.search_users.return_value = []
+
+ # Call the function
+ response = d.format_comment(self.mock_pr, 'open', self.mock_client)
+
+ # Assert Everything was called correctly
+ self.assertEqual(response, "mock_reporter mentioned this issue in merge request [mock_title| mock_url].")
+
+ @mock.patch(PATH + 'd_issue')
+ def test_update_transition(self,
+ mock_d_issue):
+ """
+ This function tests 'update_transition'
+ """
+ # Set up return values
+ mock_client = MagicMock()
+
+ # Call the function
+ d.update_transition(mock_client, self.mock_existing, self.mock_pr, 'merge_transition')
+
+ # Assert everything was called correctly
+ mock_d_issue.change_status.assert_called_with(mock_client, self.mock_existing, 'CUSTOM_TRANSITION1', self.mock_pr)
diff --git a/tests/test_intermediary.py b/tests/test_intermediary.py
new file mode 100644
index 0000000..3f214d8
--- /dev/null
+++ b/tests/test_intermediary.py
@@ -0,0 +1,366 @@
+from datetime import datetime
+import mock
+import unittest
+
+import sync2jira.intermediary as i
+
+PATH = 'sync2jira.intermediary.'
+
+class TestIntermediary(unittest.TestCase):
+ """
+    This class tests the intermediary.py file under sync2jira
+ """
+ def setUp(self):
+ self.mock_config = {
+ 'sync2jira': {
+ 'pagure_url': 'dummy_pagure_url',
+ 'map': {
+ 'pagure': {
+ 'pagure': {'mock_downstream': 'mock_key'}
+ },
+ 'github': {
+ 'github': {'mock_downstream': 'mock_key'}
+ }
+ }
+ }
+ }
+ self.mock_pagure_issue = {
+ 'comments': [{
+ 'date_created': '1234',
+ 'user': {
+ 'name': 'mock_name'
+ },
+ 'comment': 'mock_body',
+ 'id': '1234',
+ }],
+ 'title': 'mock_title',
+ 'id': 1234,
+ 'tags': 'mock_tags',
+ 'milestone': 'mock_milestone',
+ 'priority': 'mock_priority',
+ 'content': 'mock_content',
+ 'user': 'mock_reporter',
+ 'assignee': 'mock_assignee',
+ 'status': 'mock_status',
+ 'date_created': 'mock_date'
+ }
+
+ self.mock_github_issue = {
+ 'comments': [{
+ 'author': 'mock_author',
+ 'name': 'mock_name',
+ 'body': 'mock_body',
+ 'id': 'mock_id',
+ 'date_created': 'mock_date'
+ }],
+ 'title': 'mock_title',
+ 'html_url': 'mock_url',
+ 'id': 1234,
+ 'labels': 'mock_tags',
+ 'milestone': 'mock_milestone',
+ 'priority': 'mock_priority',
+ 'body': 'mock_content',
+ 'user': 'mock_reporter',
+ 'assignees': 'mock_assignee',
+ 'state': 'open',
+ 'date_created': 'mock_date',
+ 'number': '1',
+ }
+
+ self.mock_github_pr = {
+ 'comments': [{
+ 'author': 'mock_author',
+ 'name': 'mock_name',
+ 'body': 'mock_body',
+ 'id': 'mock_id',
+ 'date_created': 'mock_date'
+ }],
+ 'title': 'mock_title',
+ 'html_url': 'mock_url',
+ 'id': 1234,
+ 'labels': 'mock_tags',
+ 'milestone': 'mock_milestone',
+ 'priority': 'mock_priority',
+ 'body': 'mock_content',
+ 'user': {'fullname': 'mock_reporter'},
+ 'assignee': 'mock_assignee',
+ 'state': 'open',
+ 'date_created': 'mock_date',
+ 'number': 1234,
+ }
+
+ self.mock_pagure_pr = {
+ 'comments': [{
+ 'date_created': '1234',
+ 'user': {
+ 'name': 'mock_name'
+ },
+ 'comment': 'mock_body',
+ 'id': '1234',
+ }],
+ 'title': 'mock_title',
+ 'id': 1234,
+ 'tags': 'mock_tags',
+ 'milestone': 'mock_milestone',
+ 'priority': 'mock_priority',
+ 'content': 'mock_content',
+ 'user': {'fullname': 'mock_reporter'},
+ 'assignee': 'mock_assignee',
+ 'status': 'mock_status',
+ 'date_created': 'mock_date',
+ 'project': {'name': 'mock_project_name'},
+ 'initial_comment': 'mock_content_initial'
+ }
+
+ @mock.patch(PATH + 'datetime')
+ def test_from_pagure(self,
+ mock_datetime):
+ """
+ This tests the 'from_pagure' function under the Issue class
+ """
+ # Set up return values
+ mock_datetime.fromtimestamp.return_value = 'mock_date'
+
+ # Call the function
+ response = i.Issue.from_pagure(
+ upstream='pagure',
+ issue=self.mock_pagure_issue,
+ config=self.mock_config
+ )
+
+ # Assert that we made the calls correctly
+ self.assertEqual(response.source, 'pagure')
+ self.assertEqual(response.title, '[pagure] mock_title')
+ self.assertEqual(response.url, 'dummy_pagure_url/pagure/issue/1234')
+ self.assertEqual(response.upstream, 'pagure')
+ self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name',
+ 'author': 'mock_name', 'changed': None,
+ 'date_created': 'mock_date', 'id': '1234'}])
+ self.assertEqual(response.tags, 'mock_tags')
+ self.assertEqual(response.fixVersion, ['mock_milestone'])
+ self.assertEqual(response.priority, 'mock_priority')
+ self.assertEqual(response.content, 'mock_content')
+ self.assertEqual(response.reporter, 'mock_reporter')
+ self.assertEqual(response.assignee, 'mock_assignee')
+ self.assertEqual(response.status, 'mock_status')
+ self.assertEqual(response.id, 'mock_date')
+ self.assertEqual(response.downstream, {'mock_downstream': 'mock_key'})
+
+ def test_from_github_open(self):
+ """
+ This tests the 'from_github' function under the Issue class where the state is open
+ """
+ # Call the function
+ response = i.Issue.from_github(
+ upstream='github',
+ issue=self.mock_github_issue,
+ config=self.mock_config
+ )
+
+ # Assert that we made the calls correctly
+ self.assertEqual(response.source, 'github')
+ self.assertEqual(response.title, '[github] mock_title')
+ self.assertEqual(response.url, 'mock_url')
+ self.assertEqual(response.upstream, 'github')
+ self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name', 'author': 'mock_author',
+ 'changed': None, 'date_created': 'mock_date', 'id': 'mock_id'}])
+ self.assertEqual(response.tags, 'mock_tags')
+ self.assertEqual(response.fixVersion, ['mock_milestone'])
+ self.assertEqual(response.priority, None)
+ self.assertEqual(response.content, 'mock_content')
+ self.assertEqual(response.reporter, 'mock_reporter')
+ self.assertEqual(response.assignee, 'mock_assignee')
+ self.assertEqual(response.status, 'Open')
+ self.assertEqual(response.id, '1234')
+ self.assertEqual(response.downstream, {'mock_downstream': 'mock_key'})
+
+ def test_from_github_closed(self):
+ """
+ This tests the 'from_github' function under the Issue class where the state is closed
+ """
+ # Set up return values
+ self.mock_github_issue['state'] = 'closed'
+
+ # Call the function
+ response = i.Issue.from_github(
+ upstream='github',
+ issue=self.mock_github_issue,
+ config=self.mock_config
+ )
+
+ # Assert that we made the calls correctly
+ self.assertEqual(response.source, 'github')
+ self.assertEqual(response.title, '[github] mock_title')
+ self.assertEqual(response.url, 'mock_url')
+ self.assertEqual(response.upstream, 'github')
+ self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name', 'author': 'mock_author',
+ 'changed': None, 'date_created': 'mock_date', 'id': 'mock_id'}])
+ self.assertEqual(response.tags, 'mock_tags')
+ self.assertEqual(response.fixVersion, ['mock_milestone'])
+ self.assertEqual(response.priority, None)
+ self.assertEqual(response.content, 'mock_content')
+ self.assertEqual(response.reporter, 'mock_reporter')
+ self.assertEqual(response.assignee, 'mock_assignee')
+ self.assertEqual(response.status, 'Closed')
+ self.assertEqual(response.id, '1234')
+ self.assertEqual(response.downstream, {'mock_downstream': 'mock_key'})
+
+ def test_mapping_github(self):
+ """
+ This tests the mapping feature from github
+ """
+ # Set up return values
+ self.mock_config['sync2jira']['map']['github']['github'] = {
+ 'mock_downstream': 'mock_key',
+ 'mapping': [{'fixVersion': 'Test XXX'}]
+ }
+ self.mock_github_issue['state'] = 'closed'
+
+ # Call the function
+ response = i.Issue.from_github(
+ upstream='github',
+ issue=self.mock_github_issue,
+ config=self.mock_config
+ )
+
+ # Assert that we made the calls correctly
+ self.assertEqual(response.source, 'github')
+ self.assertEqual(response.title, '[github] mock_title')
+ self.assertEqual(response.url, 'mock_url')
+ self.assertEqual(response.upstream, 'github')
+ self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name', 'author': 'mock_author',
+ 'changed': None, 'date_created': 'mock_date', 'id': 'mock_id'}])
+ self.assertEqual(response.tags, 'mock_tags')
+ self.assertEqual(response.fixVersion, ['Test mock_milestone'])
+ self.assertEqual(response.priority, None)
+ self.assertEqual(response.content, 'mock_content')
+ self.assertEqual(response.reporter, 'mock_reporter')
+ self.assertEqual(response.assignee, 'mock_assignee')
+ self.assertEqual(response.status, 'Closed')
+ self.assertEqual(response.id, '1234')
+ self.assertEqual(response.downstream, {
+ 'mock_downstream': 'mock_key',
+ 'mapping': [{'fixVersion': 'Test XXX'}]})
+
+ @mock.patch(PATH + 'datetime')
+ def test_mapping_pagure(self,
+ mock_datetime):
+ """
+ This tests the mapping feature from pagure
+ """
+ # Set up return values
+ mock_datetime.fromtimestamp.return_value = 'mock_date'
+ self.mock_config['sync2jira']['map']['pagure']['pagure'] = {
+ 'mock_downstream': 'mock_key',
+ 'mapping': [{'fixVersion': 'Test XXX'}]
+ }
+
+ # Call the function
+ response = i.Issue.from_pagure(
+ upstream='pagure',
+ issue=self.mock_pagure_issue,
+ config=self.mock_config
+ )
+
+ # Assert that we made the calls correctly
+ self.assertEqual(response.source, 'pagure')
+ self.assertEqual(response.title, '[pagure] mock_title')
+ self.assertEqual(response.url, 'dummy_pagure_url/pagure/issue/1234')
+ self.assertEqual(response.upstream, 'pagure')
+ self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name',
+ 'author': 'mock_name', 'changed': None,
+ 'date_created': 'mock_date',
+ 'id': '1234'}])
+ self.assertEqual(response.tags, 'mock_tags')
+ self.assertEqual(response.fixVersion, ['Test mock_milestone'])
+ self.assertEqual(response.priority, 'mock_priority')
+ self.assertEqual(response.content, 'mock_content')
+ self.assertEqual(response.reporter, 'mock_reporter')
+ self.assertEqual(response.assignee, 'mock_assignee')
+ self.assertEqual(response.status, 'mock_status')
+ self.assertEqual(response.id, 'mock_date')
+ self.assertEqual(response.downstream, {
+ 'mock_downstream': 'mock_key',
+ 'mapping': [{'fixVersion': 'Test XXX'}]})
+
+ @mock.patch(PATH + 'matcher')
+ def test_from_github_pr_reopen(self,
+ mock_matcher):
+ """
+        This tests the 'from_github' function under the PR class
+ """
+ # Set up return values
+ mock_matcher.return_value = "JIRA-1234"
+
+ # Call the function
+ response = i.PR.from_github(
+ upstream='github',
+ pr=self.mock_github_pr,
+ suffix='reopened',
+ config=self.mock_config
+ )
+
+ # Assert that we made the calls correctly
+ self.assertEqual(response.source, 'github')
+ self.assertEqual(response.title, '[github] mock_title')
+ self.assertEqual(response.url, 'mock_url')
+ self.assertEqual(response.upstream, 'github')
+ self.assertEqual(response.comments, [{'body': 'mock_body', 'name': 'mock_name', 'author': 'mock_author',
+ 'changed': None, 'date_created': 'mock_date', 'id': 'mock_id'}])
+ self.assertEqual(response.priority, None)
+ self.assertEqual(response.content, 'mock_content')
+ self.assertEqual(response.reporter, 'mock_reporter')
+ self.assertEqual(response.assignee, 'mock_assignee')
+ self.assertEqual(response.status, None)
+ self.assertEqual(response.id, '1234')
+ self.assertEqual(response.suffix, 'reopened')
+ self.assertEqual(response.downstream, {'mock_downstream': 'mock_key'})
+ self.assertEqual(response.jira_key, "JIRA-1234")
+ self.mock_github_pr['comments'][0]['changed'] = None
+ mock_matcher.assert_called_with(self.mock_github_pr['body'], self.mock_github_pr['comments'])
+
+ @mock.patch(PATH + 'datetime')
+ @mock.patch(PATH + 'matcher')
+ def test_from_pagure_pr_reopen(self,
+ mock_matcher,
+ mock_datetime):
+ """
+        This tests the 'from_pagure' function under the PR class
+ """
+ # Set up return values
+ mock_matcher.return_value = "JIRA-1234"
+ mock_datetime.fromtimestamp.return_value = '1234'
+
+ # Call the function
+ response = i.PR.from_pagure(
+ upstream='pagure',
+ pr=self.mock_pagure_pr,
+ suffix='reopened',
+ config=self.mock_config
+ )
+
+ # Assert that we made the calls correctly
+ formatted_comments = [{'author': 'mock_name', 'body': 'mock_body',
+ 'name': 'mock_name', 'id': '1234',
+ 'date_created': '1234',
+ 'changed': None}]
+ self.assertEqual(response.source, 'pagure')
+ self.assertEqual(response.title, '[pagure] mock_title')
+ self.assertEqual(response.url, 'https://pagure.io/mock_project_name/pull-request/1234')
+ self.assertEqual(response.upstream, 'pagure')
+ self.assertEqual(response.comments, formatted_comments)
+ self.assertEqual(response.priority, None)
+ self.assertEqual(response.content, 'mock_content_initial')
+ self.assertEqual(response.reporter, 'mock_reporter')
+ self.assertEqual(response.assignee, 'mock_assignee')
+ self.assertEqual(response.status, 'mock_status')
+ self.assertEqual(response.id, '1234')
+ self.assertEqual(response.suffix, 'reopened')
+ self.assertEqual(response.downstream, {'mock_downstream': 'mock_key'})
+ self.assertEqual(response.jira_key, "JIRA-1234")
+ self.mock_pagure_pr['comments'][0]['changed'] = None
+ mock_datetime.fromtimestamp.assert_called_with(float(1234))
+ mock_matcher.assert_called_with(self.mock_pagure_pr['initial_comment'], formatted_comments)
+
+
+ # TODO: Add new tests from PR
\ No newline at end of file
diff --git a/tests/test_main.py b/tests/test_main.py
new file mode 100644
index 0000000..0c76be6
--- /dev/null
+++ b/tests/test_main.py
@@ -0,0 +1,579 @@
+import mock
+import unittest
+try:
+    # Python 3.3+
+ from unittest.mock import MagicMock # noqa: F401
+except ImportError:
+ from mock import MagicMock # noqa: F401
+
+import sync2jira.main as m
+
+
+PATH = 'sync2jira.main.'
+
+
+class TestMain(unittest.TestCase):
+ """
+ This class tests the main.py file under sync2jira
+ """
+ def setUp(self):
+ """
+ Set up the testing environment
+ """
+ # Mock Config dict
+ self.mock_config = {
+ 'sync2jira': {
+ 'jira': {
+ 'mock_jira_instance': {'mock_jira': 'mock_jira'}
+ },
+ 'confluence_statistics': True,
+ 'testing': {},
+ 'legacy_matching': False,
+ 'map': {
+ 'pagure': {'key_pagure': {'sync': ['issue', 'pullrequest']}},
+ 'github': {'key_github': {'sync': ['issue', 'pullrequest']}}
+ },
+ 'initialize': True,
+ 'listen': True,
+ 'develop': False,
+ },
+ }
+
+ # Mock Fedmsg Message
+ self.mock_message = {
+ 'msg_id': 'mock_id',
+ 'msg': {'issue': 'mock_issue'}
+ }
+
+ def _check_for_exception(self, loader, target, exc=ValueError):
+ try:
+ m.load_config(loader)
+ assert False, "Exception expected."
+ except exc as e:
+ self.assertIn(target, repr(e))
+
+ def test_config_validate_empty(self):
+ loader = lambda: {}
+ self._check_for_exception(loader, 'No sync2jira section')
+
+ def test_config_validate_missing_map(self):
+ loader = lambda: {'sync2jira': {}}
+ self._check_for_exception(loader, 'No sync2jira.map section')
+
+ def test_config_validate_mispelled_mappings(self):
+ loader = lambda: {'sync2jira': {'map': {'pageur': {}}}, 'jira': {}}
+ self._check_for_exception(loader, 'Specified handlers: "pageur", must')
+
+ def test_config_validate_missing_jira(self):
+ loader = lambda: {'sync2jira': {'map': {'pagure': {}}}}
+ self._check_for_exception(loader, 'No sync2jira.jira section')
+
+ def test_config_validate_all_good(self):
+ loader = lambda: {'sync2jira': {'map': {'pagure': {}}, 'jira': {}}}
+ m.load_config(loader) # ahhh, no exception.
+
+ @mock.patch(PATH + 'u_issue')
+ @mock.patch(PATH + 'd_issue')
+ @mock.patch(PATH + 'load_config')
+ def test_close_duplicates(self,
+ mock_load_config,
+ mock_d,
+ mock_u):
+ """
+ This tests the 'close_duplicates' function where everything goes smoothly
+ """
+ # Set up return values
+ mock_load_config.return_value = self.mock_config
+ mock_u.pagure_issues.return_value = ['mock_issue_github']
+ mock_u.github_issues.return_value = ['mock_issue_pagure']
+
+ # Call the function
+ m.close_duplicates()
+
+ # Assert everything was called correctly
+ mock_load_config.assert_called_once()
+ mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config)
+ mock_u.github_issues.assert_called_with('key_github', self.mock_config)
+ mock_d.close_duplicates.assert_any_call('mock_issue_github', self.mock_config)
+ mock_d.close_duplicates.assert_any_call('mock_issue_pagure', self.mock_config)
+
+ @mock.patch(PATH + 'u_issue')
+ @mock.patch(PATH + 'd_issue')
+ @mock.patch(PATH + 'load_config')
+ def test_close_duplicates_errors(self,
+ mock_load_config,
+ mock_d,
+ mock_u):
+ """
+ This tests the 'close_duplicates' function where closing duplicates raises an exception
+ """
+ # Set up return values
+ mock_load_config.return_value = self.mock_config
+ mock_u.pagure_issues.return_value = ['mock_issue']
+ mock_u.github_issues.return_value = ['mock_issue']
+ mock_d.close_duplicates.side_effect = Exception()
+
+ # Call the function
+ with self.assertRaises(Exception):
+ m.close_duplicates()
+
+ # Assert everything was called correctly
+ mock_load_config.assert_called_once()
+ mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config)
+ mock_u.github_issues.assert_not_called()
+ mock_d.close_duplicates.assert_called_with('mock_issue', self.mock_config)
+
+ @mock.patch(PATH + 'load_config')
+ @mock.patch(PATH + 'u_issue')
+ def test_list_managed(self,
+ mock_u,
+ mock_load_config):
+ """
+ This tests the 'list_managed' function
+ """
+ # Set up return values
+ mock_load_config.return_value = self.mock_config
+
+ # Call the function
+ m.list_managed()
+
+ # Assert everything was called correctly
+ mock_load_config.assert_called_once()
+ mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config)
+ mock_u.github_issues.assert_called_with('key_github', self.mock_config)
+
+ @mock.patch(PATH + 'initialize_recent')
+ @mock.patch(PATH + 'report_failure')
+ @mock.patch(PATH + 'INITIALIZE', 1)
+ @mock.patch(PATH + 'confluence_client')
+ @mock.patch(PATH + 'initialize_issues')
+ @mock.patch(PATH + 'initialize_pr')
+ @mock.patch(PATH + 'load_config')
+ @mock.patch(PATH + 'listen')
+ def test_main_initialize(self,
+ mock_listen,
+ mock_load_config,
+ mock_initialize_pr,
+ mock_initialize_issues,
+ mock_confluence_client,
+ mock_report_failure,
+ mock_initialize_recent):
+ """
+ This tests the 'main' function
+ """
+ # Set up return values
+ mock_load_config.return_value = self.mock_config
+ self.mock_config['sync2jira']['confluence_statistics'] = True
+
+ # Call the function
+ m.main()
+
+ # Assert everything was called correctly
+ mock_load_config.assert_called_once()
+ mock_listen.assert_called_with(self.mock_config)
+ mock_listen.assert_called_with(self.mock_config)
+ mock_initialize_issues.assert_called_with(self.mock_config)
+ mock_initialize_pr.assert_called_with(self.mock_config)
+ mock_report_failure.assert_not_called()
+ mock_initialize_recent.assert_not_called()
+ mock_confluence_client.update_stat_value.assert_called_with(True)
+
+ @mock.patch(PATH + 'confluence_client')
+ @mock.patch(PATH + 'initialize_recent')
+ @mock.patch(PATH + 'report_failure')
+ @mock.patch(PATH + 'INITIALIZE', 0)
+ @mock.patch(PATH + 'initialize_issues')
+ @mock.patch(PATH + 'initialize_pr')
+ @mock.patch(PATH + 'load_config')
+ @mock.patch(PATH + 'listen')
+ def test_main_no_initialize(self,
+ mock_listen,
+ mock_load_config,
+ mock_initialize_pr,
+ mock_initialize_issues,
+ mock_report_failure,
+ mock_initialize_recent,
+ mock_confluence_client,):
+ """
+ This tests the 'main' function
+ """
+ # Set up return values
+ mock_load_config.return_value = self.mock_config
+
+ # Call the function
+ m.main()
+
+ # Assert everything was called correctly
+ mock_load_config.assert_called_once()
+ mock_listen.assert_called_with(self.mock_config)
+ mock_listen.assert_called_with(self.mock_config)
+ mock_initialize_issues.assert_not_called()
+ mock_initialize_pr.assert_not_called()
+ mock_report_failure.assert_not_called()
+ mock_initialize_recent.assert_called_with(self.mock_config)
+ mock_confluence_client.update_stat_value.assert_called_with(True)
+
+ @mock.patch(PATH + 'u_issue')
+ @mock.patch(PATH + 'd_issue')
+ def test_initialize(self,
+ mock_d,
+ mock_u):
+ """
+ This tests 'initialize' function where everything goes smoothly!
+ """
+ # Set up return values
+ mock_u.pagure_issues.return_value = ['mock_issue_pagure']
+ mock_u.github_issues.return_value = ['mock_issue_github']
+
+ # Call the function
+ m.initialize_issues(self.mock_config)
+
+ # Assert everything was called correctly
+ mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config)
+ mock_u.github_issues.assert_called_with('key_github', self.mock_config)
+ mock_d.sync_with_jira.assert_any_call('mock_issue_pagure', self.mock_config)
+ mock_d.sync_with_jira.assert_any_call('mock_issue_github', self.mock_config)
+
+ @mock.patch(PATH + 'u_issue')
+ @mock.patch(PATH + 'd_issue')
+ def test_initialize_repo_name_pagure(self,
+ mock_d,
+ mock_u):
+ """
+ This tests 'initialize' function where we want to sync an individual repo for Pagure
+ """
+ # Set up return values
+ mock_u.pagure_issues.return_value = ['mock_issue_pagure']
+ mock_u.github_issues.return_value = ['mock_issue_github']
+
+ # Call the function
+ m.initialize_issues(self.mock_config, repo_name='key_pagure')
+
+ # Assert everything was called correctly
+ mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config)
+ mock_u.github_issues.assert_not_called()
+ mock_d.sync_with_jira.assert_called_with('mock_issue_pagure', self.mock_config)
+
+ @mock.patch(PATH + 'u_issue')
+ @mock.patch(PATH + 'd_issue')
+ def test_initialize_repo_name_github(self,
+ mock_d,
+ mock_u):
+ """
+ This tests 'initialize' function where we want to sync an individual repo for GitHub
+ """
+ # Set up return values
+ mock_u.pagure_issues.return_value = ['mock_issue_pagure']
+ mock_u.github_issues.return_value = ['mock_issue_github']
+
+ # Call the function
+ m.initialize_issues(self.mock_config, repo_name='key_github')
+
+ # Assert everything was called correctly
+ mock_u.github_issues.assert_called_with('key_github', self.mock_config)
+ mock_u.pagure_issues.assert_not_called()
+ mock_d.sync_with_jira.assert_called_with('mock_issue_github', self.mock_config)
+
+ @mock.patch(PATH + 'u_issue')
+ @mock.patch(PATH + 'd_issue')
+ def test_initialize_errors(self,
+ mock_d,
+ mock_u):
+ """
+ This tests 'initialize' function where syncing with JIRA throws an exception
+ """
+ # Set up return values
+ mock_u.pagure_issues.return_value = ['mock_issue_pagure']
+ mock_u.github_issues.return_value = ['mock_issue_github']
+ mock_d.sync_with_jira.side_effect = Exception()
+
+ # Call the function
+ with self.assertRaises(Exception):
+ m.initialize_issues(self.mock_config)
+
+ # Assert everything was called correctly
+ mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config)
+ mock_d.sync_with_jira.assert_any_call('mock_issue_pagure', self.mock_config)
+
+ @mock.patch(PATH + 'u_issue')
+ @mock.patch(PATH + 'd_issue')
+ @mock.patch(PATH + 'sleep')
+ @mock.patch(PATH + 'report_failure')
+ def test_initialize_api_limit(self,
+ mock_report_failure,
+ mock_sleep,
+ mock_d,
+ mock_u):
+ """
+        This tests 'initialize' where we get a GitHub API limit error.
+ """
+ # Set up return values
+ mock_error = MagicMock(side_effect=Exception('API rate limit exceeded'))
+ mock_u.pagure_issues.return_value = ['mock_issue_pagure']
+ mock_u.github_issues.side_effect = mock_error
+
+ # Call the function
+ m.initialize_issues(self.mock_config, testing=True)
+
+ # Assert everything was called correctly
+ mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config)
+ mock_d.sync_with_jira.assert_any_call('mock_issue_pagure', self.mock_config)
+ mock_u.github_issues.assert_called_with('key_github', self.mock_config)
+ mock_sleep.assert_called_with(3600)
+ mock_report_failure.assert_not_called()
+
+ @mock.patch(PATH + 'u_issue')
+ @mock.patch(PATH + 'd_issue')
+ @mock.patch(PATH + 'sleep')
+ @mock.patch(PATH + 'report_failure')
+ def test_initialize_github_error(self,
+ mock_report_failure,
+ mock_sleep,
+ mock_d,
+ mock_u):
+ """
+ This tests 'initialize' where we get a GitHub API (not limit) error.
+ """
+ # Set up return values
+ mock_error = MagicMock(side_effect=Exception('Random Error'))
+ mock_u.pagure_issues.return_value = ['mock_issue_pagure']
+ mock_u.github_issues.side_effect = mock_error
+
+ # Call the function
+ with self.assertRaises(Exception):
+ m.initialize_issues(self.mock_config, testing=True)
+
+ # Assert everything was called correctly
+ mock_u.pagure_issues.assert_called_with('key_pagure', self.mock_config)
+ mock_d.sync_with_jira.assert_any_call('mock_issue_pagure', self.mock_config)
+ mock_u.github_issues.assert_called_with('key_github', self.mock_config)
+ mock_sleep.assert_not_called()
+ mock_report_failure.assert_called_with(self.mock_config)
+
+ @mock.patch(PATH + 'handle_msg')
+ @mock.patch(PATH + 'fedmsg')
+ def test_listen_no_handlers(self,
+ mock_fedmsg,
+ mock_handle_msg):
+ """
+ Test 'listen' function where suffix is not in handlers
+ """
+ # Set up return values
+ mock_fedmsg.tail_messages.return_value = [("dummy", "dummy", "mock_topic", self.mock_message)]
+
+ # Call the function
+ m.listen(self.mock_config)
+
+ # Assert everything was called correctly
+ mock_handle_msg.assert_not_called()
+
+ @mock.patch(PATH + 'handle_msg')
+ @mock.patch(PATH + 'issue_handlers')
+ @mock.patch(PATH + 'fedmsg')
+ def test_listen_no_issue(self,
+ mock_fedmsg,
+ mock_handlers_issue,
+ mock_handle_msg):
+ """
+ Test 'listen' function where the handler returns none
+ """
+ # Set up return values
+ mock_handlers_issue['github.issue.comment'].return_value = None
+ mock_fedmsg.tail_messages.return_value = [("dummy", "dummy", "d.d.d.pagure.issue.drop", self.mock_message)]
+
+ # Call the function
+ m.listen(self.mock_config)
+
+ # Assert everything was called correctly
+ mock_handle_msg.assert_not_called()
+
+ @mock.patch(PATH + 'handle_msg')
+ @mock.patch(PATH + 'issue_handlers')
+ @mock.patch(PATH + 'fedmsg')
+ def test_listen(self,
+ mock_fedmsg,
+ mock_handlers_issue,
+ mock_handle_msg):
+ """
+ Test 'listen' function where everything goes smoothly
+ """
+ # Set up return values
+ mock_handlers_issue['github.issue.comment'].return_value = 'dummy_issue'
+ mock_fedmsg.tail_messages.return_value = [("dummy", "dummy", "d.d.d.github.issue.comment", self.mock_message)]
+
+ # Call the function
+ m.listen(self.mock_config)
+
+ # Assert everything was called correctly
+ mock_handle_msg.assert_called_with(
+ self.mock_message,
+ 'github.issue.comment', self.mock_config)
+
+ @mock.patch(PATH + 'send_mail')
+ @mock.patch(PATH + 'jinja2')
+ def test_report_failure(self,
+ mock_jinja2,
+ mock_send_mail):
+ """
+ Tests 'report_failure' function
+ """
+ # Set up return values
+ mock_templateLoader = MagicMock()
+ mock_templateEnv = MagicMock()
+ mock_template = MagicMock()
+ mock_template.render.return_value = 'mock_html'
+ mock_templateEnv.get_template.return_value = mock_template
+ mock_jinja2.FileSystemLoader.return_value = mock_templateLoader
+ mock_jinja2.Environment.return_value = mock_templateEnv
+
+ # Call the function
+ m.report_failure({'sync2jira': {'mailing-list': 'mock_email'}})
+
+ # Assert everything was called correctly
+ mock_send_mail.assert_called_with(cc=None,
+ recipients=['mock_email'],
+ subject='Sync2Jira Has Failed!',
+ text='mock_html')
+
+ @mock.patch(PATH + 'u_issue')
+ @mock.patch(PATH + 'd_issue')
+ def test_handle_msg_no_handlers(self,
+ mock_d,
+ mock_u):
+ """
+ Tests 'handle_msg' function where there are no handlers
+ """
+ # Call the function
+ m.handle_msg(self.mock_message, 'no_handler', self.mock_config)
+
+ # Assert everything was called correctly
+ mock_d.sync_with_jira.assert_not_called()
+ mock_u.handle_github_message.assert_not_called()
+ mock_u.handle_pagure_message.assert_not_called()
+
+ @mock.patch(PATH + 'issue_handlers')
+ @mock.patch(PATH + 'u_issue')
+ @mock.patch(PATH + 'd_issue')
+ def test_handle_msg_no_issue(self,
+ mock_d,
+ mock_u,
+ mock_handlers_issue):
+ """
+ Tests 'handle_msg' function where there is no issue
+ """
+ # Set up return values
+ mock_handlers_issue['github.issue.comment'].return_value = None
+
+ # Call the function
+ m.handle_msg(self.mock_message, 'github.issue.comment', self.mock_config)
+
+ # Assert everything was called correctly
+ mock_d.sync_with_jira.assert_not_called()
+ mock_u.handle_github_message.assert_not_called()
+ mock_u.handle_pagure_message.assert_not_called()
+
+ @mock.patch(PATH + 'issue_handlers')
+ @mock.patch(PATH + 'u_issue')
+ @mock.patch(PATH + 'd_issue')
+ def test_handle_msg(self,
+ mock_d,
+ mock_u,
+ mock_handlers_issue):
+ """
+ Tests 'handle_msg' function
+ """
+ # Set up return values
+ mock_handlers_issue['github.issue.comment'].return_value = 'dummy_issue'
+ mock_u.handle_github_message.return_value = 'dummy_issue'
+
+ # Call the function
+ m.handle_msg(self.mock_message, 'github.issue.comment', self.mock_config)
+
+ # Assert everything was called correctly
+ mock_d.sync_with_jira.assert_called_with('dummy_issue', self.mock_config)
+ mock_u.handle_pagure_message.assert_not_called()
+
+ @mock.patch(PATH + 'handle_msg')
+ @mock.patch(PATH + 'query')
+ def test_initialize_recent(self,
+ mock_query,
+ mock_handle_msg):
+ """
+ Tests 'initialize_recent' function
+ """
+ # Set up return values
+ mock_query.return_value = [{
+ 'topic': 'm.m.m.github.issue.comment',
+ 'msg': 'mock_msg'
+
+ }]
+
+ # Call the function
+ m.initialize_recent(self.mock_config)
+
+ # Assert everything was called correctly
+ mock_handle_msg.assert_called_with({'msg': 'mock_msg'}, 'github.issue.comment', self.mock_config)
+
+ @mock.patch(PATH + 'handle_msg')
+ @mock.patch(PATH + 'query')
+ def test_initialize_recent_no_handler(self,
+ mock_query,
+ mock_handle_msg):
+ """
+ Tests 'initialize_recent' function where the topic is not for a valid handler
+ """
+ # Set up return values
+ mock_query.return_value = [{
+ 'topic': 'm.m.m.bad.topic',
+ 'msg': 'mock_msg'
+
+ }]
+
+ # Call the function
+ m.initialize_recent(self.mock_config)
+
+ # Assert everything was called correctly
+ mock_handle_msg.assert_not_called()
+
+ @mock.patch(PATH + 'get')
+ def test_query(self,
+ mock_get):
+ """
+ Tests 'query' function
+ """
+ # Set up return values
+ mock_get.return_value = {
+ 'raw_messages': ['test_msg'],
+ 'count': 1,
+ 'total': 1
+ }
+ # Call the function
+ response = m.query()
+
+ # Assert everything was called correctly
+ mock_get.assert_called_with(params={'order': 'asc'})
+ self.assertEqual(response, ['test_msg'])
+
+ @mock.patch(PATH + 'HTTPKerberosAuth')
+ @mock.patch(PATH + 'requests')
+ def test_get(self,
+ mock_requests,
+ mock_kerberos_auth):
+ """
+ Tests 'get' function
+ """
+ # Set up return values
+ mock_response = MagicMock()
+ mock_response.json.return_value = 'mock_return_value'
+ mock_requests.get.return_value = mock_response
+
+ # Call the function
+ response = m.get('mock_params')
+
+ # Assert everything was called correctly
+ self.assertEqual(response, 'mock_return_value')
+ mock_requests.get.assert_called_with(
+ auth=mock_kerberos_auth(),
+ headers={'Accept': 'application/json'},
+ params='mock_params',
+ url=m.DATAGREPPER_URL)
diff --git a/tests/test_upstream_issue.py b/tests/test_upstream_issue.py
new file mode 100644
index 0000000..a9ce9ae
--- /dev/null
+++ b/tests/test_upstream_issue.py
@@ -0,0 +1,597 @@
+import mock
+import unittest
+try:
+ # Python 3.3 >
+ from unittest.mock import MagicMock # noqa: F401
+except ImportError:
+ from mock import MagicMock # noqa: F401
+
+
+import sync2jira.upstream_issue as u
+
+
+PATH = 'sync2jira.upstream_issue.'
+
+
+class TestUpstreamIssue(unittest.TestCase):
+ """
+ This class tests the upstream_issue.py file under sync2jira
+ """
+ def setUp(self):
+ self.mock_config = {
+ 'sync2jira': {
+ 'map': {
+ 'github': {
+ 'org/repo': {'sync': ['issue']},
+ },
+ 'pagure': {
+ 'org/repo': {'sync': ['issue']},
+ },
+ },
+ 'jira': {
+ # Nothing, really..
+ },
+ 'filters': {
+ 'github':
+ {'org/repo': {'filter1': 'filter1', 'labels': 'custom_tag'}},
+ 'pagure':
+ {'org/repo': {'filter1': 'filter1', 'tags': ['custom_tag']}},
+ },
+ 'github_token': 'mock_token'
+ },
+ }
+ # Mock Pagure Message
+ self.mock_pagure_message = {
+ 'msg': {
+ 'project': {
+ 'name': 'org/repo'
+ },
+ 'issue': {
+ 'filter1': 'filter1',
+ 'tags': ['custom_tag'],
+ 'comments': [],
+ 'assignee': 'mock_assignee'
+ },
+ 'tags': ['new_tag'],
+ 'comment': 'new_comment',
+ 'status': 'temp'
+ },
+ 'topic': 'io.pagure.prod.pagure.issue.drop',
+ }
+
+ # Mock Github Comment
+ self.mock_github_comment = MagicMock()
+ self.mock_github_comment.user.name = 'mock_username'
+ self.mock_github_comment.user.login = 'mock_user_login'
+ self.mock_github_comment.body = 'mock_body'
+ self.mock_github_comment.id = 'mock_id'
+ self.mock_github_comment.created_at = 'mock_created_at'
+
+ # Mock Github Message
+ self.mock_github_message = {
+ 'msg': {
+ 'repository': {
+ 'owner': {
+ 'login': 'org'
+ },
+ 'name': 'repo'
+ },
+ 'issue': {
+ 'filter1': 'filter1',
+ 'labels': [{'name': 'custom_tag'}],
+ 'comments': ['some_comments!'],
+ 'number': 'mock_number',
+ 'user': {
+ 'login': 'mock_login'
+ },
+ 'assignees': [{'login': 'mock_login'}],
+ 'milestone': {
+ 'title': 'mock_milestone'
+ }
+ }
+ }
+ }
+
+ # Mock github issue
+ self.mock_github_issue = MagicMock()
+ self.mock_github_issue.get_comments.return_value = [self.mock_github_comment]
+
+ # Mock Github Issue Raw
+ self.mock_github_issue_raw = {
+ 'comments': ['some comment'],
+ 'number': '1234',
+ 'user': {
+ 'login': 'mock_login'
+ },
+ 'assignees': [{'login': 'mock_assignee_login'}],
+ 'labels': [{'name': 'some_label'}],
+ 'milestone': {
+ 'title': 'mock_milestone'
+ }
+ }
+
+ # Mock Github Reporter
+ self.mock_github_person = MagicMock()
+ self.mock_github_person.name = 'mock_name'
+
+ # Mock Github Repo
+ self.mock_github_repo = MagicMock()
+ self.mock_github_repo.get_issue.return_value = self.mock_github_issue
+
+ # Mock Github Client
+ self.mock_github_client = MagicMock()
+ self.mock_github_client.get_repo.return_value = self.mock_github_repo
+ self.mock_github_client.get_user.return_value = self.mock_github_person
+
+ @mock.patch('sync2jira.intermediary.Issue.from_github')
+ @mock.patch(PATH + 'Github')
+ @mock.patch(PATH + 'get_all_github_data')
+ def test_github_issues(self,
+ mock_get_all_github_data,
+ mock_github,
+ mock_issue_from_github):
+ """
+ This function tests 'github_issues' function
+ """
+ # Set up return values
+ mock_github.return_value = self.mock_github_client
+ mock_get_all_github_data.return_value = [self.mock_github_issue_raw]
+ mock_issue_from_github.return_value = 'Successful Call!'
+
+ # Call the function
+ response = list(u.github_issues(
+ upstream='org/repo',
+ config=self.mock_config
+ ))
+
+ # Assert that calls were made correctly
+ try:
+ mock_get_all_github_data.assert_called_with(
+ 'https://api.github.com/repos/org/repo/issues?labels=custom_tag&filter1=filter1',
+ {'Authorization': 'token mock_token'}
+ )
+ except AssertionError:
+ mock_get_all_github_data.assert_called_with(
+ 'https://api.github.com/repos/org/repo/issues?filter1=filter1&labels=custom_tag',
+ {'Authorization': 'token mock_token'}
+ )
+ self.mock_github_client.get_user.assert_any_call('mock_login')
+ self.mock_github_client.get_user.assert_any_call('mock_assignee_login')
+ mock_issue_from_github.assert_called_with(
+ 'org/repo',
+ {'labels': ['some_label'], 'number': '1234', 'comments': [
+ {'body': 'mock_body', 'name': 'mock_user_login', 'author': 'mock_username', 'changed': None,
+ 'date_created': 'mock_created_at', 'id': 'mock_id'}], 'assignees': [{'fullname': 'mock_name'}],
+ 'user': {'login': 'mock_login', 'fullname': 'mock_name'}, 'milestone': 'mock_milestone'},
+ self.mock_config
+ )
+ self.mock_github_client.get_repo.assert_called_with('org/repo')
+ self.mock_github_repo.get_issue.assert_called_with(number='1234')
+ self.mock_github_issue.get_comments.assert_any_call()
+ self.assertEqual(response[0], 'Successful Call!')
+
+ @mock.patch('sync2jira.intermediary.Issue.from_github')
+ @mock.patch(PATH + 'Github')
+ @mock.patch(PATH + 'get_all_github_data')
+ def test_github_issues_no_token(self,
+ mock_get_all_github_data,
+ mock_github,
+ mock_issue_from_github):
+ """
+ This function tests 'github_issues' function where we have no github token
+ and no comments
+ """
+ # Set up return values
+ self.mock_config['sync2jira']['github_token'] = None
+ self.mock_github_issue_raw['comments'] = 0
+ mock_github.return_value = self.mock_github_client
+ mock_get_all_github_data.return_value = [self.mock_github_issue_raw]
+ mock_issue_from_github.return_value = 'Successful Call!'
+
+ # Call the function
+ response = list(u.github_issues(
+ upstream='org/repo',
+ config=self.mock_config
+ ))
+
+ # Assert that calls were made correctly
+ try:
+ mock_get_all_github_data.assert_called_with(
+ 'https://api.github.com/repos/org/repo/issues?labels=custom_tag&filter1=filter1',
+ {}
+ )
+ except AssertionError:
+ mock_get_all_github_data.assert_called_with(
+ 'https://api.github.com/repos/org/repo/issues?filter1=filter1&labels=custom_tag',
+ {}
+ )
+ self.mock_github_client.get_user.assert_any_call('mock_login')
+ self.mock_github_client.get_user.assert_any_call('mock_assignee_login')
+ mock_issue_from_github.assert_called_with(
+ 'org/repo',
+ {'labels': ['some_label'], 'number': '1234', 'comments': [], 'assignees': [{'fullname': 'mock_name'}],
+ 'user': {'login': 'mock_login', 'fullname': 'mock_name'}, 'milestone': 'mock_milestone'},
+ self.mock_config
+ )
+ self.assertEqual(response[0], 'Successful Call!')
+ self.mock_github_client.get_repo.assert_not_called()
+ self.mock_github_repo.get_issue.assert_not_called()
+ self.mock_github_issue.get_comments.assert_not_called()
+
+ @mock.patch('sync2jira.intermediary.Issue.from_pagure')
+ @mock.patch(PATH + 'requests')
+ def test_pagure_issues_error(self,
+ mock_requests,
+ mock_issue_from_pagure):
+ """
+ This function tests 'pagure_issues' function where we get an IOError
+ """
+ # Set up return values
+ get_return = MagicMock()
+ get_return.__bool__ = mock.Mock(return_value=False)
+ get_return.__nonzero__ = get_return.__bool__
+ get_return.json.side_effect = Exception()
+ get_return.text.return_value = {
+ 'issues': [
+ {'assignee': 'mock_assignee'}
+ ]
+
+ }
+ mock_requests.get.return_value = get_return
+
+ # Call the function
+ with self.assertRaises(IOError):
+ list(u.pagure_issues(
+ upstream='org/repo',
+ config=self.mock_config
+ ))
+
+ # Assert everything was called correctly
+ mock_requests.get.assert_called_with(
+ 'https://pagure.io/api/0/org/repo/issues',
+ params={'filter1': 'filter1', 'tags': ['custom_tag']}
+ )
+ mock_issue_from_pagure.assert_not_called()
+
+ @mock.patch('sync2jira.intermediary.Issue.from_pagure')
+ @mock.patch(PATH + 'requests')
+ def test_pagure_issues(self,
+ mock_requests,
+ mock_issue_from_pagure):
+ """
+ This function tests 'pagure_issues' function
+ """
+ # Set up return values
+ get_return = MagicMock()
+ get_return.json.return_value = {
+ 'issues': [
+ {'assignee': 'mock_assignee'}
+ ]
+
+ }
+ get_return.request.url = 'mock_url'
+ mock_requests.get.return_value = get_return
+ mock_issue_from_pagure.return_value = 'Successful Call!'
+
+ # Call the function
+ response = list(u.pagure_issues(
+ upstream='org/repo',
+ config=self.mock_config
+ ))
+
+ # Assert everything was called correctly
+ self.assertEqual(response[0], 'Successful Call!')
+ mock_requests.get.assert_called_with(
+ 'https://pagure.io/api/0/org/repo/issues',
+ params={'filter1': 'filter1', 'tags': ['custom_tag']}
+ )
+ mock_issue_from_pagure.assert_called_with(
+ 'org/repo',
+ {'assignee': ['mock_assignee']},
+ self.mock_config
+ )
+
+ @mock.patch('sync2jira.intermediary.Issue.from_pagure')
+ def test_handle_pagure_message_not_in_mapped(self,
+ mock_issue_from_pagure):
+ """
+ This function tests 'handle_pagure_message' where upstream is not in mapped repo
+ """
+ # Set up return values
+ self.mock_pagure_message['msg']['project']['name'] = 'bad_repo'
+ # Call the function
+ response = u.handle_pagure_message(
+ msg=self.mock_pagure_message,
+ config=self.mock_config
+ )
+
+ # Assert all calls made correctly
+ self.assertEqual(None, response)
+ mock_issue_from_pagure.assert_not_called()
+
+ @mock.patch('sync2jira.intermediary.Issue.from_pagure')
+ def test_handle_pagure_message_bad_filter(self,
+ mock_issue_from_pagure):
+ """
+ This function tests 'handle_pagure_message' where the actual issue value does not match the configured filter
+ """
+ # Set up return values
+ self.mock_pagure_message['msg']['issue']['filter1'] = 'filter2'
+
+ # Call function
+ response = u.handle_pagure_message(
+ msg=self.mock_pagure_message,
+ config=self.mock_config)
+
+ # Assert that calls were made correctly
+ mock_issue_from_pagure.assert_not_called()
+ self.assertEqual(None, response)
+
+ @mock.patch('sync2jira.intermediary.Issue.from_pagure')
+ def test_handle_pagure_message_bad_tag(self,
+ mock_issue_from_pagure):
+ """
+ This function tests 'handle_pagure_message' where the tags do not match
+ """
+ # Set up return values
+ self.mock_pagure_message['msg']['issue']['tags'] = ['bad_tags']
+
+ # Call function
+ response = u.handle_pagure_message(
+ msg=self.mock_pagure_message,
+ config=self.mock_config)
+
+ # Assert that calls were made correctly
+ mock_issue_from_pagure.assert_not_called()
+ self.assertEqual(None, response)
+
+ @mock.patch('sync2jira.intermediary.Issue.from_pagure')
+ def test_handle_pagure_message_successful(self,
+ mock_issue_from_pagure):
+ """
+ This function tests 'handle_pagure_message' where everything goes smoothly
+ and we test edge cases!
+ """
+ # Set up return values
+ mock_issue_from_pagure.return_value = "Successful Call!"
+
+ # Call the function
+ response = u.handle_pagure_message(
+ msg=self.mock_pagure_message,
+ config=self.mock_config
+ )
+
+ # Assert that calls were made correctly
+ mock_issue_from_pagure.assert_called_with(
+ 'org/repo',
+ {'status': 'Dropped', 'assignee': ['mock_assignee'], 'filter1': 'filter1', 'comments': ['new_comment'],
+ 'tags': ['custom_tag', 'new_tag']},
+ self.mock_config
+ )
+ self.assertEqual(response, 'Successful Call!')
+
+ @mock.patch(PATH + 'Github')
+ @mock.patch('sync2jira.intermediary.Issue.from_github')
+ def test_handle_github_message_not_in_mapped(self,
+ mock_issue_from_github,
+ mock_github):
+ """
+ This function tests 'handle_github_message' where upstream is not in mapped repos
+ """
+ # Set up return values
+ self.mock_github_message['msg']['repository']['owner']['login'] = 'bad_owner'
+
+ # Call the function
+ response = u.handle_github_message(
+ msg=self.mock_github_message,
+ config=self.mock_config
+ )
+
+ # Assert that all calls were made correctly
+ mock_issue_from_github.assert_not_called()
+ mock_github.assert_not_called()
+ self.assertEqual(None, response)
+
+ @mock.patch(PATH + 'Github')
+ @mock.patch('sync2jira.intermediary.Issue.from_github')
+ def test_handle_github_message_pull_request(self,
+ mock_issue_from_github,
+ mock_github):
+ """
+ This function tests 'handle_github_message' where the issue is a pull request comment
+ """
+ # Set up return values
+ self.mock_github_message['msg']['issue'] = {'pull_request': 'test'}
+
+ # Call the function
+ response = u.handle_github_message(
+ msg=self.mock_github_message,
+ config=self.mock_config
+ )
+
+ # Assert that all calls were made correctly
+ mock_issue_from_github.assert_not_called()
+ mock_github.assert_not_called()
+ self.assertEqual(None, response)
+
+ @mock.patch('sync2jira.intermediary.Issue.from_github')
+ def test_handle_github_message_bad_filter(self,
+ mock_issue_from_github):
+ """
+ This function tests 'handle_github_message' where the actual issue value does not match the configured filter
+ """
+ # Set up return values
+ self.mock_github_message['msg']['issue']['filter1'] = 'filter2'
+
+ # Call function
+ response = u.handle_github_message(
+ msg=self.mock_github_message,
+ config=self.mock_config
+ )
+ # Assert that calls were made correctly
+ mock_issue_from_github.assert_not_called()
+ self.assertEqual(None, response)
+
+ @mock.patch('sync2jira.intermediary.Issue.from_github')
+ def test_handle_github_message_bad_label(self,
+ mock_issue_from_github):
+ """
+ This function tests 'handle_github_message' where the issue label does not match the configured label filter
+ """
+ # Set up return values
+ self.mock_github_message['msg']['issue']['labels'] = [{'name': 'bad_label'}]
+
+ # Call function
+ response = u.handle_github_message(
+ msg=self.mock_github_message,
+ config=self.mock_config
+ )
+ # Assert that calls were made correctly
+ mock_issue_from_github.assert_not_called()
+ self.assertEqual(None, response)
+
+ @mock.patch(PATH + 'Github')
+ @mock.patch('sync2jira.intermediary.Issue.from_github')
+ def test_handle_github_message_no_comments(self,
+ mock_issue_from_github,
+ mock_github):
+ """
+ This function tests 'handle_github_message' where we have no comments
+ """
+ # Set up return values
+ mock_issue_from_github.return_value = "Successful Call!"
+ mock_github.return_value = self.mock_github_client
+ self.mock_github_message['msg']['issue']['comments'] = 0
+
+ # Call function
+ response = u.handle_github_message(
+ msg=self.mock_github_message,
+ config=self.mock_config
+ )
+ # Assert that calls were made correctly
+ mock_issue_from_github.assert_called_with('org/repo',
+ {'labels': ['custom_tag'], 'number': 'mock_number',
+ 'comments': [], 'assignees': [{'fullname': 'mock_name'}],
+ 'filter1': 'filter1',
+ 'user': {'login': 'mock_login', 'fullname': 'mock_name'},
+ 'milestone': 'mock_milestone'},
+ self.mock_config)
+ mock_github.assert_called_with('mock_token', retry=5)
+ self.assertEqual('Successful Call!', response)
+ self.mock_github_client.get_repo.assert_not_called()
+ self.mock_github_repo.get_issue.assert_not_called()
+ self.mock_github_issue.get_comments.assert_not_called()
+ self.mock_github_client.get_user.assert_called_with('mock_login')
+
+ @mock.patch(PATH + 'Github')
+ @mock.patch('sync2jira.intermediary.Issue.from_github')
+ def test_handle_github_message_successful(self,
+ mock_issue_from_github,
+ mock_github):
+ """
+ This function tests 'handle_github_message' where everything goes smoothly!
+ """
+ # Set up return values
+ mock_issue_from_github.return_value = "Successful Call!"
+ mock_github.return_value = self.mock_github_client
+
+ # Call function
+ response = u.handle_github_message(
+ msg=self.mock_github_message,
+ config=self.mock_config
+ )
+
+ # Assert that calls were made correctly
+ mock_issue_from_github.assert_called_with('org/repo',
+ {'labels': ['custom_tag'], 'number': 'mock_number',
+ 'comments': [{'body': 'mock_body', 'name': 'mock_user_login',
+ 'author': 'mock_username', 'changed': None,
+ 'date_created': 'mock_created_at', 'id': 'mock_id'}],
+ 'assignees': [{'fullname': 'mock_name'}],
+ 'filter1': 'filter1', 'user':
+ {'login': 'mock_login', 'fullname': 'mock_name'},
+ 'milestone': 'mock_milestone'}, self.mock_config)
+ mock_github.assert_called_with('mock_token', retry=5)
+ self.assertEqual('Successful Call!', response)
+ self.mock_github_client.get_repo.assert_called_with('org/repo')
+ self.mock_github_repo.get_issue.assert_called_with(number='mock_number')
+ self.mock_github_issue.get_comments.assert_any_call()
+ self.mock_github_client.get_user.assert_called_with('mock_login')
+
+ @mock.patch(PATH + '_fetch_github_data')
+ @mock.patch(PATH + '_github_link_field_to_dict')
+ def test_get_all_github_data(self,
+ mock_github_link_field_to_dict,
+ mock_fetch_github_data):
+ """
+ This tests the 'get_all_github_data' function
+ """
+ # Set up return values
+ get_return = MagicMock()
+ get_return.json.return_value = [{'comments_url': 'mock_comments_url'}]
+ get_return.headers = {'link': 'mock_link'}
+ mock_fetch_github_data.return_value = get_return
+
+ # Call the function
+ response = list(u.get_all_github_data(
+ url='mock_url',
+ headers='mock_headers'
+ ))
+
+ # Assert everything was called correctly
+ mock_fetch_github_data.assert_any_call('mock_url', 'mock_headers')
+ mock_fetch_github_data.assert_any_call('mock_comments_url', 'mock_headers')
+ mock_github_link_field_to_dict.assert_called_with('mock_link')
+ self.assertEqual('mock_comments_url', response[0]['comments_url'])
+
+ @mock.patch(PATH + 'requests')
+ def test_fetch_github_data_error(self,
+ mock_requests):
+ """
+ Tests the '_fetch_github_data' function where we raise an IOError
+ """
+ # Set up return values
+ get_return = MagicMock()
+ get_return.__bool__ = mock.Mock(return_value=False)
+ get_return.__nonzero__ = get_return.__bool__
+ get_return.json.side_effect = Exception()
+ get_return.text.return_value = {
+ 'issues': [
+ {'assignee': 'mock_assignee'}
+ ]
+
+ }
+ mock_requests.get.return_value = get_return
+
+ # Call the function
+ with self.assertRaises(IOError):
+ u._fetch_github_data(
+ url='mock_url',
+ headers='mock_headers'
+ )
+
+ # Assert everything was called correctly
+ mock_requests.get.assert_called_with('mock_url', headers='mock_headers')
+
+ @mock.patch(PATH + 'requests')
+ def test_fetch_github_data(self,
+ mock_requests):
+ """
+ Tests the '_fetch_github_data' function where everything goes smoothly!
+ """
+ # Set up return values
+ get_return = MagicMock()
+ get_return.__bool__ = mock.Mock(return_value=True)
+ get_return.__nonzero__ = get_return.__bool__
+ mock_requests.get.return_value = get_return
+
+ # Call the function
+
+ response = u._fetch_github_data(
+ url='mock_url',
+ headers='mock_headers'
+ )
+
+ # Assert everything was called correctly
+ mock_requests.get.assert_called_with('mock_url', headers='mock_headers')
+ self.assertEqual(response, get_return)
diff --git a/tests/test_upstream_pr.py b/tests/test_upstream_pr.py
new file mode 100644
index 0000000..1f438eb
--- /dev/null
+++ b/tests/test_upstream_pr.py
@@ -0,0 +1,336 @@
+import mock
+import unittest
+try:
+ # Python 3.3 >
+ from unittest.mock import MagicMock # noqa: F401
+except ImportError:
+ from mock import MagicMock # noqa: F401
+
+
+import sync2jira.upstream_pr as u
+
+
+PATH = 'sync2jira.upstream_pr.'
+
+
+class TestUpstreamPR(unittest.TestCase):
+ """
+ This class tests the upstream_pr.py file under sync2jira
+ """
+ def setUp(self):
+ self.mock_config = {
+ 'sync2jira': {
+ 'map': {
+ 'github': {
+ 'org/repo': {'sync': ['pullrequest']},
+ },
+ 'pagure': {
+ 'org/repo': {'sync': ['pullrequest']},
+ },
+ },
+ 'jira': {
+ # Nothing, really..
+ },
+ 'filters': {
+ 'github':
+ {'org/repo': {'filter1': 'filter1', 'labels': 'custom_tag'}},
+ 'pagure':
+ {'org/repo': {'filter1': 'filter1', 'tags': ['custom_tag']}},
+ },
+ 'github_token': 'mock_token'
+ },
+ }
+ # Mock Pagure Message
+ self.mock_pagure_message = {
+ 'msg': {
+ 'pullrequest': {
+ 'assignee': 'mock_assignee',
+ 'project': {
+ 'name': 'org/repo'
+ },
+ 'issue': {
+ 'filter1': 'filter1',
+ 'tags': ['custom_tag'],
+ 'comments': [{
+ 'date_created': '1234',
+ 'user': {
+ 'name': 'mock_name'
+ },
+ 'comment': 'mock_body',
+ 'id': '1234',
+ }],
+ 'assignee': 'mock_assignee'
+ },
+ 'tags': ['new_tag'],
+ 'comment': 'new_comment',
+ 'status': 'Open'
+ },
+ 'topic': 'io.pagure.prod.pagure.issue.drop',
+ }
+ }
+
+ # Mock Github Comment
+ self.mock_github_comment = MagicMock()
+ self.mock_github_comment.user.name = 'mock_username'
+ self.mock_github_comment.user.login = 'mock_user_login'
+ self.mock_github_comment.body = 'mock_body'
+ self.mock_github_comment.id = 'mock_id'
+ self.mock_github_comment.created_at = 'mock_created_at'
+
+ # Mock Github Message
+ self.mock_github_message = {
+ 'msg': {
+ 'repository': {
+ 'owner': {
+ 'login': 'org'
+ },
+ 'name': 'repo'
+ },
+ 'pull_request': {
+ 'filter1': 'filter1',
+ 'labels': [{'name': 'custom_tag'}],
+ 'comments': ['some_comments!'],
+ 'number': 'mock_number',
+ 'user': {
+ 'login': 'mock_login'
+ },
+ 'assignees': [{'login': 'mock_login'}],
+ 'milestone': {
+ 'title': 'mock_milestone'
+ }
+ },
+ }
+ }
+
+ # Mock github issue
+ self.mock_github_pr = MagicMock()
+ self.mock_github_pr.get_issue_comments.return_value = [self.mock_github_comment]
+
+ # Mock Github Issue Raw
+ self.mock_github_issue_raw = {
+ 'comments': ['some comment'],
+ 'number': '1234',
+ 'user': {
+ 'login': 'mock_login'
+ },
+ 'assignees': [{'login': 'mock_assignee_login'}],
+ 'labels': [{'name': 'some_label'}],
+ 'milestone': {
+ 'title': 'mock_milestone'
+ }
+ }
+
+ # Mock Github Reporter
+ self.mock_github_person = MagicMock()
+ self.mock_github_person.name = 'mock_name'
+
+ # Mock Github Repo
+ self.mock_github_repo = MagicMock()
+ self.mock_github_repo.get_pull.return_value = self.mock_github_pr
+ self.mock_github_repo.get_issue.return_value = self.mock_github_pr
+
+ # Mock Github Client
+ self.mock_github_client = MagicMock()
+ self.mock_github_client.get_repo.return_value = self.mock_github_repo
+ self.mock_github_client.get_user.return_value = self.mock_github_person
+
+ @mock.patch('sync2jira.intermediary.PR.from_pagure')
+ def test_handle_pagure_message(self,
+ mock_pr_from_pagure):
+ """
+ This function tests 'handle_pagure_message'
+ """
+ # Set up return values
+ mock_pr_from_pagure.return_value = "Successful Call!"
+
+ # Call the function
+ response = u.handle_pagure_message(
+ msg=self.mock_pagure_message,
+ config=self.mock_config,
+ suffix='comment',
+ )
+
+ # Assert that calls were made correctly
+ mock_pr_from_pagure.assert_called_with(
+ 'org/repo', {'assignee': ['mock_assignee'],
+ 'project': {'name': 'org/repo'},
+ 'issue': {'filter1': 'filter1',
+ 'tags': ['custom_tag'],
+ 'comments':
+ [{'date_created': '1234', 'user':
+ {'name': 'mock_name'},
+ 'comment': 'mock_body',
+ 'id': '1234'}],
+ 'assignee': 'mock_assignee'},
+ 'tags': ['new_tag'],
+ 'comment': 'new_comment', 'status': 'Open'},
+ 'open',
+ self.mock_config,
+ )
+ self.assertEqual(response, 'Successful Call!')
+
+ @mock.patch('sync2jira.intermediary.PR.from_pagure')
+ def test_handle_pagure_message_not_in_mapped(self,
+ mock_pr_from_pagure):
+ """
+ This function tests 'handle_pagure_message' where upstream is not in mapped repo
+ """
+ # Set up return values
+ self.mock_pagure_message['msg']['pullrequest']['project']['name'] = 'bad_repo'
+
+ # Call the function
+ response = u.handle_pagure_message(
+ msg=self.mock_pagure_message,
+ config=self.mock_config,
+ suffix='comment',
+ )
+
+ # Assert all calls made correctly
+ self.assertEqual(None, response)
+ mock_pr_from_pagure.assert_not_called()
+
+ @mock.patch(PATH + 'Github')
+ @mock.patch('sync2jira.intermediary.PR.from_github')
+ def test_handle_github_message(self,
+ mock_pr_from_github,
+ mock_github):
+ """
+ This function tests 'handle_github_message'
+ """
+ # Set up return values
+ mock_pr_from_github.return_value = "Successful Call!"
+ mock_github.return_value = self.mock_github_client
+
+ # Call function
+ response = u.handle_github_message(
+ msg=self.mock_github_message,
+ config=self.mock_config,
+ suffix='mock_suffix'
+ )
+
+ # Assert that calls were made correctly
+ mock_pr_from_github.assert_called_with(
+ 'org/repo',
+ {'filter1': 'filter1', 'labels': ['custom_tag'],
+ 'comments': [{'author': 'mock_username',
+ 'name': 'mock_user_login',
+ 'body': 'mock_body', 'id': 'mock_id',
+ 'date_created': 'mock_created_at',
+ 'changed': None}], 'number': 'mock_number',
+ 'user': {'login': 'mock_login', 'fullname': 'mock_name'},
+ 'assignees': [{'fullname': 'mock_name'}],
+ 'milestone': 'mock_milestone'}, 'mock_suffix', self.mock_config)
+ mock_github.assert_called_with('mock_token')
+ self.assertEqual('Successful Call!', response)
+ self.mock_github_client.get_repo.assert_called_with('org/repo')
+ self.mock_github_repo.get_pull.assert_called_with(number='mock_number')
+ self.mock_github_pr.get_issue_comments.assert_any_call()
+ self.mock_github_client.get_user.assert_called_with('mock_login')
+
+ @mock.patch(PATH + 'Github')
+ @mock.patch('sync2jira.intermediary.Issue.from_github')
+ def test_handle_github_message_not_in_mapped(self,
+ mock_issue_from_github,
+ mock_github):
+ """
+ This function tests 'handle_github_message' where upstream is not in mapped repos
+ """
+ # Set up return values
+ self.mock_github_message['msg']['repository']['owner']['login'] = 'bad_owner'
+
+ # Call the function
+ response = u.handle_github_message(
+ msg=self.mock_github_message,
+ config=self.mock_config,
+ suffix='mock_suffix'
+ )
+
+ # Assert that all calls were made correctly
+ mock_issue_from_github.assert_not_called()
+ mock_github.assert_not_called()
+ self.assertEqual(None, response)
+
+ @mock.patch('sync2jira.intermediary.PR.from_pagure')
+ @mock.patch(PATH + 'requests')
+ def test_pagure_issues(self,
+ mock_requests,
+ mock_pr_from_pagure):
+ """
+ This function tests the 'pagure_prs' function
+ """
+ # Set up return values
+ get_return = MagicMock()
+ get_return.json.return_value = {
+ 'requests': [
+ {'assignee': 'mock_assignee'}
+ ]
+
+ }
+ get_return.request.url = 'mock_url'
+ mock_requests.get.return_value = get_return
+ mock_pr_from_pagure.return_value = 'Successful Call!'
+
+ # Call the function
+ response = list(u.pagure_prs(
+ upstream='org/repo',
+ config=self.mock_config,
+ ))
+
+ # Assert everything was called correctly
+ self.assertEqual(response[0], 'Successful Call!')
+ mock_requests.get.assert_called_with(
+ 'https://pagure.io/api/0/org/repo/pull-requests',
+ params={'filter1': 'filter1', 'tags': ['custom_tag']}
+ )
+ mock_pr_from_pagure.assert_called_with(
+ 'org/repo',
+ {'assignee': ['mock_assignee']},
+ 'open',
+ self.mock_config
+ )
+
+ @mock.patch('sync2jira.intermediary.PR.from_github')
+ @mock.patch(PATH + 'Github')
+ @mock.patch(PATH + 'u_issue.get_all_github_data')
+ def test_github_issues(self,
+ mock_get_all_github_data,
+ mock_github,
+ mock_pr_from_github):
+ """
+ This function tests the 'github_prs' function
+ """
+ # Set up return values
+ mock_github.return_value = self.mock_github_client
+ mock_get_all_github_data.return_value = [self.mock_github_issue_raw]
+ mock_pr_from_github.return_value = 'Successful Call!'
+
+ # Call the function
+ response = list(u.github_prs(
+ upstream='org/repo',
+ config=self.mock_config
+ ))
+
+ # Assert that calls were made correctly
+ mock_get_all_github_data.assert_called_with(
+ 'https://api.github.com/repos/org/repo/pulls?filter1=filter1&labels=custom_tag',
+ {'Authorization': 'token mock_token'}
+ )
+ self.mock_github_client.get_user.assert_any_call('mock_login')
+ self.mock_github_client.get_user.assert_any_call('mock_assignee_login')
+ mock_pr_from_github.assert_called_with(
+ 'org/repo',
+ {'comments':
+ [{'author': 'mock_username', 'name': 'mock_user_login',
+ 'body': 'mock_body', 'id': 'mock_id',
+ 'date_created': 'mock_created_at', 'changed': None}],
+ 'number': '1234', 'user':
+ {'login': 'mock_login', 'fullname': 'mock_name'},
+ 'assignees': [{'fullname': 'mock_name'}],
+ 'labels': ['some_label'], 'milestone': 'mock_milestone'},
+ 'open',
+ self.mock_config
+ )
+ self.mock_github_client.get_repo.assert_called_with('org/repo')
+ self.mock_github_repo.get_pull.assert_called_with(number='1234')
+ self.mock_github_pr.get_issue_comments.assert_any_call()
+ self.assertEqual(response[0], 'Successful Call!')
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..db66d03
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,32 @@
+[tox]
+envlist = py39,lint
+
+[testenv]
+passenv = TRAVIS TRAVIS_*
+setenv =
+ DEFAULT_FROM = mock_email@mock.com
+ DEFAULT_SERVER = mock_server
+ INITIALIZE=1
+ CONFLUENCE_SPACE=mock_confluence_space
+ CONFLUENCE_PAGE_TITLE=mock_confluence_page_title
+ CONFLUENCE_URL=http://mock_confluence_url
+ CONFLUENCE_USERNAME=mock_confluence_username
+ CONFLUENCE_PASSWORD=mock_confluence_password
+basepython =
+ py39: python3.9
+deps =
+ -r{toxinidir}/requirements.txt
+ -r{toxinidir}/test-requirements.txt
+sitepackages = True
+whitelist_externals = /usr/bin/flake8
+commands =
+ coverage run -m pytest {posargs} --ignore=tests/integration_tests
+# Add the following line locally to get an HTML report --cov-report html:htmlcov-py39
+
+[testenv:lint]
+skip_install = true
+basepython = python3.9
+deps =
+ flake8
+commands =
+ flake8 sync2jira --max-line-length=140