
Commit

Merge pull request #5747 from nsoranzo/typos_refactors
Typo fixes and small refactors
jmchilton committed Mar 21, 2018
2 parents 0823d0e + 2921af9 commit 890e632
Showing 7 changed files with 32 additions and 42 deletions.
2 changes: 1 addition & 1 deletion config/job_conf.xml.sample_advanced
@@ -121,7 +121,7 @@
<!-- The Kubernetes (k8s) plugin allows sending jobs to a k8s cluster that shares a filesystem with Galaxy.
This requires installing pykube. Install pykube by activating Galaxy's virtual
- and then executing the following pip command:
+ environment and then executing the following pip command:
pip install pykube==0.15.0
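If you want to confirm that pykube ended up in the right environment and can reach the cluster, a minimal sketch is below. It is not part of Galaxy or of this change; the kubeconfig path and namespace are assumptions.

```python
import pykube

# Load cluster credentials from a kubeconfig file (path is an assumption).
api = pykube.HTTPClient(pykube.KubeConfig.from_file("/path/to/kubeconfig"))

# Listing pods in a namespace is a simple "can we talk to the cluster" check.
for pod in pykube.Pod.objects(api).filter(namespace="default"):
    print(pod.name)
```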
3 changes: 1 addition & 2 deletions doc/source/admin/nginx.md
@@ -69,10 +69,9 @@ http {
gzip on;
gzip_http_version 1.1;
gzip_vary on;
- gzip_comp_level 4;
+ gzip_comp_level 6;
gzip_proxied any;
gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript;
- gzip_comp_level 6;
gzip_buffers 16 8k;
# allow up to 3 minutes for Galaxy to respond to slow requests before timing out
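To verify that the gzip settings above actually take effect, one option is to request a page with an `Accept-Encoding: gzip` header and inspect the response headers; a small stdlib sketch, with a placeholder hostname:

```python
import urllib.request

# Placeholder URL; substitute the address of the Galaxy server behind nginx.
req = urllib.request.Request(
    "https://galaxy.example.org/",
    headers={"Accept-Encoding": "gzip"},
)
with urllib.request.urlopen(req) as resp:
    # Expect "gzip" here once the configuration above is active.
    print(resp.headers.get("Content-Encoding"))
```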
4 changes: 2 additions & 2 deletions doc/source/admin/scaling.md
@@ -193,7 +193,7 @@ uwsgi:
# fix up signal handling
die-on-term: true
hook-master-start: unix_signal:2 gracefully_kill_them_all
- hook-master-start: unix_signal:5 gracefully_kill_them_all
+ hook-master-start: unix_signal:15 gracefully_kill_them_all

# listening options

@@ -398,7 +398,7 @@ $ ./scripts/galaxy-main -c config/galaxy.yml --server-name handler2 --daemonize
However, a better option than managing processes by hand is to use a process manager as documented in the [Starting and
Stopping](#starting-and-stopping) section.

- #### uWSGI Minutiea
+ #### uWSGI Minutiae

**Threads**

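For context on the corrected hook above: the numbers given to `unix_signal` are plain POSIX signal numbers, which can be checked from Python:

```python
import signal

# The unix_signal hooks in the uwsgi section refer to numeric signals:
print(signal.Signals(2).name)   # SIGINT, what Ctrl+C sends
print(signal.Signals(15).name)  # SIGTERM, what `kill` and most process managers send
```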
15 changes: 0 additions & 15 deletions lib/galaxy/datatypes/sniff.py
@@ -97,21 +97,6 @@ def stream_to_file(stream, suffix='', prefix='', dir=None, text=False, **kwd):
return stream_to_open_named_file(stream, fd, temp_name, **kwd)


- def check_newlines(fname, bytes_to_read=52428800):
-     """
-     Determines if there are any non-POSIX newlines in the first
-     number_of_bytes (by default, 50MB) of the file.
-     """
-     CHUNK_SIZE = 2 ** 20
-     with open(fname, 'r') as f:
-         for chunk in f.read(CHUNK_SIZE):
-             if f.tell() > bytes_to_read:
-                 break
-             if chunk.count('\r'):
-                 return True
-     return False


def convert_newlines(fname, in_place=True, tmp_dir=None, tmp_prefix="gxupload"):
"""
Converts in place a file from universal line endings
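A note on the deleted helper: `for chunk in f.read(CHUNK_SIZE)` iterates over the characters of a single 1 MB read, so it never scanned the advertised 50 MB. If an equivalent standalone check is ever needed, a minimal sketch could look like this (the name is illustrative and not part of Galaxy):

```python
def has_non_posix_newlines(fname, bytes_to_read=52428800):
    """Return True if a carriage return appears in the first bytes_to_read bytes."""
    chunk_size = 2 ** 20
    bytes_seen = 0
    with open(fname, 'rb') as f:
        while bytes_seen < bytes_to_read:
            chunk = f.read(chunk_size)
            if not chunk:
                break
            bytes_seen += len(chunk)
            if b'\r' in chunk:
                return True
    return False
```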
32 changes: 16 additions & 16 deletions lib/galaxy/webapps/config_manage.py
@@ -1,7 +1,6 @@
from __future__ import absolute_import, print_function

import argparse
- import copy
import os
import shutil
import string
@@ -56,12 +55,12 @@
}),
('buffer-size', {
'desc': """By default uWSGI allocates a very small buffer (4096 bytes) for the headers of each request. If you start receiving "invalid request block size" in your logs, it could mean you need a bigger buffer. Increase it up to 65535.""",
- 'default': '4096',
+ 'default': 4096,
'type': 'int',
}),
('processes', {
'desc': """Number of web server (worker) processes to fork after the application has loaded.""",
- 'default': '1',
+ 'default': 1,
'type': 'int',
}),
('threads', {
@@ -71,7 +70,7 @@
}),
('offload-threads', {
'desc': """Number of threads for serving static content and handling internal routing requests.""",
- 'default': '2',
+ 'default': 2,
'type': 'int',
}),
('static-map.1', {
@@ -88,8 +87,8 @@
}),
('master', {
'desc': """Enable the master process manager. Disabled by default for maximum compatibility with CTRL+C, but should be enabled for use with --daemon and/or production deployments.""",
- 'default': 'false',
- 'type': 'str',
+ 'default': False,
+ 'type': 'bool',
}),
('virtualenv', {
'desc': """Path to the application's Python virtual environment.""",
@@ -108,8 +107,8 @@
}),
('die-on-term', {
'desc': """Cause uWSGI to respect the traditional behavior of dying on SIGTERM (its default is to brutally reload workers)""",
- 'default': 'true',
- 'type': 'str',
+ 'default': True,
+ 'type': 'bool',
}),
('hook-master-start.1', {
'key': 'hook-master-start',
@@ -125,13 +124,13 @@
}),
('py-call-osafterfork', {
'desc': """Feature necessary for proper mule signal handling""",
- 'default': 'true',
- 'type': 'str',
+ 'default': True,
+ 'type': 'bool',
}),
('enable-threads', {
'desc': """Ensure application threads will run if `threads` is unset.""",
- 'default': 'true',
- 'type': 'str',
+ 'default': True,
+ 'type': 'bool',
}),
# ('route-uri', {
# 'default': '^/proxy/ goto:proxy'
@@ -149,7 +148,7 @@
# 'default': "['log:Proxy ${HTTP_HOST} to ${TARGET_HOST}', 'httpdumb:${TARGET_HOST}']",
# }),
# ('http-raw-body', {
- # 'default': 'True'
+ # 'default': True
# }),
])

@@ -664,8 +663,7 @@ def _replace_file(args, f, app_desc, from_path, to_path):
def _build_sample_yaml(args, app_desc):
schema = app_desc.schema
f = StringIO()
- options = copy.deepcopy(UWSGI_OPTIONS)
- for key, value in options.items():
+ for key, value in UWSGI_OPTIONS.items():
for field in ["desc", "default"]:
if field not in value:
continue
@@ -684,7 +682,7 @@ def _build_sample_yaml(args, app_desc):
description = description.lstrip()
as_comment = "\n".join(["# %s" % l for l in description.split("\n")]) + "\n"
f.write(as_comment)
- _write_sample_section(args, f, 'uwsgi', Schema(options), as_comment=False, uwsgi_hack=True)
+ _write_sample_section(args, f, 'uwsgi', Schema(UWSGI_OPTIONS), as_comment=False, uwsgi_hack=True)
_write_sample_section(args, f, app_desc.app_name, schema)
destination = os.path.join(args.galaxy_root, app_desc.sample_destination)
_write_to_file(args, f, destination)
@@ -744,6 +742,8 @@ def _write_option(args, f, key, option_value, as_comment=False, uwsgi_hack=False
comment += "\n"
as_comment_str = "#" if as_comment else ""
if uwsgi_hack:
+ if option.get("type", "str") == "bool":
+     value = str(value).lower()
key_val_str = "%s: %s" % (key, value)
else:
key_val_str = yaml.dump({key: value}, width=float("inf")).lstrip("{").rstrip("\n}")
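The new branch in `_write_option` exists because the uwsgi section is written as a raw `key: value` string rather than being passed through `yaml.dump`, so with typed defaults a Python boolean would otherwise render as `True`/`False`. A standalone sketch of the same idea (not the actual Galaxy function):

```python
def render_uwsgi_option(key, value, option_type="str"):
    # The uwsgi section is emitted as a raw "key: value" string, so boolean
    # defaults are lowercased into YAML-style true/false.
    if option_type == "bool":
        value = str(value).lower()
    return "%s: %s" % (key, value)


print(render_uwsgi_option("die-on-term", True, "bool"))  # die-on-term: true
print(render_uwsgi_option("buffer-size", 4096, "int"))   # buffer-size: 4096
```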
14 changes: 10 additions & 4 deletions lib/galaxy/webapps/galaxy/api/remote_files.py
@@ -8,8 +8,14 @@
from operator import itemgetter

from galaxy import exceptions
- from galaxy.util import jstree, unicodify
- from galaxy.util.path import safe_path, safe_walk
+ from galaxy.util import (
+     jstree,
+     smart_str
+ )
+ from galaxy.util.path import (
+     safe_path,
+     safe_walk
+ )
from galaxy.web import _future_expose_api as expose_api
from galaxy.web.base.controller import BaseAPIController

@@ -135,13 +141,13 @@ def __create_jstree(self, directory, disable='folders', whitelist=None):
for (dirpath, dirnames, filenames) in safe_walk(directory, whitelist=whitelist):
for dirname in dirnames:
dir_path = os.path.relpath(os.path.join(dirpath, dirname), directory)
- dir_path_hash = hashlib.sha1(unicodify(dir_path).encode('utf-8')).hexdigest()
+ dir_path_hash = hashlib.sha1(smart_str(dir_path)).hexdigest()
disabled = True if disable == 'folders' else False
jstree_paths.append(jstree.Path(dir_path, dir_path_hash, {'type': 'folder', 'state': {'disabled': disabled}, 'li_attr': {'full_path': dir_path}}))

for filename in filenames:
file_path = os.path.relpath(os.path.join(dirpath, filename), directory)
- file_path_hash = hashlib.sha1(unicodify(file_path).encode('utf-8')).hexdigest()
+ file_path_hash = hashlib.sha1(smart_str(file_path)).hexdigest()
disabled = True if disable == 'files' else False
jstree_paths.append(jstree.Path(file_path, file_path_hash, {'type': 'file', 'state': {'disabled': disabled}, 'li_attr': {'full_path': file_path}}))
else:
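The switch from `unicodify(...).encode('utf-8')` to `smart_str(...)` still hands `hashlib.sha1` a byte string, while also tolerating values that are already bytes. A stdlib-only sketch of what the hash amounts to for text input (illustrative, not the Galaxy code):

```python
import hashlib


def path_hash(path):
    # hashlib.sha1 requires bytes; for unicode text this matches
    # hashlib.sha1(smart_str(path)).hexdigest(), since smart_str returns a
    # UTF-8 encoded byte string.
    return hashlib.sha1(path.encode('utf-8')).hexdigest()


print(path_hash("some/relative/dir"))
```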
4 changes: 2 additions & 2 deletions test/api/test_workflows.py
@@ -1845,7 +1845,7 @@ def test_workflow_rerun_with_use_cached_job(self):
first_wf_output = self._get("datasets/%s" % run_workflow_response['outputs'][0]).json()
second_wf_output = self._get("datasets/%s" % new_workflow_response['outputs'][0]).json()
assert first_wf_output['file_name'] == second_wf_output['file_name'], \
"first output :\n%s\nsecond output: %s" % (first_wf_output, second_wf_output)
"first output:\n%s\nsecond output:\n%s" % (first_wf_output, second_wf_output)

@skip_without_tool('cat1')
def test_nested_workflow_rerun_with_use_cached_job(self):
@@ -1860,7 +1860,7 @@ def test_nested_workflow_rerun_with_use_cached_job(self):
run_jobs_summary = self._run_jobs(workflow_run_description, history_id=history_id_one)
self.dataset_populator.wait_for_history(history_id_one, assert_ok=True)
workflow_request = run_jobs_summary.workflow_request
- # We copy the inputs to a new history and re-reun the workflow
+ # We copy the inputs to a new history and re-run the workflow
inputs = json.loads(workflow_request['inputs'])
dataset_type = inputs['outer_input']['src']
dataset_id = inputs['outer_input']['id']
