Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/release_18.01' into usegalaxy
Browse files Browse the repository at this point in the history
  • Loading branch information
natefoo committed Apr 30, 2018
2 parents c5095d1 + aebc0da commit 44fc5d6
Show file tree
Hide file tree
Showing 37 changed files with 172 additions and 75 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -593,8 +593,7 @@ var PairedCollectionCreator = Backbone.View.extend(baseMVC.LoggableMixin)

// ------------------------------------------------------------------------ API
/** convert a pair into JSON compatible with the collections API */
_pairToJSON: function(pair, src) {
src = src || "hda";
_pairToJSON: function(pair) {
//TODO: consider making this the pair structure when created instead
return {
collection_type: "paired",
Expand All @@ -604,12 +603,12 @@ var PairedCollectionCreator = Backbone.View.extend(baseMVC.LoggableMixin)
{
name: "forward",
id: pair.forward.id,
src: src
src: pair.forward.src || "hda"
},
{
name: "reverse",
id: pair.reverse.id,
src: src
src: pair.reverse.src || "hda"
}
]
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ var FolderToolbarView = Backbone.View.extend({
id: this.options.id,
is_admin: false,
is_anonym: true,
mutiple_add_dataset_options: false
multiple_add_dataset_options: false
};
if (Galaxy.user) {
template_defaults.is_admin = Galaxy.user.isAdmin();
Expand All @@ -88,7 +88,7 @@ var FolderToolbarView = Backbone.View.extend({
Galaxy.config.allow_library_path_paste !== false ||
Galaxy.config.library_import_dir !== null
) {
template_defaults.mutiple_add_dataset_options = true;
template_defaults.multiple_add_dataset_options = true;
}
}
this.$el.html(toolbar_template(template_defaults));
Expand Down Expand Up @@ -1390,7 +1390,7 @@ var FolderToolbarView = Backbone.View.extend({
'<span class="fa fa-plus"></span><span class="fa fa-folder"></span> Create Folder ',
"</button>",
// add datasets button
"<% if(mutiple_add_dataset_options) { %>",
"<% if(multiple_add_dataset_options) { %>",
'<div class="btn-group add-library-items add-library-items-datasets toolbar-item" style="display:none;">',
'<button title="Add Datasets to Current Folder" id="" type="button" class="primary-button dropdown-toggle" data-toggle="dropdown">',
'<span class="fa fa-plus"></span><span class="fa fa-file"></span> Add Datasets <span class="caret"></span>',
Expand All @@ -1400,7 +1400,7 @@ var FolderToolbarView = Backbone.View.extend({
"<% if(Galaxy.config.user_library_import_dir !== null) { %>",
'<li><a href="#folders/<%= id %>/import/userdir"> from User Directory</a></li>',
"<% } %>",
"<% if(Galaxy.config.allow_library_path_paste) { %>",
"<% if(Galaxy.config.library_import_dir !== null || Galaxy.config.allow_library_path_paste) { %>",
'<li class="divider"></li>',
'<li class="dropdown-header">Admins only</li>',
"<% if(Galaxy.config.library_import_dir !== null) { %>",
Expand Down
3 changes: 0 additions & 3 deletions config/job_conf.xml.sample_basic
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,6 @@
<plugins>
<plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner" workers="4"/>
</plugins>
<handlers>
<handler id="main"/>
</handlers>
<destinations>
<destination id="local" runner="local"/>
</destinations>
Expand Down
2 changes: 1 addition & 1 deletion config/local_conda_mapping.yml.sample
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
# See $GALAXY_ROOT/lib/galaxy/tools/deps/resolvers/default_conda_mapping.yml for example mapping -
# additional site-specific mappings can be added to config/conda_mapping.yml.
# additional site-specific mappings can be added to config/local_conda_mapping.yml.
2 changes: 1 addition & 1 deletion doc/source/admin/cluster.md
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ Additionally some of the runners including DRMAA may use the ``cluster_files_dir
cluster_files_directory: database/pbs
```
You may also find that attribute caching in your filesystem causes problems with job completion since it interferes with Galaxy detecting the presence and correct sizes of output files. In NFS caching can be disabled with the `-noac` mount option on Linux (on the Galaxy server), but this may have a significant impact on performance since all attributes will have to be read from the file server upon every file access. You should try the `retry_output_collection` option in `galaxy.yml` first to see if this solves the problem.
You may also find that attribute caching in your filesystem causes problems with job completion since it interferes with Galaxy detecting the presence and correct sizes of output files. In NFS caching can be disabled with the `-noac` mount option on Linux (on the Galaxy server), but this may have a significant impact on performance since all attributes will have to be read from the file server upon every file access. You should try the `retry_job_output_collection` option in `galaxy.yml` first to see if this solves the problem.

## Runner Configuration

Expand Down
4 changes: 2 additions & 2 deletions doc/source/admin/jobs.md
Original file line number Diff line number Diff line change
Expand Up @@ -294,7 +294,7 @@ As a natural extension to this, a dynamic job runner can be used as the default
The following example assumes the existence of a job destination with ids `short_pbs` and `long_pbs` and that a default dynamic job runner has been defined as follows in `job_conf.xml`:

```xml
<destination default="dynamic">
<destinations default="dynamic">
<destination id="dynamic">
<param id="type">python</param>
<param id="function">default_runner</param>
Expand All @@ -317,7 +317,7 @@ def default_runner(tool_id):
As another example, assume that a few tools should be only accessible to developers and all other users should receive a message indicating they are not authorized to use this tool. This can be accomplished with the following `job_conf.xml` fragment

```xml
<destination default="dynamic">
<destinations default="dynamic">
<destination id="dev_dynamic">
<param id="type">python</param>
<param id="function">dev_only</param>
Expand Down
4 changes: 2 additions & 2 deletions doc/source/admin/scaling.md
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@ uwsgi:
# fix up signal handling
die-on-term: true
hook-master-start: unix_signal:2 gracefully_kill_them_all
hook-master-start: unix_signal:5 gracefully_kill_them_all
hook-master-start: unix_signal:15 gracefully_kill_them_all

# listening options

Expand Down Expand Up @@ -231,7 +231,7 @@ To use the native uWSGI protocol, set the `socket` option:

```yaml
# listening options
socket: unix:///srv/galaxy/var/uwsgi.sock
socket: /srv/galaxy/var/uwsgi.sock
```
Here we've used a UNIX domain socket because there's less overhead than a TCP socket and it can be secured by filesystem
Expand Down
13 changes: 13 additions & 0 deletions doc/source/admin/special_topics/bug_reports.rst
Original file line number Diff line number Diff line change
Expand Up @@ -89,3 +89,16 @@ adjust as needed for how many error reports you see) in conjunction with any
other data you're already tracking in InfluxDB/Grafana. This setup allows
answering questions such as "did the change I make decrease the number of tool
failures on average"

GitHub
------

This plugin submits user bug reports to a GitHub repository of your choice. It
uses the HTML formatter from the email bug reporter.

Before submitting a bug report, the plugin will search for any open issues with
a matching title. If there are no matching issues, it opens a new issue,
otherwise, it makes a comment on the existing issue. This provides some measure
of de-duplication.

Issues will be tagged with the tool name and version.
26 changes: 26 additions & 0 deletions lib/galaxy/containers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,32 @@ def is_ready(self):
:rtype: bool
"""

def map_port(self, port):
    """Map a given container port to a host address/port.

    For legacy reasons, if ``port`` is ``None``, the first exposed port (if
    any) will be returned.

    :param port: Container port to map
    :type port: int
    :returns: Mapping to host address/port for given container port, or
              ``None`` if no mapping exists
    :rtype: :class:`ContainerPort` instance
    """
    mapping = None
    for mapping in self.ports:
        if port == mapping.port:
            return mapping
        if port is None:
            # No specific port was requested: arbitrarily pick the first
            # exposed port. Warn, because the choice is ambiguous when the
            # container exposes more than one port.
            log.warning("Container %s (%s): Don't know how to map ports to containers with multiple exposed ports "
                        "when a specific port is not requested. Arbitrarily choosing first: %s",
                        self.name, self.id, mapping)
            return mapping
    else:
        # for-else: only reached when the loop exhausts, i.e. self.ports is
        # empty, or a specific port was requested but no entry matched it.
        if port is None:
            log.warning("Container %s (%s): No exposed ports found!", self.name, self.id)
        else:
            log.warning("Container %s (%s): No mapping found for port: %s", self.name, self.id, port)
    return None


class ContainerInterface(with_metaclass(ABCMeta, object)):

Expand Down
3 changes: 3 additions & 0 deletions lib/galaxy/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3134,11 +3134,13 @@ def __init__(
id=None,
collection_type=None,
populated=True,
element_count=None
):
self.id = id
self.collection_type = collection_type
if not populated:
self.populated_state = DatasetCollection.populated_states.NEW
self.element_count = element_count

@property
def populated(self):
Expand Down Expand Up @@ -3211,6 +3213,7 @@ def __getitem__(self, key):
def copy(self, destination=None, element_destination=None):
new_collection = DatasetCollection(
collection_type=self.collection_type,
element_count=self.element_count
)
for element in self.elements:
element.copy_to_collection(
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/tools/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,6 @@
"CONVERTER_maf_to_interval_0",
"CONVERTER_wiggle_to_interval_0",
# Tools improperly migrated to the tool shed (devteam)
"lastz_wrapper_2",
"qualityFilter",
"winSplitter",
"pileup_interval",
Expand Down Expand Up @@ -166,6 +165,7 @@
"PEsortedSAM2readprofile": packaging.version.parse("1.1.1"),
"fetchflank": packaging.version.parse("1.0.1"),
"Extract genomic DNA 1": packaging.version.parse("3.0.0"),
"lastz_wrapper_2": packaging.version.parse("1.3"),
}


Expand Down
4 changes: 1 addition & 3 deletions lib/galaxy/tools/data/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,10 +53,8 @@ def exists(self, path):
path = os.path.abspath(path)
if path in self.tool_data_path_files:
return True
elif self.tool_data_path not in path:
return os.path.exists(path)
else:
return False
return os.path.exists(path)


class ToolDataTableManager(object):
Expand Down
6 changes: 4 additions & 2 deletions lib/galaxy/tools/data_manager/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -326,7 +326,8 @@ def process_result(self, out_data):
# moving a directory and the target already exists, we move the contents instead
log.debug('Attempting to add entries for undeclared tables: %s.', ', '.join(data_tables_dict.keys()))
for ref_file in out_data.values():
util.move_merge(ref_file.extra_files_path, self.data_managers.app.config.galaxy_data_manager_data_path)
if os.path.exists(ref_file.extra_files_path):
util.move_merge(ref_file.extra_files_path, self.data_managers.app.config.galaxy_data_manager_data_path)
path_column_names = ['path']
for data_table_name, data_table_values in data_tables_dict.items():
data_table = self.data_managers.app.tool_data_tables.get(data_table_name, None)
Expand Down Expand Up @@ -373,7 +374,8 @@ def process_move(self, data_table_name, column_name, source_base_path, relative_
if e.errno != errno.EEXIST:
raise e
# moving a directory and the target already exists, we move the contents instead
util.move_merge(source, target)
if os.path.exists(source):
util.move_merge(source, target)

if move_dict.get('relativize_symlinks', False):
util.relativize_symlinks(target)
Expand Down
5 changes: 4 additions & 1 deletion lib/galaxy/tools/parameters/grouping.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,10 @@ def get_composite_dataset_name(self, context):
if dataset_name is None:
filenames = list()
for composite_file in context.get('files', []):
filenames.append(composite_file.get('file_data', {}).get('filename', ''))
if not composite_file.get('ftp_files', ''):
filenames.append(composite_file.get('file_data', {}).get('filename', ''))
else:
filenames.append(composite_file.get('ftp_files', [])[0])
dataset_name = os.path.commonprefix(filenames).rstrip('.') or None
if dataset_name is None:
dataset_name = 'Uploaded Composite Dataset (%s)' % self.get_file_type(context)
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/tools/verify/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ def verify(
if attributes is None:
attributes = {}
compare = attributes.get('compare', 'diff')
if attributes.get('ftype', None) == 'bam':
if attributes.get('ftype', None) in ['bam', 'qname_sorted.bam', 'qname_input_sorted.bam', 'unsorted.bam']:
local_fh, temp_name = _bam_to_sam(local_name, temp_name)
local_name = local_fh.name
if compare == 'diff':
Expand Down
29 changes: 11 additions & 18 deletions lib/galaxy/visualization/plugins/interactive_environments.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import stat
import string
import tempfile
import time
import uuid
from itertools import product
from subprocess import PIPE, Popen
Expand All @@ -16,7 +15,7 @@
from six.moves import configparser, shlex_quote

from galaxy import model, web
from galaxy.containers import build_container_interfaces
from galaxy.containers import build_container_interfaces, ContainerPort
from galaxy.managers import api_keys
from galaxy.tools.deps.docker_util import DockerVolume
from galaxy.util import string_as_bool_or_none
Expand Down Expand Up @@ -443,25 +442,19 @@ def _launch_container_interface(self, image, env_override, volumes):
"""
run_args = self.container_run_args(image, env_override, volumes)
container = self.attr.container_interface.run_in_container(None, **run_args)
attempt = 0
container_ports = container.ports
while container_ports is None and attempt < 30:
# TODO: it would be better to do this in /interactive_environments/ready so the client doesn't block here,
# but _find_port_mapping needs certain non-persisted data (the port configured to be published) and the
# proxy manager doesn't have an update method, so that'd require bigger changes than I have the time for
# right now
attempt += 1
log.warning("Sleeping for 2 seconds while waiting for container %s ports", container.id)
time.sleep(2)
container_ports = container.ports
if container_ports is None:
raise Exception("Failed to determine ports for container '%s' after 30 attempts" % container.id)
container_port = self._find_port_mapping(container_ports)
log.debug("Container '%s' accessible at: %s:%s", container.id, container_port.hostaddr, container_port.hostport)
container_port = container.map_port(self.attr.docker_connect_port)
if not container_port:
log.warning("Container %s (%s) created but no port information available, readiness check will determine "
"ports", container.name, container.id)
container_port = ContainerPort(self.attr.docker_connect_port, None, None, None)
# a negated docker_connect_port will be stored in the proxy to indicate that the readiness check should
# attempt to determine the port
log.debug("Container %s (%s) port %s accessible at: %s:%s", container.name, container.id, container_port.port,
container_port.hostaddr, container_port.hostport)
self.attr.proxy_request = self.trans.app.proxy_manager.setup_proxy(
self.trans,
host=container_port.hostaddr,
port=container_port.hostport,
port=container_port.hostport or -container_port.port,
proxy_prefix=self.attr.proxy_prefix,
route_name=self.attr.viz_id,
container_ids=[container.id],
Expand Down
35 changes: 35 additions & 0 deletions lib/galaxy/web/proxy/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,11 @@

class ProxyManager(object):

valid_update_keys = (
'host',
'port',
)

def __init__(self, config):
for option in ["manage_dynamic_proxy", "dynamic_proxy_bind_port",
"dynamic_proxy_bind_ip", "dynamic_proxy_debug",
Expand Down Expand Up @@ -84,6 +89,13 @@ def setup_proxy(self, trans, host=DEFAULT_PROXY_TO_HOST, port=None, proxy_prefix
'proxied_host': proxy_requests.host,
}

def update_proxy(self, trans, **kwargs):
    """Update stored attributes of an existing proxy route.

    Only keys listed in ``valid_update_keys`` may be updated; any other
    key raises.

    :param trans: framework transaction identifying the proxy session
    :param kwargs: attribute names/values to update
    :returns: result of the IPC backend's ``update_requests`` call
    :raises Exception: if a key outside ``valid_update_keys`` is supplied
    """
    token = AuthenticationToken(trans)
    # Reject any key the backends do not know how to persist.
    for key in kwargs:
        if key not in self.valid_update_keys:
            raise Exception("Invalid proxy request update key: %s" % key)
    return self.proxy_ipc.update_requests(token, **kwargs)

def query_proxy(self, trans):
    """Fetch the stored proxy route for the given transaction's session.

    :param trans: framework transaction identifying the proxy session
    :returns: result of the IPC backend's ``fetch_requests`` call
    """
    return self.proxy_ipc.fetch_requests(AuthenticationToken(trans))
Expand Down Expand Up @@ -211,6 +223,15 @@ def handle_requests(self, authentication, proxy_requests, route_name, container_
new_json_data = json.dumps(session_map)
open(self.proxy_session_map, "w").write(new_json_data)

def update_requests(self, authentication, host=None, port=None):
    """Persist an updated host/port for an existing proxy session entry.

    The JSON session map file is rewritten in place while holding a file
    lock so concurrent writers do not corrupt it.

    :param authentication: token whose ``cookie_value`` keys the session map
    :param host: new proxied host (stored as-is, even if ``None``)
    :param port: new proxied port (stored as-is, even if ``None``)
    """
    key = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        # Use context managers so the handles are closed deterministically
        # rather than relying on garbage collection.
        with open(self.proxy_session_map) as fh:
            session_map = json.load(fh)
        # A KeyError here means no session was ever registered for this key.
        session_map[key]['host'] = host
        session_map[key]['port'] = port
        with open(self.proxy_session_map, "w") as fh:
            fh.write(json.dumps(session_map))

def fetch_requests(self, authentication):
key = authentication.cookie_value
try:
Expand Down Expand Up @@ -264,6 +285,20 @@ def handle_requests(self, authentication, proxy_requests, route_name, container_
finally:
conn.close()

def update_requests(self, authentication, host=None, port=None):
    """Update the host/port columns of an existing proxy route row.

    Runs an UPDATE against the ``gxproxy2`` sqlite table while holding a
    file lock on the database path.

    :param authentication: token whose ``cookie_value`` keys the route row
    :param host: new proxied host (written as-is, even if ``None``)
    :param port: new proxied port (written as-is, even if ``None``)
    """
    key = authentication.cookie_value
    with FileLock(self.proxy_session_map):
        conn = sqlite.connect(self.proxy_session_map)
        try:
            c = conn.cursor()
            # Parameterized statement: values are bound below, never
            # interpolated into the SQL text.
            update = '''UPDATE gxproxy2
                        SET host = ?, port = ?
                        WHERE key = ?'''
            c.execute(update, (host, port, key))
            conn.commit()
        finally:
            # Always release the connection, even if execute/commit raise.
            conn.close()

def fetch_requests(self, authentication):
key = authentication.cookie_value
with FileLock(self.proxy_session_map):
Expand Down
Loading

0 comments on commit 44fc5d6

Please sign in to comment.