Merge branch 'dev' into remove_toolshed_grids
guerler committed Oct 4, 2017
2 parents 504e2aa + f4eb2dc commit 98fa946
Showing 71 changed files with 2,188 additions and 539 deletions.
15 changes: 14 additions & 1 deletion client/galaxy/scripts/mvc/history/history-view.js
@@ -75,6 +75,9 @@ var HistoryView = _super.extend(
        // control contents/behavior based on where (and in what context) the panel is being used
        /** where should pages from links be displayed? (default to new tab/window) */
        this.linkTarget = attributes.linkTarget || '_blank';

        /** timeout id for detailed fetch of collection counts, etc... */
        this.detailedFetchTimeoutId = null;
    },

    /** create and return a collection for when none is initially passed
@@ -89,9 +92,18 @@ var HistoryView = _super.extend(
        if( this.model ){
            this.model.clearUpdateTimeout();
        }
        this._clearDetailedFetchTimeout();
        return this;
    },

    /** clear the timeout and the cached timeout id */
    _clearDetailedFetchTimeout : function(){
        if( this.detailedFetchTimeoutId ){
            clearTimeout( this.detailedFetchTimeoutId );
            this.detailedFetchTimeoutId = null;
        }
    },

    /** create any event listeners for the panel
     * @fires: rendered:initial on the first render
     * @fires: empty-history when switching to a history with no contents or creating a new history
@@ -105,7 +117,8 @@ var HistoryView = _super.extend(
            'loading-done' : function(){
                var self = this;
                // after the initial load, decorate with more time consuming fields (like HDCA element_counts)
                _.delay( function(){
                self.detailedFetchTimeoutId = _.delay( function(){
                    self.detailedFetchTimeoutId = null;
                    self.model.contents.fetchCollectionCounts();
                }, self.FETCH_COLLECTION_COUNTS_DELAY );
            },
2 changes: 2 additions & 0 deletions config/datatypes_conf.xml.sample
@@ -234,6 +234,7 @@
<datatype extension="sf3" type="galaxy.datatypes.proteomics:Sf3" display_in_upload="true" />
<datatype extension="cps" type="galaxy.datatypes.binary:Binary" subclass="true" display_in_upload="true" />
<datatype extension="ct" type="galaxy.datatypes.tabular:ConnectivityTable" display_in_upload="true"/>
<datatype extension="postgresql" type="galaxy.datatypes.binary:PostgresqlArchive" subclass="True" display_in_upload="True"/>
<datatype extension="searchgui_archive" type="galaxy.datatypes.binary:SearchGuiArchive" display_in_upload="true"/>
<datatype extension="fast5.tar" type="galaxy.datatypes.binary:Fast5Archive" display_in_upload="true"/>
<datatype extension="fast5.tar.gz" type="galaxy.datatypes.binary:Fast5ArchiveGz" display_in_upload="true"/>
@@ -679,6 +680,7 @@
<sniffer type="galaxy.datatypes.binary:Fast5ArchiveGz" />
<sniffer type="galaxy.datatypes.binary:Fast5ArchiveBz2" />
<sniffer type="galaxy.datatypes.binary:Fast5Archive" />
<sniffer type="galaxy.datatypes.binary:PostgresqlArchive"/>
<sniffer type="galaxy.datatypes.triples:Rdf"/>
<sniffer type="galaxy.datatypes.blast:BlastXml"/>
<sniffer type="galaxy.datatypes.xml:Phyloxml"/>
1 change: 1 addition & 0 deletions config/galaxy.ini.sample
@@ -779,6 +779,7 @@ nglims_config_file = tool-data/nglims.yaml

# Verbosity of console log messages. Acceptable values can be found here:
# https://docs.python.org/2/library/logging.html#logging-levels
# A custom debug level of "TRACE" is available for even more verbosity.
#log_level = DEBUG

# Print database operations to the server log (warning, quite verbose!).
14 changes: 0 additions & 14 deletions lib/galaxy/__init__.py
@@ -4,17 +4,3 @@

from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)


# compat: BadZipFile introduced in Python 2.7
import zipfile
if not hasattr(zipfile, 'BadZipFile'):
    zipfile.BadZipFile = zipfile.error

# compat: patch to add the NullHandler class to logging
import logging
if not hasattr(logging, 'NullHandler'):
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass
    logging.NullHandler = NullHandler
2 changes: 1 addition & 1 deletion lib/galaxy/app.py
@@ -226,7 +226,7 @@ def shutdown(self):

    def configure_fluent_log(self):
        if self.config.fluent_log:
            from galaxy.util.log.fluent_log import FluentTraceLogger
            from galaxy.util.logging.fluent_log import FluentTraceLogger
            self.trace_logger = FluentTraceLogger('galaxy', self.config.fluent_host, self.config.fluent_port)
        else:
            self.trace_logger = None
2 changes: 2 additions & 0 deletions lib/galaxy/config.py
@@ -29,6 +29,7 @@
from galaxy.util import string_as_bool
from galaxy.util import unicodify
from galaxy.util.dbkeys import GenomeBuilds
from galaxy.util.logging import LOGLV_TRACE
from galaxy.web.formatting import expand_pretty_datetime_format
from galaxy.web.stack import register_postfork_function
from .version import VERSION_MAJOR
@@ -876,6 +877,7 @@ def configure_logging(config):
    or a simple dictionary of configuration variables.
    """
    # Get root logger
    logging.addLevelName(LOGLV_TRACE, "TRACE")
    root = logging.getLogger()
    # PasteScript will have already configured the logger if the
    # 'loggers' section was found in the config file, otherwise we do
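The hunk above registers the new TRACE level name with Python's logging module. The numeric value of LOGLV_TRACE lives in galaxy.util.logging and is not shown in this diff, so the sketch below assumes a value of 5 (one step below logging.DEBUG) purely to illustrate how the registration behaves.

import logging

# LOGLV_TRACE is imported from galaxy.util.logging in the hunk above; its numeric
# value is not visible in this diff, so 5 (below DEBUG == 10) is assumed here.
LOGLV_TRACE = 5
logging.addLevelName(LOGLV_TRACE, "TRACE")

logging.basicConfig(level=LOGLV_TRACE)
log = logging.getLogger("galaxy.example")
# Once the name is registered, records emitted at the custom level render as "TRACE".
log.log(LOGLV_TRACE, "fine-grained message, only emitted when log_level = TRACE")

With galaxy.ini's log_level set to TRACE these records reach the handlers; at DEBUG or higher they are filtered out.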
53 changes: 53 additions & 0 deletions lib/galaxy/datatypes/binary.py
@@ -1566,6 +1566,59 @@ def sniff(self, filename):
OxliGraphLabels)


class PostgresqlArchive(CompressedArchive):
    """
    Class describing a Postgresql database packed into a tar archive
    >>> from galaxy.datatypes.sniff import get_test_fname
    >>> fname = get_test_fname( 'postgresql_fake.tar.bz2' )
    >>> PostgresqlArchive().sniff( fname )
    True
    >>> fname = get_test_fname( 'test.fast5.tar' )
    >>> PostgresqlArchive().sniff( fname )
    False
    """
    MetadataElement(name="version", default=None, param=MetadataParameter, desc="PostgreSQL database version",
                    readonly=True, visible=True, no_value=None)
    file_ext = "postgresql"

    def set_meta(self, dataset, overwrite=True, **kwd):
        super(PostgresqlArchive, self).set_meta(dataset, overwrite=overwrite, **kwd)
        try:
            if dataset and tarfile.is_tarfile(dataset.file_name):
                with tarfile.open(dataset.file_name, 'r') as temptar:
                    pg_version_file = temptar.extractfile('postgresql/db/PG_VERSION')
                    dataset.metadata.version = pg_version_file.read().strip()
        except Exception as e:
            log.warning('%s, set_meta Exception: %s', self, e)

    def sniff(self, filename):
        if filename and tarfile.is_tarfile(filename):
            try:
                with tarfile.open(filename, 'r') as temptar:
                    return 'postgresql/db/PG_VERSION' in temptar.getnames()
            except Exception as e:
                log.warning('%s, sniff Exception: %s', self, e)
        return False

    def set_peek(self, dataset, is_multi_byte=False):
        if not dataset.dataset.purged:
            dataset.peek = "PostgreSQL Archive (%s)" % (nice_size(dataset.get_size()))
            dataset.blurb = "PostgreSQL version %s" % (dataset.metadata.version or 'unknown')
        else:
            dataset.peek = 'file does not exist'
            dataset.blurb = 'file purged from disk'

    def display_peek(self, dataset):
        try:
            return dataset.peek
        except:
            return "PostgreSQL Archive (%s)" % (nice_size(dataset.get_size()))


Binary.register_sniffable_binary_format("postgresql_archiv", "postgresql", PostgresqlArchive)


class Fast5Archive(CompressedArchive):
    """
    Class describing a FAST5 archive
Binary file added lib/galaxy/datatypes/test/postgresql_fake.tar.bz2
Binary file not shown.
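The doctests in PostgresqlArchive above depend on this binary fixture, which the diff cannot display. As a rough sketch of what such a fixture presumably contains, the snippet below builds a minimal .tar.bz2 holding only a postgresql/db/PG_VERSION member and repeats the membership check that sniff() performs; the version string and any layout beyond that single path are assumptions.

import io
import os
import tarfile
import tempfile

# Create a tiny archive that mimics the layout sniff() looks for: a member named
# 'postgresql/db/PG_VERSION' containing the server version (the value here is made up).
fd, archive_path = tempfile.mkstemp(suffix='.tar.bz2')
os.close(fd)
version_bytes = b'9.6\n'
with tarfile.open(archive_path, 'w:bz2') as tar:
    info = tarfile.TarInfo('postgresql/db/PG_VERSION')
    info.size = len(version_bytes)
    tar.addfile(info, io.BytesIO(version_bytes))

# The same membership test PostgresqlArchive.sniff() runs on a candidate file.
with tarfile.open(archive_path, 'r') as tar:
    print('postgresql/db/PG_VERSION' in tar.getnames())  # True for this fixture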
5 changes: 3 additions & 2 deletions lib/galaxy/jobs/actions/post.py
@@ -3,14 +3,14 @@
immediate_actions listed below. Currently only used in workflows.
"""
import datetime
import logging
import socket

from markupsafe import escape

from galaxy.util import send_mail
from galaxy.util.logging import get_logger

log = logging.getLogger(__name__)
log = get_logger(__name__)


class DefaultJobAction(object):
@@ -308,6 +308,7 @@ def execute(cls, app, sa_session, action, job, replacement_dict):
            safe_to_delete = True
            for job_to_check in [d_j.job for d_j in input_dataset.dependent_jobs]:
                if job_to_check != job and job_to_check.state not in [job.states.OK, job.states.DELETED]:
                    log.trace("Workflow Intermediates cleanup attempted, but non-terminal state '%s' detected for job %s" % (job_to_check.state, job_to_check.id))
                    safe_to_delete = False
            if safe_to_delete:
                # Support purging here too.
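post.py now obtains its logger from galaxy.util.logging.get_logger and calls log.trace(), a method that plain logging.Logger objects do not have. That helper's implementation is not part of this diff; the sketch below shows one common way a trace() convenience method can be attached to a logger, and the class name and wiring here are assumptions rather than the module's actual code.

import logging

LOGLV_TRACE = 5  # assumed numeric value for the custom level, as in the config.py note above
logging.addLevelName(LOGLV_TRACE, "TRACE")


class TraceLogger(logging.getLoggerClass()):
    # Hypothetical subclass; galaxy.util.logging may implement get_logger() differently.
    def trace(self, msg, *args, **kwargs):
        if self.isEnabledFor(LOGLV_TRACE):
            self._log(LOGLV_TRACE, msg, args, **kwargs)


def get_logger(name):
    # Ensure loggers created from here on use the trace-capable class.
    logging.setLoggerClass(TraceLogger)
    return logging.getLogger(name)


logging.basicConfig(level=LOGLV_TRACE)
log = get_logger(__name__)
log.trace("non-terminal state '%s' detected for job %s", "running", 42)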
