Add in a few more features from CCP4ScalerA, some refactoring
jbeilstenedmands committed Sep 14, 2018
1 parent eb2f8ce commit 5feb90f
Showing 4 changed files with 233 additions and 167 deletions.
3 changes: 3 additions & 0 deletions Handlers/Phil.py
@@ -668,6 +668,9 @@
reference_reflection_file = None
.type = path
.help = "Reference file for testing of alternative indexing schemes"
reference_experiment_file = None
.type = path
.help = "Reference experiments.json for testing of alternative indexing schemes"
model = *decay *modulation *absorption partiality
.type = choice(multi=True)
.short_caption = "Scaling models to apply"
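The new reference_experiment_file option sits alongside the existing reference_reflection_file parameter. Below is a minimal, hedged sketch of how the value might be read through PhilIndex; the scope path used here (params.xia2.settings.scale) is an assumption, not something confirmed by this diff.

# Hedged sketch only: reading the new parameter alongside the existing one.
# The scope path below is assumed; adjust it to wherever
# reference_reflection_file actually lives in Phil.py.
from xia2.Handlers.Phil import PhilIndex

params = PhilIndex.params
scale_params = params.xia2.settings.scale  # assumed scope path

reference_reflections = scale_params.reference_reflection_file
reference_experiments = scale_params.reference_experiment_file  # added in this commit

if reference_experiments is not None:
    print("Testing alternative indexing against %s" % reference_experiments)
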
4 changes: 2 additions & 2 deletions Modules/Scaler/CCP4ScalerA.py
@@ -1213,7 +1213,7 @@ def _scale(self):
# Map generation may fail for a number of reasons, e.g. matplotlib broken
Debug.write("Could not generate absorption map (%s)" % e)

def _update_scaled_unit_cell(self):
'''def _update_scaled_unit_cell(self):
# FIXME this could be brought in-house
params = PhilIndex.params
@@ -1332,7 +1332,7 @@ def _update_scaled_unit_cell(self):
cif_out['_cell_%s' % cifname] = cell
Debug.write('%7.3f %7.3f %7.3f %7.3f %7.3f %7.3f' % \
self._scalr_cell)
self._scalr_cell)'''

def _generate_absorption_map(self, scaler):
output = scaler.get_all_output()
122 changes: 122 additions & 0 deletions Modules/Scaler/CommonScaler.py
@@ -15,7 +15,9 @@
from xia2.Handlers.Files import FileHandler
from xia2.Handlers.Phil import PhilIndex
from xia2.Handlers.Streams import Chatter, Debug
from xia2.Handlers.CIF import CIF, mmCIF
from xia2.lib.bits import nifty_power_of_ten
from xia2.Modules.AnalyseMyIntensities import AnalyseMyIntensities
from xia2.Modules import MtzUtils
from xia2.Modules.CCP4InterRadiationDamageDetector import \
CCP4InterRadiationDamageDetector
@@ -1112,6 +1114,126 @@ def _iotbx_merging_statistics(self, scaled_unmerged_mtz, anomalous=False, d_min=

return result

def _update_scaled_unit_cell(self):

params = PhilIndex.params
fast_mode = params.dials.fast_mode
if (params.xia2.settings.integrater == 'dials' and not fast_mode
and params.xia2.settings.scale.two_theta_refine):
from xia2.Wrappers.Dials.TwoThetaRefine import TwoThetaRefine
from xia2.lib.bits import auto_logfiler

Chatter.banner('Unit cell refinement')

# Collect a list of all sweeps, grouped by project, crystal, wavelength
groups = {}
self._scalr_cell_dict = {}
tt_refine_experiments = []
tt_refine_pickles = []
tt_refine_reindex_ops = []
for epoch in self._sweep_handler.get_epochs():
si = self._sweep_handler.get_sweep_information(epoch)
pi = '_'.join(si.get_project_info())
intgr = si.get_integrater()
groups[pi] = groups.get(pi, []) + \
[(intgr.get_integrated_experiments(),
intgr.get_integrated_reflections(),
intgr.get_integrater_reindex_operator())]

# Two theta refine the unit cell for each group
p4p_file = os.path.join(self.get_working_directory(),
'%s_%s.p4p' % (self._scalr_pname, self._scalr_xname))
for pi in groups.keys():
tt_grouprefiner = TwoThetaRefine()
tt_grouprefiner.set_working_directory(self.get_working_directory())
auto_logfiler(tt_grouprefiner)
args = zip(*groups[pi])
tt_grouprefiner.set_experiments(args[0])
tt_grouprefiner.set_pickles(args[1])
tt_grouprefiner.set_output_p4p(p4p_file)
tt_refine_experiments.extend(args[0])
tt_refine_pickles.extend(args[1])
tt_refine_reindex_ops.extend(args[2])
reindex_ops = args[2]
from cctbx.sgtbx import change_of_basis_op as cb_op
if self._spacegroup_reindex_operator is not None:
reindex_ops = [(
cb_op(str(self._spacegroup_reindex_operator)) * cb_op(str(op))).as_hkl()
if op is not None else self._spacegroup_reindex_operator
for op in reindex_ops]
tt_grouprefiner.set_reindex_operators(reindex_ops)
tt_grouprefiner.run()
Chatter.write('%s: %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f' % \
tuple([''.join(pi.split('_')[2:])] + list(tt_grouprefiner.get_unit_cell())))
self._scalr_cell_dict[pi] = (tt_grouprefiner.get_unit_cell(), tt_grouprefiner.get_unit_cell_esd(), tt_grouprefiner.import_cif(), tt_grouprefiner.import_mmcif())
if len(groups) > 1:
cif_in = tt_grouprefiner.import_cif()
cif_out = CIF.get_block(pi)
for key in sorted(cif_in.keys()):
cif_out[key] = cif_in[key]
mmcif_in = tt_grouprefiner.import_mmcif()
mmcif_out = mmCIF.get_block(pi)
for key in sorted(mmcif_in.keys()):
mmcif_out[key] = mmcif_in[key]

# Two theta refine everything together
if len(groups) > 1:
tt_refiner = TwoThetaRefine()
tt_refiner.set_working_directory(self.get_working_directory())
tt_refiner.set_output_p4p(p4p_file)
auto_logfiler(tt_refiner)
tt_refiner.set_experiments(tt_refine_experiments)
tt_refiner.set_pickles(tt_refine_pickles)
if self._spacegroup_reindex_operator is not None:
reindex_ops = [(
cb_op(str(self._spacegroup_reindex_operator)) * cb_op(str(op))).as_hkl()
if op is not None else self._spacegroup_reindex_operator
for op in tt_refine_reindex_ops]
tt_refiner.set_reindex_operators(reindex_ops)
tt_refiner.run()
self._scalr_cell = tt_refiner.get_unit_cell()
Chatter.write('Overall: %6.2f %6.2f %6.2f %6.2f %6.2f %6.2f' % tt_refiner.get_unit_cell())
self._scalr_cell_esd = tt_refiner.get_unit_cell_esd()
cif_in = tt_refiner.import_cif()
mmcif_in = tt_refiner.import_mmcif()
else:
self._scalr_cell, self._scalr_cell_esd, cif_in, mmcif_in = self._scalr_cell_dict.values()[0]
if params.xia2.settings.small_molecule == True:
FileHandler.record_data_file(p4p_file)

import dials.util.version
cif_out = CIF.get_block('xia2')
mmcif_out = mmCIF.get_block('xia2')
cif_out['_computing_cell_refinement'] = mmcif_out['_computing.cell_refinement'] = 'DIALS 2theta refinement, %s' % dials.util.version.dials_version()
for key in sorted(cif_in.keys()):
cif_out[key] = cif_in[key]
for key in sorted(mmcif_in.keys()):
mmcif_out[key] = mmcif_in[key]

Debug.write('Unit cell obtained by two-theta refinement')

else:
ami = AnalyseMyIntensities()
ami.set_working_directory(self.get_working_directory())

average_unit_cell, ignore_sg = ami.compute_average_cell(
[self._scalr_scaled_refl_files[key] for key in
self._scalr_scaled_refl_files])

Debug.write('Computed average unit cell (will use in all files)')
self._scalr_cell = average_unit_cell
self._scalr_cell_esd = None

# Write average unit cell to .cif
cif_out = CIF.get_block('xia2')
cif_out['_computing_cell_refinement'] = 'AIMLESS averaged unit cell'
for cell, cifname in zip(self._scalr_cell,
['length_a', 'length_b', 'length_c', 'angle_alpha', 'angle_beta', 'angle_gamma']):
cif_out['_cell_%s' % cifname] = cell

Debug.write('%7.3f %7.3f %7.3f %7.3f %7.3f %7.3f' % \
self._scalr_cell)

def anomalous_probability_plot(intensities, expected_delta=None):
from scitbx.math import distributions
from scitbx.array_family import flex
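The new _update_scaled_unit_cell method composes the space-group reindexing operator with each sweep's reindex operator via cctbx's change_of_basis_op before passing the result to the two-theta refiner. Below is a small standalone sketch of that composition, using illustrative operator strings rather than values from a real run.

# Minimal sketch of the operator composition used in _update_scaled_unit_cell above.
# The operator strings are illustrative only.
from cctbx.sgtbx import change_of_basis_op as cb_op

spacegroup_reindex_op = '-k,h,l'  # e.g. operator applied at space-group assignment
sweep_reindex_op = 'h,k,l'        # e.g. per-sweep operator from the integrater

# Compose the two operators and express the result in hkl notation,
# mirroring (cb_op(...) * cb_op(...)).as_hkl() in the method above.
combined = (cb_op(str(spacegroup_reindex_op)) * cb_op(str(sweep_reindex_op))).as_hkl()
print(combined)
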
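Two idioms in the new method, args = zip(*groups[pi]) followed by indexing, and self._scalr_cell_dict.values()[0], only work under Python 2. Below is a hedged sketch of Python-3-safe equivalents, assuming the surrounding behaviour is meant to stay the same; the group key and tuple contents are invented for illustration.

# Hedged sketch: Python-3-safe equivalents of two Python-2-only idioms above.
# The group key and tuple contents below are invented for illustration.
groups = {
    'AUTOMATIC_DEFAULT_NATIVE': [
        ('sweep1_experiments.json', 'sweep1_reflections.pickle', 'h,k,l'),
        ('sweep2_experiments.json', 'sweep2_reflections.pickle', 'h,k,l'),
    ],
}

# zip() returns an iterator in Python 3, so unpack rather than index into it
experiments, pickles, reindex_ops = zip(*groups['AUTOMATIC_DEFAULT_NATIVE'])

# dict.values() is a view in Python 3, so wrap it in list() before indexing
scalr_cell_dict = {
    'AUTOMATIC_DEFAULT_NATIVE': ((78.1, 78.1, 37.2, 90.0, 90.0, 90.0), None, {}, {}),
}
cell, cell_esd, cif_in, mmcif_in = list(scalr_cell_dict.values())[0]
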
