IMP Reference Guide  2.6.0
The Integrative Modeling Platform
tools.py
1 #!/usr/bin/env python
2 
3 """@namespace IMP.pmi.tools
4  Miscellaneous utilities.
5 """
6 
7 from __future__ import print_function
8 import IMP
9 import IMP.algebra
10 import IMP.isd
11 import IMP.pmi
12 import IMP.pmi.topology
13 import collections
14 import itertools
15 from math import log,pi,sqrt,exp
16 import sys,os,math
17 import random
18 import time
19 import RMF
20 import IMP.rmf
21 from collections import defaultdict
22 try:
23  from collections import OrderedDict
24 except ImportError:
25  from IMP.pmi._compat_collections import OrderedDict
26 
27 def _get_restraint_set_key():
28  if not hasattr(_get_restraint_set_key, 'pmi_rs_key'):
29  _get_restraint_set_key.pmi_rs_key = IMP.ModelKey("PMI restraints")
30  return _get_restraint_set_key.pmi_rs_key
31 
32 def _add_restraint_set(model, mk):
33  rs = IMP.RestraintSet(model, "All PMI restraints")
34  model.add_data(mk, rs)
35  return rs
36 
37 def add_restraint_to_model(model, restraint):
38  """Add a PMI restraint to the model.
39  Since Model.add_restraint() no longer exists (in modern IMP restraints
40  should be added to a ScoringFunction instead), store them in
41  a RestraintSet, and keep a reference to it in the Model."""
42  mk = _get_restraint_set_key()
43  if model.get_has_data(mk):
44  rs = IMP.RestraintSet.get_from(model.get_data(mk))
45  else:
46  rs = _add_restraint_set(model, mk)
47  rs.add_restraint(restraint)
48 
49 def get_restraint_set(model):
50  """Get a RestraintSet containing all PMI restraints added to the model"""
51  mk = _get_restraint_set_key()
52  if not model.get_has_data(mk):
53  print("WARNING: no restraints added to model yet")
54  _add_restraint_set(model, mk)
55  return IMP.RestraintSet.get_from(model.get_data(mk))
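
# Illustrative usage sketch (added; not part of the original file). PMI
# restraints are registered on the Model and later pulled into a scoring
# function; the RestraintSet below merely stands in for a real PMI restraint.
#
#   m = IMP.Model()
#   r = IMP.RestraintSet(m, "example restraint")
#   add_restraint_to_model(m, r)
#   sf = IMP.core.RestraintsScoringFunction([get_restraint_set(m)])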
56 
57 class Stopwatch(object):
58 
59  def __init__(self, isdelta=True):
60 
61  self.starttime = time.clock()
62  self.label = "None"
63  self.isdelta = isdelta
64 
65  def set_label(self, labelstr):
66  self.label = labelstr
67 
68  def get_output(self):
69  output = {}
70  if self.isdelta:
71  newtime = time.clock()
72  output[
73  "Stopwatch_" +
74  self.label +
75  "_delta_seconds"] = str(
76  newtime -
77  self.starttime)
78  self.starttime = newtime
79  else:
80  output["Stopwatch_" + self.label +
81  "_elapsed_seconds"] = str(time.clock() - self.starttime)
82  return output
83 
84 
85 class SetupNuisance(object):
86 
87  def __init__(self, m, initialvalue, minvalue, maxvalue, isoptimized=True):
88 
89  nuisance = IMP.isd.Scale.setup_particle(IMP.Particle(m), initialvalue)
90  if minvalue:
91  nuisance.set_lower(minvalue)
92  if maxvalue:
93  nuisance.set_upper(maxvalue)
94 
95  # m.add_score_state(IMP.core.SingletonConstraint(IMP.isd.NuisanceRangeModifier(),None,nuisance))
96  nuisance.set_is_optimized(nuisance.get_nuisance_key(), isoptimized)
97  self.nuisance = nuisance
98 
99  def get_particle(self):
100  return self.nuisance
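
# Illustrative usage sketch (added): set up an optimized Scale nuisance
# bounded between 0.01 and 100, starting at 1.0.
#
#   m = IMP.Model()
#   sigma = SetupNuisance(m, 1.0, 0.01, 100.0, isoptimized=True).get_particle()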
101 
102 
103 class SetupWeight(object):
104 
105  def __init__(self, m, isoptimized=True):
106  pw = IMP.Particle(m)
107  self.weight = IMP.isd.Weight.setup_particle(pw)
108  self.weight.set_weights_are_optimized(isoptimized)
109 
110  def get_particle(self):
111  return self.weight
112 
113 
114 class ParticleToSampleFilter(object):
115  def __init__(self, sampled_objects):
116  self.sampled_objects=sampled_objects
117  self.filters=[]
118 
119  def add_filter(self,filter_string):
120  self.filters.append(filter_string)
121 
122  def get_particles_to_sample(self):
123  particles_to_sample={}
124  for so in self.sampled_objects:
125  ps_dict=so.get_particles_to_sample()
126  for key in ps_dict:
127  for f in self.filters:
128  if f in key:
129  if key not in particles_to_sample:
130  particles_to_sample[key]=ps_dict[key]
131  else:
132  particles_to_sample[key]+=ps_dict[key]
133  return particles_to_sample
134 
135 class ParticleToSampleList(object):
136 
137  def __init__(self, label="None"):
138 
139  self.dictionary_particle_type = {}
140  self.dictionary_particle_transformation = {}
141  self.dictionary_particle_name = {}
142  self.label = label
143 
144  def add_particle(
145  self,
146  particle,
147  particle_type,
148  particle_transformation,
149  name):
150  if not particle_type in ["Rigid_Bodies", "Floppy_Bodies", "Nuisances", "X_coord", "Weights"]:
151  raise TypeError("not the right particle type")
152  else:
153  self.dictionary_particle_type[particle] = particle_type
154  if particle_type == "Rigid_Bodies":
155  if type(particle_transformation) == tuple and len(particle_transformation) == 2 and type(particle_transformation[0]) == float and type(particle_transformation[1]) == float:
156  self.dictionary_particle_transformation[
157  particle] = particle_transformation
158  self.dictionary_particle_name[particle] = name
159  else:
160  raise TypeError("ParticleToSampleList: not the right transformation format for Rigid_Bodies, should be a tuple of floats")
161  else:
162  if type(particle_transformation) == float:
163  self.dictionary_particle_transformation[
164  particle] = particle_transformation
165  self.dictionary_particle_name[particle] = name
166  else:
167  raise TypeError("ParticleToSampleList: not the right transformation format, should be a float")
168 
169  def get_particles_to_sample(self):
170  ps = {}
171  for particle in self.dictionary_particle_type:
172  key = self.dictionary_particle_type[
173  particle] + "ParticleToSampleList_" + self.dictionary_particle_name[particle] + "_" + self.label
174  value = (
175  [particle],
176  self.dictionary_particle_transformation[particle])
177  ps[key] = value
178  return ps
179 
180 
# Note (added): the Variance class below relies on NumPy (array, matrix,
# linalg, diag, eye, save) and on IMP.saxs; these imports are needed for
# its methods to run.
from numpy import array, matrix, linalg, diag, eye, save
import numpy
import IMP.saxs

181 class Variance(object):
182 
183  def __init__(self, model, tau, niter, prot, th_profile, write_data=False):
184 
185  self.model = model
186  self.write_data = write_data
187  self.tau = tau
188  self.niter = niter
189  #! select particles from the model
190  particles = IMP.atom.get_by_type(prot, IMP.atom.ATOM_TYPE)
191  self.particles = particles
192  # store reference coordinates and theoretical profile
193  self.refpos = [IMP.core.XYZ(p).get_coordinates() for p in particles]
194  self.model_profile = th_profile
195 
196  def perturb_particles(self, perturb=True):
197  for i, p in enumerate(self.particles):
198  newpos = array(self.refpos[i])
199  if perturb:
200  newpos += numpy.random.normal(0, self.tau, 3)
201  newpos = IMP.algebra.Vector3D(newpos)
202  IMP.core.XYZ(p).set_coordinates(newpos)
203 
204  def get_profile(self):
205  model_profile = self.model_profile
206  p = model_profile.calculate_profile(self.particles, IMP.saxs.CA_ATOMS)
207  return array([model_profile.get_intensity(i) for i in
208  range(model_profile.size())])
209 
210  def init_variances(self):
211  # create placeholders
212  N = self.model_profile.size()
213  a = self.profiles[0][:]
214  self.m = matrix(a).T # Nx1
215  self.V = self.m * self.m.T
216  self.normm = linalg.norm(self.m)
217  self.normV = linalg.norm(self.V)
218 
219  def update_variances(self):
220  a = matrix(self.profiles[-1]) # 1xN
221  n = float(len(self.profiles))
222  self.m = a.T / n + (n - 1) / n * self.m
223  self.V = a.T * a + self.V
224  self.oldnormm = self.normm
225  self.oldnormV = self.normV
226  self.normm = linalg.norm(self.m)
227  self.normV = linalg.norm(self.V)
228  self.diffm = (self.oldnormm - self.normm) / self.oldnormm
229  self.diffV = (self.oldnormV - self.normV) / self.oldnormV
230 
231  def get_direct_stats(self, a):
232  nq = len(a[0])
233  nprof = len(a)
234  m = [0] * nq
235  for prof in a:
236  for q, I in enumerate(prof):
237  m[q] += I
238  m = array(m) / nprof
239  V = matrix(a)
240  V = V.T * V
241  Sigma = (matrix(a - m))
242  Sigma = Sigma.T * Sigma / (nprof - 1)
243  mi = matrix(diag(1. / m))
244  Sigmarel = mi.T * Sigma * mi
245  return m, V, Sigma, Sigmarel
246 
247  def store_data(self):
248  if not os.path.isdir('data'):
249  os.mkdir('data')
250  profiles = matrix(self.profiles)
251  self.directm, self.directV, self.Sigma, self.Sigmarel = \
252  self.get_direct_stats(array(profiles))
253  directV = self.directV
254  # print "V comparison",(linalg.norm(directV-self.V)/self.normV)
255  save('data/profiles', profiles)
256  # absolute profile differences
257  fl = open('data/profiles.dat', 'w')
258  for i, l in enumerate(array(profiles).T):
259  self.model_profile.get_q(i)
260  fl.write('%s ' % i)
261  for k in l:
262  fl.write('%s ' % (k - self.directm[i]))
263  fl.write('\n')
264  # relative profile differences
265  fl = open('data/profiles_rel.dat', 'w')
266  for i, l in enumerate(array(profiles).T):
267  self.model_profile.get_q(i)
268  fl.write('%s ' % i)
269  for k in l:
270  fl.write('%s ' % ((k - self.directm[i]) / self.directm[i]))
271  fl.write('\n')
272  save('data/m', self.directm)
273  save('data/V', self.directV)
274  Sigma = self.Sigma
275  save('data/Sigma', Sigma)
276  # Sigma matrix
277  fl = open('data/Sigma.dat', 'w')
278  model_profile = self.model_profile
279  for i in range(model_profile.size()):
280  qi = model_profile.get_q(i)
281  for j in range(model_profile.size()):
282  qj = model_profile.get_q(j)
283  vij = self.Sigma[i, j]
284  fl.write('%s %s %s\n' % (qi, qj, vij))
285  fl.write('\n')
286  # Sigma eigenvalues
287  fl = open('data/eigenvals', 'w')
288  for i in linalg.eigvalsh(Sigma):
289  fl.write('%s\n' % i)
290  Sigmarel = self.Sigmarel
291  save('data/Sigmarel', Sigmarel)
292  # Sigmarel matrix
293  fl = open('data/Sigmarel.dat', 'w')
294  model_profile = self.model_profile
295  for i in range(model_profile.size()):
296  qi = model_profile.get_q(i)
297  for j in range(model_profile.size()):
298  qj = model_profile.get_q(j)
299  vij = self.Sigmarel[i, j]
300  fl.write('%s %s %s\n' % (qi, qj, vij))
301  fl.write('\n')
302  # Sigma eigenvalues
303  fl = open('data/eigenvals_rel', 'w')
304  for i in linalg.eigvalsh(Sigmarel):
305  fl.write('%s\n' % i)
306  # mean profile
307  fl = open('data/mean.dat', 'w')
308  for i in range(len(self.directm)):
309  qi = self.model_profile.get_q(i)
310  fl.write('%s ' % qi)
311  fl.write('%s ' % self.directm[i])
312  fl.write('%s ' % sqrt(self.Sigma[i, i]))
313  fl.write('\n')
314 
315  def try_chol(self, jitter):
316  Sigma = self.Sigma
317  try:
318  linalg.cholesky(Sigma + matrix(eye(len(Sigma))) * jitter)
319  except linalg.LinAlgError:
320  print("Decomposition failed with jitter =", jitter)
321  return
322  print("Successful decomposition with jitter =", jitter)
323 
324  def run(self):
325  self.profiles = [self.get_profile()]
326  # self.init_variances()
327  for n in range(self.niter):
328  self.perturb_particles()
329  self.profiles.append(self.get_profile())
330  # self.update_variances()
331  #profiles = matrix(self.profiles)
332  # print n,self.diffm,self.diffV
333  # print
334  #
335  if self.write_data:
336  self.store_data()
337  # self.try_chol(0.)
338  # for i in logspace(-7,0,num=8):
339  # self.try_chol(i)
340 
341  def get_cov(self, relative=True):
342  if not relative:
343  return self.Sigma
344  else:
345  return self.Sigmarel
346 
347  #-------------------------------
348 
349 def get_random_cross_link_dataset(representation,
350  resolution=1.0,
351  number_of_cross_links=10,
352  ambiguity_probability=0.1,
353  confidence_score_range=[0,100],
354  avoid_same_particles=False):
355  '''Return a random cross-link dataset as a string.
356  Every line is a residue pair, together with UniqueIdentifier
357  and XL score.'''
358 
359  residue_pairs=get_random_residue_pairs(representation, resolution, number_of_cross_links, avoid_same_particles=avoid_same_particles)
360 
361  unique_identifier=0
362  cmin=float(min(confidence_score_range))
363  cmax=float(max(confidence_score_range))
364 
365  dataset="#\n"
366 
367  for (name1, r1, name2, r2) in residue_pairs:
368  if random.random() > ambiguity_probability:
369  unique_identifier+=1
370  score=random.random()*(cmax-cmin)+cmin
371  dataset+=str(name1)+" "+str(name2)+" "+str(r1)+" "+str(r2)+" "+str(score)+" "+str(unique_identifier)+"\n"
372 
373  return dataset
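
# Illustrative usage sketch (added; 'simo' stands for an already-built PMI
# Representation object):
#
#   xl_data = get_random_cross_link_dataset(simo,
#                                           number_of_cross_links=20,
#                                           confidence_score_range=[50, 100])
#   # each line of xl_data reads: name1 name2 resid1 resid2 score unique_id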
374 
375 
376  #-------------------------------
377 
378 def get_cross_link_data(directory, filename, dist, omega, sigma,
379  don=None, doff=None, prior=0, type_of_profile="gofr"):
380 
381  (distmin, distmax, ndist) = dist
382  (omegamin, omegamax, nomega) = omega
383  (sigmamin, sigmamax, nsigma) = sigma
384 
385  filen = IMP.isd.get_data_path("CrossLinkPMFs.dict")
386  xlpot = open(filen)
387 
388  for line in xlpot:
389  dictionary = eval(line)
390  break
391 
392  xpot = dictionary[directory][filename]["distance"]
393  pot = dictionary[directory][filename][type_of_profile]
394 
395  dist_grid = get_grid(distmin, distmax, ndist, False)
396  omega_grid = get_log_grid(omegamin, omegamax, nomega)
397  sigma_grid = get_log_grid(sigmamin, sigmamax, nsigma)
398 
399  if not don is None and not doff is None:
400  xlmsdata = IMP.isd.CrossLinkData(
401  dist_grid,
402  omega_grid,
403  sigma_grid,
404  xpot,
405  pot,
406  don,
407  doff,
408  prior)
409  else:
410  xlmsdata = IMP.isd.CrossLinkData(
411  dist_grid,
412  omega_grid,
413  sigma_grid,
414  xpot,
415  pot)
416  return xlmsdata
417 
418  #-------------------------------
419 
420 
421 def get_cross_link_data_from_length(length, xxx_todo_changeme3, xxx_todo_changeme4, xxx_todo_changeme5):
422  (distmin, distmax, ndist) = xxx_todo_changeme3
423  (omegamin, omegamax, nomega) = xxx_todo_changeme4
424  (sigmamin, sigmamax, nsigma) = xxx_todo_changeme5
425 
426  dist_grid = get_grid(distmin, distmax, ndist, False)
427  omega_grid = get_log_grid(omegamin, omegamax, nomega)
428  sigma_grid = get_log_grid(sigmamin, sigmamax, nsigma)
429 
430  xlmsdata = IMP.isd.CrossLinkData(dist_grid, omega_grid, sigma_grid, length)
431  return xlmsdata
432 
433 
434 def get_grid(gmin, gmax, ngrid, boundaries):
435  grid = []
436  dx = (gmax - gmin) / float(ngrid)
437  for i in range(0, ngrid + 1):
438  if(not boundaries and i == 0):
439  continue
440  if(not boundaries and i == ngrid):
441  continue
442  grid.append(gmin + float(i) * dx)
443  return grid
444 
445  #-------------------------------
446 
447 
448 def get_log_grid(gmin, gmax, ngrid):
449  grid = []
450  for i in range(0, ngrid + 1):
451  grid.append(gmin * exp(float(i) / ngrid * log(gmax / gmin)))
452  return grid
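
# Worked examples (added for illustration):
#   get_grid(0, 10, 5, True)      ->  [0.0, 2.0, 4.0, 6.0, 8.0, 10.0]
#   get_log_grid(1.0, 100.0, 2)   ->  approximately [1.0, 10.0, 100.0]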
453 
454  #-------------------------------
455 
456 
457 def cross_link_db_filter_parser(inputstring):
458  '''
459  example '"{ID_Score}" > 28 AND "{Sample}" ==
460  "%10_1%" OR ":Sample}" == "%10_2%" OR ":Sample}"
461  == "%10_3%" OR ":Sample}" == "%8_1%" OR ":Sample}" == "%8_2%"'
462  '''
463 
464  import pyparsing as pp
465 
466  operator = pp.Regex(">=|<=|!=|>|<|==|in").setName("operator")
467  value = pp.QuotedString(
468  '"') | pp.Regex(
469  r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?")
470  identifier = pp.Word(pp.alphas, pp.alphanums + "_")
471  comparison_term = identifier | value
472  condition = pp.Group(comparison_term + operator + comparison_term)
473 
474  expr = pp.operatorPrecedence(condition, [
475  ("OR", 2, pp.opAssoc.LEFT, ),
476  ("AND", 2, pp.opAssoc.LEFT, ),
477  ])
478 
479  parsedstring = str(expr.parseString(inputstring)) \
480  .replace("[", "(") \
481  .replace("]", ")") \
482  .replace(",", " ") \
483  .replace("'", " ") \
484  .replace("%", "'") \
485  .replace("{", "float(entry['") \
486  .replace("}", "'])") \
487  .replace(":", "str(entry['") \
488  .replace("}", "'])") \
489  .replace("AND", "and") \
490  .replace("OR", "or")
491  return parsedstring
492 
493 
494 def open_file_or_inline_text(filename):
495  try:
496  fl = open(filename, "r")
497  except IOError:
498  fl = filename.split("\n")
499  return fl
500 
501 
502 def get_drmsd(prot0, prot1):
503  drmsd = 0.
504  npairs = 0.
505  for i in range(0, len(prot0) - 1):
506  for j in range(i + 1, len(prot0)):
507  dist0 = IMP.core.get_distance(prot0[i], prot0[j])
508  dist1 = IMP.core.get_distance(prot1[i], prot1[j])
509  drmsd += (dist0 - dist1) ** 2
510  npairs += 1.
511  return math.sqrt(drmsd / npairs)
512 
513  #-------------------------------
514 
515 
516 def get_ids_from_fasta_file(fastafile):
517  ids = []
518  with open(fastafile) as ff:
519  for l in ff:
520  if l[0] == ">":
521  ids.append(l[1:-1])
522  return ids
523 
524 
525 def get_closest_residue_position(hier, resindex, terminus="N"):
526  '''
527  this function works with plain hierarchies, as read from the pdb,
528  no multi-scale hierarchies
529  '''
530  p = []
531  niter = 0
532  while len(p) == 0:
533  niter += 1
534  sel = IMP.atom.Selection(hier, residue_index=resindex,
535  atom_type=IMP.atom.AT_CA)
536 
537  if terminus == "N":
538  resindex += 1
539  if terminus == "C":
540  resindex -= 1
541 
542  if niter >= 10000:
543  print("get_closest_residue_position: exiting while loop without result")
544  break
545  p = sel.get_selected_particles()
546 
547  if len(p) == 1:
548  return IMP.core.XYZ(p[0]).get_coordinates()
549  elif len(p) == 0:
550  print("get_closest_residue_position: got NO residues for hierarchy %s and residue %i" % (hier, resindex))
551  raise Exception("get_closest_residue_position: got NO residues for hierarchy %s and residue %i" % (
552  hier, resindex))
553  else:
554  raise ValueError("got multiple residues for hierarchy %s and residue %i; the list of particles is %s" % (hier, resindex, str([pp.get_name() for pp in p])))
555 
556 @IMP.deprecated_function("2.6", "Use get_terminal_residue_position() instead.")
557 def get_position_terminal_residue(hier, terminus="C", resolution=1):
558  '''
559  Get the xyz position of the terminal residue at the given resolution.
560  @param hier hierarchy containing the terminal residue
561  @param terminus either 'N' or 'C'
562  @param resolution resolution to use.
563  '''
564  termresidue = None
565  termparticle = None
566  for p in IMP.atom.get_leaves(hier):
567  if IMP.pmi.Resolution(p).get_resolution() == resolution:
568  residues = IMP.pmi.tools.get_residue_indexes(p)
569  if terminus == "C":
570  if termresidue is not None and max(residues) >= termresidue:
571  termresidue = max(residues)
572  termparticle = p
573  elif termresidue is None:
574  termresidue = max(residues)
575  termparticle = p
576  elif terminus == "N":
577  if termresidue is not None and min(residues) <= termresidue:
578  termresidue = min(residues)
579  termparticle = p
580  elif termresidue is None:
581  termresidue = min(residues)
582  termparticle = p
583  else:
584  raise ValueError("terminus argument should be either N or C")
585 
586  return IMP.core.XYZ(termparticle).get_coordinates()
587 
588 def get_terminal_residue_position(representation,hier, terminus="C", resolution=1):
589  '''
590  Get the xyz position of the terminal residue at the given resolution.
591  @param hier hierarchy containing the terminal residue
592  @param terminus either 'N' or 'C'
593  @param resolution resolution to use.
594  '''
595  termresidue = None
596  termparticle = None
597 
598  ps=select(representation,
599  resolution=resolution,
600  hierarchies=[hier])
601 
602  for p in ps:
603  if IMP.pmi.Resolution(p).get_resolution() == resolution:
604  residues = IMP.pmi.tools.get_residue_indexes(p)
605  if terminus == "C":
606  if termresidue is not None and max(residues) >= termresidue:
607  termresidue = max(residues)
608  termparticle = p
609  elif termresidue is None:
610  termresidue = max(residues)
611  termparticle = p
612  elif terminus == "N":
613  if termresidue is not None and min(residues) <= termresidue:
614  termresidue = min(residues)
615  termparticle = p
616  elif termresidue is None:
617  termresidue = min(residues)
618  termparticle = p
619  else:
620  raise ValueError("terminus argument should be either N or C")
621 
622  return IMP.core.XYZ(termparticle).get_coordinates()
623 
624 
625 def get_residue_gaps_in_hierarchy(hierarchy, start, end):
626  '''
627  Return the residue index gaps and contiguous segments in the hierarchy.
628 
629  @param hierarchy hierarchy to examine
630  @param start first residue index
631  @param end last residue index
632 
633  @return A list of lists of the form
634  [[1,100,"cont"],[101,120,"gap"],[121,200,"cont"]]
635  '''
636  gaps = []
637  for n, rindex in enumerate(range(start, end + 1)):
638  sel = IMP.atom.Selection(hierarchy, residue_index=rindex,
639  atom_type=IMP.atom.AT_CA)
640 
641  if len(sel.get_selected_particles()) == 0:
642  if n == 0:
643  # set the initial condition
644  rindexgap = start
645  rindexcont = start - 1
646  if rindexgap == rindex - 1:
647  # residue is contiguous with the previously discovered gap
648  gaps[-1][1] += 1
649  else:
650  # residue is not contiguous with the previously discovered gap
651  # hence create a new gap tuple
652  gaps.append([rindex, rindex, "gap"])
653  # update the index of the last residue gap
654  rindexgap = rindex
655  else:
656  if n == 0:
657  # set the initial condition
658  rindexgap = start - 1
659  rindexcont = start
660  if rindexcont == rindex - 1:
661  # residue is contiguous with the previously discovered
662  # continuous part
663  gaps[-1][1] += 1
664  else:
665  # residue is not contiguous with the previously discovered continuous part
666  # hence create a new cont tuple
667  gaps.append([rindex, rindex, "cont"])
668  # update the index of the last residue gap
669  rindexcont = rindex
670  return gaps
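
# Illustrative usage sketch (added; the component name "Rpb1" is hypothetical):
#
#   gaps = get_residue_gaps_in_hierarchy(simo.hier_dict["Rpb1"], 1, 200)
#   for first, last, kind in gaps:
#       if kind == "gap":
#           print("no structure for residues %d-%d" % (first, last))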
671 
672 
673 class map(object):
674 
675  def __init__(self):
676  self.map = {}
677 
678  def set_map_element(self, xvalue, yvalue):
679  self.map[xvalue] = yvalue
680 
681  def get_map_element(self, invalue):
682  if type(invalue) == float:
683  n = 0
684  mindist = 1
685  for x in self.map:
686  dist = (invalue - x) * (invalue - x)
687 
688  if n == 0:
689  mindist = dist
690  minx = x
691  if dist < mindist:
692  mindist = dist
693  minx = x
694  n += 1
695  return self.map[minx]
696  elif type(invalue) == str:
697  return self.map[invalue]
698  else:
699  raise TypeError("wrong type for map")
700 
701 
702 def select(representation,
703  resolution=None,
704  hierarchies=None,
705  selection_arguments=None,
706  name=None,
707  name_is_ambiguous=False,
708  first_residue=None,
709  last_residue=None,
710  residue=None,
711  representation_type=None):
712  '''
713  this function uses representation=SimplifiedModel
714  it returns the corresponding selected particles
715  representation_type="Beads", "Res:X", "Densities", "Representation", "Molecule"
716  '''
717 
718  if resolution is None:
719  allparticles = IMP.atom.get_leaves(representation.prot)
720  resolution_particles = None
721  hierarchies_particles = None
722  names_particles = None
723  residue_range_particles = None
724  residue_particles = None
725  representation_type_particles = None
726 
727  if not resolution is None:
728  resolution_particles = []
729  hs = representation.get_hierarchies_at_given_resolution(resolution)
730  for h in hs:
731  resolution_particles += IMP.atom.get_leaves(h)
732 
733  if not hierarchies is None:
734  hierarchies_particles = []
735  for h in hierarchies:
736  hierarchies_particles += IMP.atom.get_leaves(h)
737 
738  if not name is None:
739  names_particles = []
740  if name_is_ambiguous:
741  for namekey in representation.hier_dict:
742  if name in namekey:
743  names_particles += IMP.atom.get_leaves(
744  representation.hier_dict[namekey])
745  elif name in representation.hier_dict:
746  names_particles += IMP.atom.get_leaves(representation.hier_dict[name])
747  else:
748  print("select: component %s is not there" % name)
749 
750  if not first_residue is None and not last_residue is None:
751  sel = IMP.atom.Selection(representation.prot,
752  residue_indexes=range(first_residue, last_residue + 1))
753  residue_range_particles = [IMP.atom.Hierarchy(p)
754  for p in sel.get_selected_particles()]
755 
756  if not residue is None:
757  sel = IMP.atom.Selection(representation.prot, residue_index=residue)
758  residue_particles = [IMP.atom.Hierarchy(p)
759  for p in sel.get_selected_particles()]
760 
761  if not representation_type is None:
762  representation_type_particles = []
763  if representation_type == "Molecule":
764  for name in representation.hier_representation:
765  for repr_type in representation.hier_representation[name]:
766  if repr_type == "Beads" or "Res:" in repr_type:
767  h = representation.hier_representation[name][repr_type]
768  representation_type_particles += IMP.atom.get_leaves(h)
769 
770  elif representation_type == "PDB":
771  for name in representation.hier_representation:
772  for repr_type in representation.hier_representation[name]:
773  if "Res:" in repr_type:
774  h = representation.hier_representation[name][repr_type]
775  representation_type_particles += IMP.atom.get_leaves(h)
776 
777  else:
778  for name in representation.hier_representation:
779  h = representation.hier_representation[
780  name][
781  representation_type]
782  representation_type_particles += IMP.atom.get_leaves(h)
783 
784  selections = [hierarchies_particles, names_particles,
785  residue_range_particles, residue_particles, representation_type_particles]
786 
787  if resolution is None:
788  selected_particles = set(allparticles)
789  else:
790  selected_particles = set(resolution_particles)
791 
792  for s in selections:
793  if not s is None:
794  selected_particles = (set(s) & selected_particles)
795 
796  return list(selected_particles)
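
# Illustrative usage sketches (added; 'simo' is a PMI Representation and the
# component name "Rpb4" is hypothetical):
#
#   # resolution-1 particles of residues 1-100 of component Rpb4
#   ps = select(simo, resolution=1, name="Rpb4",
#               first_residue=1, last_residue=100)
#   # every bead particle in the system
#   beads = select(simo, representation_type="Beads")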
797 
798 
799 def select_by_tuple(
800  representation,
801  tupleselection,
802  resolution=None,
803  name_is_ambiguous=False):
804  if isinstance(tupleselection, tuple) and len(tupleselection) == 3:
805  particles = IMP.pmi.tools.select(representation, resolution=resolution,
806  name=tupleselection[2],
807  first_residue=tupleselection[0],
808  last_residue=tupleselection[1],
809  name_is_ambiguous=name_is_ambiguous)
810  elif isinstance(tupleselection, str):
811  particles = IMP.pmi.tools.select(representation, resolution=resolution,
812  name=tupleselection,
813  name_is_ambiguous=name_is_ambiguous)
814  else:
815  raise ValueError('you passed something bad to select_by_tuple()')
816  # now order the result by residue number
817  particles = IMP.pmi.tools.sort_by_residues(particles)
818 
819  return particles
820 
821 
822 def get_db_from_csv(csvfilename):
823  import csv
824  outputlist = []
825  with open(csvfilename) as fh:
826  csvr = csv.DictReader(fh)
827  for l in csvr:
828  outputlist.append(l)
829  return outputlist
830 
831 
832 class HierarchyDatabase(object):
833  """Store the representations for a system."""
834 
835  def __init__(self):
836  self.db = {}
837  # this dictionary map a particle to its root hierarchy
838  self.root_hierarchy_dict = {}
839  self.preroot_fragment_hierarchy_dict = {}
840  self.particle_to_name = {}
841  self.model = None
842 
843  def add_name(self, name):
844  if name not in self.db:
845  self.db[name] = {}
846 
847  def add_residue_number(self, name, resn):
848  resn = int(resn)
849  self.add_name(name)
850  if resn not in self.db[name]:
851  self.db[name][resn] = {}
852 
853  def add_resolution(self, name, resn, resolution):
854  resn = int(resn)
855  resolution = float(resolution)
856  self.add_name(name)
857  self.add_residue_number(name, resn)
858  if resolution not in self.db[name][resn]:
859  self.db[name][resn][resolution] = []
860 
861  def add_particles(self, name, resn, resolution, particles):
862  resn = int(resn)
863  resolution = float(resolution)
864  self.add_name(name)
865  self.add_residue_number(name, resn)
866  self.add_resolution(name, resn, resolution)
867  self.db[name][resn][resolution] += particles
868  for p in particles:
869  (rh, prf) = self.get_root_hierarchy(p)
870  self.root_hierarchy_dict[p] = rh
871  self.preroot_fragment_hierarchy_dict[p] = prf
872  self.particle_to_name[p] = name
873  if self.model is None:
874  self.model = particles[0].get_model()
875 
876  def get_model(self):
877  return self.model
878 
879  def get_names(self):
880  names = list(self.db.keys())
881  names.sort()
882  return names
883 
884  def get_particles(self, name, resn, resolution):
885  resn = int(resn)
886  resolution = float(resolution)
887  return self.db[name][resn][resolution]
888 
889  def get_particles_at_closest_resolution(self, name, resn, resolution):
890  resn = int(resn)
891  resolution = float(resolution)
892  closestres = min(self.get_residue_resolutions(name, resn),
893  key=lambda x: abs(float(x) - float(resolution)))
894  return self.get_particles(name, resn, closestres)
895 
896  def get_residue_resolutions(self, name, resn):
897  resn = int(resn)
898  resolutions = list(self.db[name][resn].keys())
899  resolutions.sort()
900  return resolutions
901 
902  def get_molecule_resolutions(self, name):
903  resolutions = set()
904  for resn in self.db[name]:
905  resolutions.update(list(self.db[name][resn].keys()))
906  resolutions = sorted(resolutions)
907  return resolutions
908 
909  def get_residue_numbers(self, name):
910  residue_numbers = list(self.db[name].keys())
911  residue_numbers.sort()
912  return residue_numbers
913 
914  def get_particles_by_resolution(self, name, resolution):
915  resolution = float(resolution)
916  particles = []
917  for resn in self.get_residue_numbers(name):
918  result = self.get_particles_at_closest_resolution(
919  name,
920  resn,
921  resolution)
922  pstemp = [p for p in result if p not in particles]
923  particles += pstemp
924  return particles
925 
926  def get_all_particles_by_resolution(self, resolution):
927  resolution = float(resolution)
928  particles = []
929  for name in self.get_names():
930  particles += self.get_particles_by_resolution(name, resolution)
931  return particles
932 
933  def get_root_hierarchy(self, particle):
934  prerootfragment = particle
935  while IMP.atom.Atom.get_is_setup(particle) or \
936  IMP.atom.Residue.get_is_setup(particle) or \
937  IMP.atom.Fragment.get_is_setup(particle):
938  if IMP.atom.Atom.get_is_setup(particle):
939  p = IMP.atom.Atom(particle).get_parent()
940  elif IMP.atom.Residue.get_is_setup(particle):
941  p = IMP.atom.Residue(particle).get_parent()
942  elif IMP.atom.Fragment.get_is_setup(particle):
943  p = IMP.atom.Fragment(particle).get_parent()
944  prerootfragment = particle
945  particle = p
946  return (
947  (IMP.atom.Hierarchy(particle), IMP.atom.Hierarchy(prerootfragment))
948  )
949 
950  def get_all_root_hierarchies_by_resolution(self, resolution):
951  hierarchies = []
952  resolution = float(resolution)
953  particles = self.get_all_particles_by_resolution(resolution)
954  for p in particles:
955  rh = self.root_hierarchy_dict[p]
956  if rh not in hierarchies:
957  hierarchies.append(IMP.atom.Hierarchy(rh))
958  return hierarchies
959 
960  def get_preroot_fragments_by_resolution(self, name, resolution):
961  fragments = []
962  resolution = float(resolution)
963  particles = self.get_particles_by_resolution(name, resolution)
964  for p in particles:
965  fr = self.preroot_fragment_hierarchy_dict[p]
966  if fr not in fragments:
967  fragments.append(fr)
968  return fragments
969 
970  def show(self, name):
971  print(name)
972  for resn in self.get_residue_numbers(name):
973  print(resn)
974  for resolution in self.get_residue_resolutions(name, resn):
975  print("----", resolution)
976  for p in self.get_particles(name, resn, resolution):
977  print("--------", p.get_name())
978 
979 
980 def get_prot_name_from_particle(p, list_of_names):
981  '''Return the component name provided a particle and a list of names'''
982  root = p
983  protname = root.get_name()
984  is_a_bead = False
985  while not protname in list_of_names:
986  root0 = root.get_parent()
987  if root0 == IMP.atom.Hierarchy():
988  return (None, None)
989  protname = root0.get_name()
990 
991  # check if that is a bead
992  # this piece of code might be dangerous if
993  # the hierarchy was called Bead :)
994  if "Beads" in protname:
995  is_a_bead = True
996  root = root0
997  return (protname, is_a_bead)
998 
999 
1000 def get_residue_indexes(hier):
1001  '''
1002  Retrieve the residue indexes for the given particle.
1003 
1004  The particle must be an instance of Fragment,Residue or Atom
1005  or else returns an empty list
1006  '''
1007  resind = []
1008  if IMP.atom.Fragment.get_is_setup(hier):
1009  resind = IMP.atom.Fragment(hier).get_residue_indexes()
1010  elif IMP.atom.Residue.get_is_setup(hier):
1011  resind = [IMP.atom.Residue(hier).get_index()]
1012  elif IMP.atom.Atom.get_is_setup(hier):
1013  a = IMP.atom.Atom(hier)
1014  resind = [IMP.atom.Residue(a.get_parent()).get_index()]
1015  else:
1016  resind = []
1017  return resind
1018 
1019 
1020 def sort_by_residues(particles):
1021  particles_residues = [(p, IMP.pmi.tools.get_residue_indexes(p))
1022  for p in particles]
1023  sorted_particles_residues = sorted(
1024  particles_residues,
1025  key=lambda tup: tup[1])
1026  particles = [p[0] for p in sorted_particles_residues]
1027  return particles
1028 
1029 
1030 def get_residue_to_particle_map(particles):
1031  # this function returns a dictionary that maps particles to residue indexes
1032  particles = sort_by_residues(particles)
1033  particles_residues = [(p, IMP.pmi.tools.get_residue_indexes(p))
1034  for p in particles]
1035  return dict(zip(particles_residues, particles))
1036 
1037 #
1038 # Parallel Computation
1039 #
1040 
1041 
1043  """Synchronize data over a parallel run"""
1044  from mpi4py import MPI
1045  comm = MPI.COMM_WORLD
1046  rank = comm.Get_rank()
1047  number_of_processes = comm.size
1048  comm.Barrier()
1049  if rank != 0:
1050  comm.send(data, dest=0, tag=11)
1051 
1052  elif rank == 0:
1053  for i in range(1, number_of_processes):
1054  data_tmp = comm.recv(source=i, tag=11)
1055  if type(data) == list:
1056  data += data_tmp
1057  elif type(data) == dict:
1058  data.update(data_tmp)
1059  else:
1060  raise TypeError("data not supported, use list or dictionaries")
1061 
1062  for i in range(1, number_of_processes):
1063  comm.send(data, dest=i, tag=11)
1064 
1065  if rank != 0:
1066  data = comm.recv(source=0, tag=11)
1067  return data
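
# Illustrative usage sketch (added; requires mpi4py and running under mpirun).
# Every rank contributes its local list and receives the combined result;
# 'my_rank' is assumed to hold the MPI rank of this process.
#
#   local_frames = ["frame_%d_rank_%d" % (i, my_rank) for i in range(10)]
#   all_frames = scatter_and_gather(local_frames)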
1068 
1070  """Synchronize data over a parallel run"""
1071  from mpi4py import MPI
1072  comm = MPI.COMM_WORLD
1073  rank = comm.Get_rank()
1074  number_of_processes = comm.size
1075  comm.Barrier()
1076  if rank != 0:
1077  comm.send(data, dest=0, tag=11)
1078  elif rank == 0:
1079  for i in range(1, number_of_processes):
1080  data_tmp = comm.recv(source=i, tag=11)
1081  for k in data:
1082  data[k]+=data_tmp[k]
1083 
1084  for i in range(1, number_of_processes):
1085  comm.send(data, dest=i, tag=11)
1086 
1087  if rank != 0:
1088  data = comm.recv(source=0, tag=11)
1089  return data
1090 
1091 
1092 #
1093 ### Lists and iterators
1094 #
1095 
1096 def sublist_iterator(l, lmin=None, lmax=None):
1097  '''
1098  Yield all sublists of length >= lmin and <= lmax
1099  '''
1100  if lmin is None:
1101  lmin = 0
1102  if lmax is None:
1103  lmax = len(l)
1104  n = len(l) + 1
1105  for i in range(n):
1106  for j in range(i + 1, n):
1107  if len(l[i:j]) <= lmax and len(l[i:j]) >= lmin:
1108  yield l[i:j]
1109 
1110 
1111 def flatten_list(l):
1112  return [item for sublist in l for item in sublist]
1113 
1114 
1115 def list_chunks_iterator(list, length):
1116  """ Yield successive length-sized chunks from a list.
1117  """
1118  for i in range(0, len(list), length):
1119  yield list[i:i + length]
1120 
1121 
1122 def chunk_list_into_segments(seq, num):
1123  seq = list(seq)
1124  avg = len(seq) / float(num)
1125  out = []
1126  last = 0.0
1127 
1128  while last < len(seq):
1129  out.append(seq[int(last):int(last + avg)])
1130  last += avg
1131 
1132  return out
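
# Worked example (added for illustration): splitting 7 items into 2 segments,
#
#   chunk_list_into_segments(range(7), 2)  ->  [[0, 1, 2], [3, 4, 5, 6]]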
1133 
1134 #
1135 # COORDINATE MANIPULATION
1136 #
1137 
1138 
1139 def translate_hierarchy(hierarchy, translation_vector):
1140  '''
1141  Apply a translation to a hierarchy along the input vector.
1142  '''
1143  rbs = set()
1144  xyzs = set()
1145  if type(translation_vector) == list:
1146  transformation = IMP.algebra.Transformation3D(
1147  IMP.algebra.Vector3D(translation_vector))
1148  else:
1149  transformation = IMP.algebra.Transformation3D(translation_vector)
1150  for p in IMP.atom.get_leaves(hierarchy):
1151  if IMP.core.RigidBody.get_is_setup(p):
1152  rbs.add(IMP.core.RigidBody(p))
1153  elif IMP.core.RigidMember.get_is_setup(p):
1154  rb = IMP.core.RigidMember(p).get_rigid_body()
1155  rbs.add(rb)
1156  else:
1157  xyzs.add(p)
1158  for xyz in xyzs:
1159  IMP.core.transform(IMP.core.XYZ(xyz), transformation)
1160  for rb in rbs:
1161  IMP.core.transform(rb, transformation)
1162 
1163 
1164 def translate_hierarchies(hierarchies, translation_vector):
1165  for h in hierarchies:
1166  IMP.pmi.tools.translate_hierarchy(h, translation_vector)
1167 
1168 
1169 def translate_hierarchies_to_reference_frame(hierarchies):
1170  xc = 0
1171  yc = 0
1172  zc = 0
1173  nc = 0
1174  for h in hierarchies:
1175  for p in IMP.atom.get_leaves(h):
1176  coor = IMP.core.XYZ(p).get_coordinates()
1177  nc += 1
1178  xc += coor[0]
1179  yc += coor[1]
1180  zc += coor[2]
1181  xc = xc / nc
1182  yc = yc / nc
1183  zc = zc / nc
1184  IMP.pmi.tools.translate_hierarchies(hierarchies, (-xc, -yc, -zc))
1185 
1186 
1187 #
1188 # Tools to simulate data
1189 #
1190 
1191 def normal_density_function(expected_value, sigma, x):
1192  return (
1193  1 / math.sqrt(2 * math.pi) / sigma *
1194  math.exp(-(x - expected_value) ** 2 / 2 / sigma / sigma)
1195  )
1196 
1197 
1198 def log_normal_density_function(expected_value, sigma, x):
1199  return (
1200  1 / math.sqrt(2 * math.pi) / sigma / x *
1201  math.exp(-(math.log(x / expected_value) ** 2 / 2 / sigma / sigma))
1202  )
1203 
1204 
1205 def get_random_residue_pairs(representation, resolution,
1206  number,
1207  max_distance=None,
1208  avoid_same_particles=False,
1209  names=None):
1210 
1211  particles = []
1212  if names is None:
1213  names=list(representation.hier_dict.keys())
1214 
1215  for name in names:
1216  prot = representation.hier_dict[name]
1217  particles += select(representation,name=name,resolution=resolution)
1218  random_residue_pairs = []
1219  while len(random_residue_pairs)<=number:
1220  p1 = random.choice(particles)
1221  p2 = random.choice(particles)
1222  if max_distance is not None and \
1223  IMP.core.get_distance(IMP.core.XYZ(p1), IMP.core.XYZ(p2)) > max_distance:
1224  continue
1225  r1 = random.choice(IMP.pmi.tools.get_residue_indexes(p1))
1226  r2 = random.choice(IMP.pmi.tools.get_residue_indexes(p2))
1227  if r1==r2 and avoid_same_particles: continue
1228  name1 = representation.get_prot_name_from_particle(p1)
1229  name2 = representation.get_prot_name_from_particle(p2)
1230  random_residue_pairs.append((name1, r1, name2, r2))
1231 
1232  return random_residue_pairs
1233 
1234 
1235 def get_random_data_point(
1236  expected_value,
1237  ntrials,
1238  sensitivity,
1239  sigma,
1240  outlierprob,
1241  begin_end_nbins_tuple,
1242  log=False,
1243  loggrid=False):
1244  import random
1245 
1246  begin = begin_end_nbins_tuple[0]
1247  end = begin_end_nbins_tuple[1]
1248  nbins = begin_end_nbins_tuple[2]
1249 
1250  if not loggrid:
1251  fmod_grid = get_grid(begin, end, nbins, True)
1252  else:
1253  fmod_grid = get_log_grid(begin, end, nbins)
1254 
1255  norm = 0
1256  cumul = []
1257  cumul.append(0)
1258 
1259  a = []
1260  for i in range(0, ntrials):
1261  a.append([random.random(), True])
1262 
1263  if sigma != 0.0:
1264  for j in range(1, len(fmod_grid)):
1265  fj = fmod_grid[j]
1266  fjm1 = fmod_grid[j - 1]
1267  df = fj - fjm1
1268 
1269  if not log:
1270  pj = normal_density_function(expected_value, sigma, fj)
1271  pjm1 = normal_density_function(expected_value, sigma, fjm1)
1272  else:
1273  pj = log_normal_density_function(expected_value, sigma, fj)
1274  pjm1 = log_normal_density_function(expected_value, sigma, fjm1)
1275 
1276  norm += (pj + pjm1) / 2.0 * df
1277  cumul.append(norm)
1278  # print fj, pj
1279 
1280  random_points = []
1281 
1282  for i in range(len(cumul)):
1283  # print i,a, cumul[i], norm
1284  for aa in a:
1285  if (aa[0] <= cumul[i] / norm and aa[1]):
1286  random_points.append(
1287  int(fmod_grid[i] / sensitivity) * sensitivity)
1288  aa[1] = False
1289 
1290  else:
1291  random_points = [expected_value] * ntrials
1292 
1293  for i in range(len(random_points)):
1294  if random.random() < outlierprob:
1295  a = random.uniform(begin, end)
1296  random_points[i] = int(a / sensitivity) * sensitivity
1297  print(random_points)
1298  '''
1299  for i in range(ntrials):
1300  if random.random() > OUTLIERPROB_:
1301  r=truncnorm.rvs(0.0,1.0,expected_value,BETA_)
1302  if r>1.0: print r,expected_value,BETA_
1303  else:
1304  r=random.random()
1305  random_points.append(int(r/sensitivity)*sensitivity)
1306  '''
1307 
1308  rmean = 0.
1309  rmean2 = 0.
1310  for r in random_points:
1311  rmean += r
1312  rmean2 += r * r
1313 
1314  rmean /= float(ntrials)
1315  rmean2 /= float(ntrials)
1316  stddev = math.sqrt(max(rmean2 - rmean * rmean, 0.))
1317  return rmean, stddev
1318 
1319 def print_multicolumn(list_of_strings, ncolumns=2, truncate=40):
1320 
1321  l = list_of_strings
1322 
1323  cols = ncolumns
1324  # pad l with blank entries so its length is divisible by the column count
1325  while len(l) % cols:
1326  l.append(" ")
1327 
1328  split = [l[i:i + len(l) // cols] for i in range(0, len(l), len(l) // cols)]
1329  for row in zip(*split):
1330  print("".join(str.ljust(i, truncate) for i in row))
1331 
1332 class ColorChange(object):
1333  '''Convert a color code between hexadecimal and RGB'''
1334  def __init__(self):
1335  self._NUMERALS = '0123456789abcdefABCDEF'
1336  self._HEXDEC = dict((v, int(v, 16)) for v in (x+y for x in self._NUMERALS for y in self._NUMERALS))
1337  self.LOWERCASE, self.UPPERCASE = 'x', 'X'
1338 
1339  def rgb(self,triplet):
1340  return float(self._HEXDEC[triplet[0:2]]), float(self._HEXDEC[triplet[2:4]]), float(self._HEXDEC[triplet[4:6]])
1341 
1342  def triplet(self,rgb, lettercase=None):
1343  if lettercase is None: lettercase=self.LOWERCASE
1344  return format(rgb[0]<<16 | rgb[1]<<8 | rgb[2], '06'+lettercase)
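
# Worked example (added for illustration):
#
#   cc = ColorChange()
#   cc.rgb("ff0000")          ->  (255.0, 0.0, 0.0)
#   cc.triplet((255, 0, 0))   ->  'ff0000'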
1345 
1346 # -------------- Collections --------------- #
1347 
1348 class OrderedSet(collections.MutableSet):
1349 
1350  def __init__(self, iterable=None):
1351  self.end = end = []
1352  end += [None, end, end] # sentinel node for doubly linked list
1353  self.map = {} # key --> [key, prev, next]
1354  if iterable is not None:
1355  self |= iterable
1356 
1357  def __len__(self):
1358  return len(self.map)
1359 
1360  def __contains__(self, key):
1361  return key in self.map
1362 
1363  def add(self, key):
1364  if key not in self.map:
1365  end = self.end
1366  curr = end[1]
1367  curr[2] = end[1] = self.map[key] = [key, curr, end]
1368 
1369  def discard(self, key):
1370  if key in self.map:
1371  key, prev, next = self.map.pop(key)
1372  prev[2] = next
1373  next[1] = prev
1374 
1375  def __iter__(self):
1376  end = self.end
1377  curr = end[2]
1378  while curr is not end:
1379  yield curr[0]
1380  curr = curr[2]
1381 
1382  def __reversed__(self):
1383  end = self.end
1384  curr = end[1]
1385  while curr is not end:
1386  yield curr[0]
1387  curr = curr[1]
1388 
1389  def pop(self, last=True):
1390  if not self:
1391  raise KeyError('set is empty')
1392  if last:
1393  key = self.end[1][0]
1394  else:
1395  key = self.end[2][0]
1396  self.discard(key)
1397  return key
1398 
1399  def __repr__(self):
1400  if not self:
1401  return '%s()' % (self.__class__.__name__,)
1402  return '%s(%r)' % (self.__class__.__name__, list(self))
1403 
1404  def __eq__(self, other):
1405  if isinstance(other, OrderedSet):
1406  return len(self) == len(other) and list(self) == list(other)
1407  return set(self) == set(other)
1408 
1409 
1410 class OrderedDefaultDict(OrderedDict):
1411  """Store objects in order they were added, but with default type.
1412  Source: http://stackoverflow.com/a/4127426/2608793
1413  """
1414  def __init__(self, *args, **kwargs):
1415  if not args:
1416  self.default_factory = None
1417  else:
1418  if not (args[0] is None or callable(args[0])):
1419  raise TypeError('first argument must be callable or None')
1420  self.default_factory = args[0]
1421  args = args[1:]
1422  super(OrderedDefaultDict, self).__init__(*args, **kwargs)
1423 
1424  def __missing__ (self, key):
1425  if self.default_factory is None:
1426  raise KeyError(key)
1427  self[key] = default = self.default_factory()
1428  return default
1429 
1430  def __reduce__(self): # optional, for pickle support
1431  args = (self.default_factory,) if self.default_factory else ()
1432  return self.__class__, args, None, None, iter(self.items())
1433 
1434 # -------------- PMI2 Tools --------------- #
1435 
1436 def set_coordinates_from_rmf(hier,rmf_fn,frame_num=0):
1437  """Extract frame from RMF file and fill coordinates. Must be identical topology.
1438  @param hier The (System) hierarchy to fill (e.g. after you've built it)
1439  @param rmf_fn The file to extract from
1440  @param frame_num The frame number to extract
1441  """
1442  rh = RMF.open_rmf_file_read_only(rmf_fn)
1443  IMP.rmf.link_hierarchies(rh,[hier])
1444  IMP.rmf.load_frame(rh, RMF.FrameID(frame_num))
1445  del rh
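
# Illustrative usage sketch (added; the RMF file name is hypothetical): reload
# frame 5 of a saved trajectory into an already-built hierarchy.
#
#   set_coordinates_from_rmf(root_hier, "output/rmfs/0.rmf3", frame_num=5)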
1446 
1447 def input_adaptor(stuff,
1448  pmi_resolution=0,
1449  flatten=False,
1450  selection_tuple=None):
1451  """Adapt things for PMI (degrees of freedom, restraints, ...)
1452  Returns list of list of hierarchies, separated into Molecules if possible.
1453  (iterable of ^2) hierarchy -> returns input as list of list of hierarchies, only one entry.
1454  (iterable of ^2) PMI::System/State/Molecule/TempResidue ->
1455  returns residue hierarchies, grouped in molecules, at requested resolution
1456  @param stuff Can be one of the following inputs:
1457  IMP Hierarchy, PMI System/State/Molecule/TempResidue, or a list/set (of list/set) of them.
1458  Must be uniform input, however. No mixing object types.
1459  @param pmi_resolution For selecting, only does it if you pass PMI objects. Set it to "all"
1460  if you want all resolutions!
1461  @param flatten Set to True if you just want all hierarchies in one list.
1462  \note since this relies on IMP::atom::Selection, this will not return any objects if they weren't built!
1463  But there should be no problem if you request unbuilt residues, they should be ignored.
1464  """
1465 
1466  # if iterable (of iterable), make sure uniform type
1467  def get_ok_iter(it):
1468  type_set = set(type(a) for a in it)
1469  if len(type_set)!=1:
1470  raise Exception('input_adaptor: can only pass one type of object at a time')
1471  xtp = type_set.pop()
1472  return xtp,list(it)
1473 
1474  if stuff is None:
1475  return stuff
1476  if hasattr(stuff,'__iter__'):
1477  if len(stuff)==0:
1478  return stuff
1479  tp,thelist = get_ok_iter(stuff)
1480 
1481  # iter of iter of should be ok
1482  if hasattr(next(iter(thelist)),'__iter__'):
1483  flatlist = [i for sublist in thelist for i in sublist]
1484  tp,thelist = get_ok_iter(flatlist)
1485 
1486  stuff = thelist
1487  else:
1488  tp = type(stuff)
1489  stuff = [stuff]
1490 
1491  # now that things are ok, do selection if requested
1492  hier_list = []
1493  pmi_input = False
1494  if tp in (IMP.pmi.topology.System, IMP.pmi.topology.State,
1495  IMP.pmi.topology.Molecule, IMP.pmi.topology.TempResidue):
1496  # if PMI, perform selection using gathered indexes
1497  pmi_input = True
1498  indexes_per_mol = OrderedDefaultDict(list) #key is Molecule object, value are residues
1499  if tp==IMP.pmi.topology.System:
1500  for system in stuff:
1501  for state in system.get_states():
1502  mdict = state.get_molecules()
1503  for molname in mdict:
1504  for copy in mdict[molname]:
1505  indexes_per_mol[copy] += [r.get_index() for r in copy.get_residues()]
1506  elif tp==IMP.pmi.topology.State:
1507  for state in stuff:
1508  mdict = state.get_molecules()
1509  for molname in mdict:
1510  for copy in mdict[molname]:
1511  indexes_per_mol[copy] += [r.get_index() for r in copy.get_residues()]
1512  elif tp==IMP.pmi.topology.Molecule:
1513  for molecule in stuff:
1514  indexes_per_mol[molecule] += [r.get_index() for r in molecule.get_residues()]
1515  elif tp==IMP.pmi.topology.TempResidue:
1516  for tempres in stuff:
1517  indexes_per_mol[tempres.get_molecule()].append(tempres.get_index())
1518  for mol in indexes_per_mol:
1519  if pmi_resolution=='all':
1520  # because you select from the molecule,
1521  # this will start the search from the base resolution
1522  ps = select_at_all_resolutions(mol.get_hierarchy(),
1523  residue_indexes=indexes_per_mol[mol])
1524  else:
1525  sel = IMP.atom.Selection(mol.get_hierarchy(),
1526  resolution=pmi_resolution,
1527  residue_indexes=indexes_per_mol[mol])
1528  ps = sel.get_selected_particles()
1529  hier_list.append([IMP.atom.Hierarchy(p) for p in ps])
1530  else:
1531  try:
1532  if IMP.atom.Hierarchy.get_is_setup(stuff[0]):
1533  hier_list = stuff
1534  else:
1535  raise Exception('input_adaptor: you passed something of type',tp)
1536  except:
1537  raise Exception('input_adaptor: you passed something of type',tp)
1538 
1539  if flatten and pmi_input:
1540  return [h for sublist in hier_list for h in sublist]
1541  else:
1542  return hier_list
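
# Illustrative usage sketches (added; mol1 and mol2 stand for PMI Molecule
# objects built elsewhere):
#
#   # one list of residue hierarchies per molecule, at resolution 1
#   groups = input_adaptor([mol1, mol2], pmi_resolution=1)
#   # everything in a single flat list, at all resolutions
#   flat = input_adaptor([mol1, mol2], pmi_resolution='all', flatten=True)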
1543 
1544 def get_residue_type_from_one_letter_code(code):
1545  threetoone = {'ALA': 'A', 'ARG': 'R', 'ASN': 'N', 'ASP': 'D',
1546  'CYS': 'C', 'GLU': 'E', 'GLN': 'Q', 'GLY': 'G',
1547  'HIS': 'H', 'ILE': 'I', 'LEU': 'L', 'LYS': 'K',
1548  'MET': 'M', 'PHE': 'F', 'PRO': 'P', 'SER': 'S',
1549  'THR': 'T', 'TRP': 'W', 'TYR': 'Y', 'VAL': 'V', 'UNK': 'X'}
1550  one_to_three={}
1551  for k in threetoone:
1552  one_to_three[threetoone[k]] = k
1553  return IMP.atom.ResidueType(one_to_three[code])
1554 
1555 
1556 def get_all_leaves(list_of_hs):
1557  """ Just get the leaves from a list of hierarchies """
1558  lvs = list(itertools.chain.from_iterable(IMP.atom.get_leaves(item) for item in list_of_hs))
1559  return lvs
1560 
1561 
1562 def select_at_all_resolutions(hier=None,
1563  hiers=None,
1564  **kwargs):
1565  """Perform selection using the usual keywords but return ALL resolutions (BEADS and GAUSSIANS).
1566  Returns in flat list!
1567  """
1568 
1569  if hiers is None:
1570  hiers = []
1571  if hier is not None:
1572  hiers.append(hier)
1573  if len(hiers)==0:
1574  print("WARNING: You passed nothing to select_at_all_resolutions()")
1575  return []
1576  ret = OrderedSet()
1577  for hsel in hiers:
1578  try:
1579  htest = IMP.atom.Hierarchy.get_is_setup(hsel)
1580  except:
1581  raise Exception('select_at_all_resolutions: you have to pass an IMP Hierarchy')
1582  if not htest:
1583  raise Exception('select_at_all_resolutions: you have to pass an IMP Hierarchy')
1584  if 'resolution' in kwargs or 'representation_type' in kwargs:
1585  raise Exception("don't pass resolution or representation_type to this function")
1586  selB = IMP.atom.Selection(hsel,resolution=IMP.atom.ALL_RESOLUTIONS,
1587  representation_type=IMP.atom.BALLS,**kwargs)
1588  selD = IMP.atom.Selection(hsel,resolution=IMP.atom.ALL_RESOLUTIONS,
1589  representation_type=IMP.atom.DENSITIES,**kwargs)
1590  ret |= OrderedSet(selB.get_selected_particles())
1591  ret |= OrderedSet(selD.get_selected_particles())
1592  return list(ret)
1593 
1594 
1595 def get_particles_within_zone(hier,
1596  target_ps,
1597  sel_zone,
1598  entire_residues,
1599  exclude_backbone):
1600  """Utility to retrieve particles from a hierarchy within a
1601  zone around a set of ps.
1602  @param hier The hierarchy in which to look for neighbors
1603  @param target_ps The particles for zoning
1604  @param sel_zone The maximum distance
1605  @param entire_residues If True, will grab entire residues
1606  @param exclude_backbone If True, will only return sidechain particles
1607  """
1608 
1609  test_sel = IMP.atom.Selection(hier)
1610  backbone_types=['C','N','CB','O']
1611  if exclude_backbone:
1612  test_sel -= IMP.atom.Selection(hier,atom_types=[IMP.atom.AtomType(n)
1613  for n in backbone_types])
1614  test_ps = test_sel.get_selected_particles()
1615  nn = IMP.algebra.NearestNeighbor3D([IMP.core.XYZ(p).get_coordinates()
1616  for p in test_ps])
1617  zone = set()
1618  for target in target_ps:
1619  zone|=set(nn.get_in_ball(IMP.core.XYZ(target).get_coordinates(),sel_zone))
1620  zone_ps = [test_ps[z] for z in zone]
1621  if entire_residues:
1622  final_ps = set()
1623  for z in zone_ps:
1624  final_ps|=set(IMP.atom.Hierarchy(z).get_parent().get_children())
1625  zone_ps = [h.get_particle() for h in final_ps]
1626  return zone_ps
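
# Illustrative usage sketch (added): collect whole residues of root_hier that
# lie within 10 A of a set of target particles, ignoring backbone atoms.
#
#   near_ps = get_particles_within_zone(root_hier, target_ps, 10.0,
#                                       entire_residues=True,
#                                       exclude_backbone=True)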
1627 
1628 
1630  """Returns unique objects in original order"""
1631  rbs = set()
1632  beads = []
1633  rbs_ordered = []
1634  if not hasattr(hiers,'__iter__'):
1635  hiers = [hiers]
1636  for p in get_all_leaves(hiers):
1637  if IMP.core.RigidMember.get_is_setup(p):
1638  rb = IMP.core.RigidMember(p).get_rigid_body()
1639  if rb not in rbs:
1640  rbs.add(rb)
1641  rbs_ordered.append(rb)
1642  elif IMP.core.NonRigidMember.get_is_setup(p):
1643  rb = IMP.core.NonRigidMember(p).get_rigid_body()
1644  if rb not in rbs:
1645  rbs.add(rb)
1646  rbs_ordered.append(rb)
1647  beads.append(p)
1648  else:
1649  beads.append(p)
1650  return rbs_ordered,beads
1651 
1652 def shuffle_configuration(root_hier,
1653  max_translation=300., max_rotation=2.0 * pi,
1654  avoidcollision_rb=True, avoidcollision_fb=False,
1655  cutoff=10.0, niterations=100,
1656  bounding_box=None,
1657  excluded_rigid_bodies=[],
1658  hierarchies_excluded_from_collision=[],
1659  verbose=False):
1660  """Shuffle particles. Used to restart the optimization.
1661  The configuration of the system is initialized by placing each
1662  rigid body and each bead randomly in a box with a side of
1663  max_translation angstroms, and far enough from each other to
1664  prevent any steric clashes. The rigid bodies are also randomly rotated.
1665  @param root_hier The hierarchy to shuffle. Will find rigid bodies and flexible beads.
1666  @param max_translation Max translation (rbs and flexible beads)
1667  @param max_rotation Max rotation (rbs only)
1668  @param avoidcollision_rb check if the particle/rigid body was
1669  placed close to another particle; uses the optional
1670  arguments cutoff and niterations
1671  @param avoidcollision_fb Advanced. Generally you want this False because it's hard to shuffle beads.
1672  @param cutoff Distance less than this is a collision
1673  @param niterations How many times to try avoiding collision
1674  @param bounding_box Only shuffle particles within this box. Defined by ((x1,y1,z1),(x2,y2,z2)).
1675  @param excluded_rigid_bodies Don't shuffle these rigid body objects
1676  @param hierarchies_excluded_from_collision Don't count collision with these bodies
1677  @param verbose Give more output
1678  \note Best to only call this function after you've set up degrees of freedom
1679  """
1680 
1681  ### checking input
1682  rigid_bodies,flexible_beads = get_rbs_and_beads(root_hier)
1683 
1684  if len(rigid_bodies)>0:
1685  mdl = rigid_bodies[0].get_model()
1686  elif len(flexible_beads)>0:
1687  mdl = flexible_beads[0].get_model()
1688  else:
1689  raise Exception("You passed something weird to shuffle_configuration")
1690 
1691  if len(rigid_bodies) == 0:
1692  print("shuffle_configuration: rigid bodies were not intialized")
1693 
1694  ### gather all particles
1695  gcpf = IMP.core.GridClosePairsFinder()
1696  gcpf.set_distance(cutoff)
1697  allparticleindexes = []
1698  hierarchies_excluded_from_collision_indexes = []
1699 
1700  # Add particles from excluded hierarchies to excluded list
1701  for h in hierarchies_excluded_from_collision:
1702  for p in IMP.core.get_leaves(h):
1703  hierarchies_excluded_from_collision_indexes.append(p.get_particle_index())
1704 
1705  for p in IMP.core.get_leaves(root_hier):
1706  if IMP.core.XYZR.get_is_setup(p):
1707  allparticleindexes.append(p.get_particle_index())
1708  # remove the fixed densities particles out of the calculation
1709  if IMP.core.Gaussian.get_is_setup(p):
1710  hierarchies_excluded_from_collision_indexes.append(p.get_particle_index())
1711  if not bounding_box is None:
1712  ((x1, y1, z1), (x2, y2, z2)) = bounding_box
1713  ub = IMP.algebra.Vector3D(x1, y1, z1)
1714  lb = IMP.algebra.Vector3D(x2, y2, z2)
1715  bb = IMP.algebra.BoundingBox3D(ub, lb)
1716 
1717  allparticleindexes = list(
1718  set(allparticleindexes) - set(hierarchies_excluded_from_collision_indexes))
1719 
1720  print('shuffling', len(rigid_bodies), 'rigid bodies')
1721  for rb in rigid_bodies:
1722  if rb not in excluded_rigid_bodies:
1723  if avoidcollision_rb:
1724 
1725  rbindexes = rb.get_member_particle_indexes()
1726 
1727  rbindexes = list(
1728  set(rbindexes) - set(hierarchies_excluded_from_collision_indexes))
1729  otherparticleindexes = list(
1730  set(allparticleindexes) - set(rbindexes))
1731 
1732  if len(otherparticleindexes) == 0:
1733  continue
1734 
1735  niter = 0
1736  while niter < niterations:
1737  rbxyz = (rb.get_x(), rb.get_y(), rb.get_z())
1738 
1739  # overrides the perturbation
1740  if not bounding_box is None:
1741  translation = IMP.algebra.get_random_vector_in(bb)
1742  old_coord=IMP.core.XYZ(rb).get_coordinates()
1743  rotation = IMP.algebra.get_random_rotation_3d()
1744  transformation = IMP.algebra.Transformation3D(
1745  rotation,
1746  translation-old_coord)
1747  else:
1748  transformation = IMP.algebra.get_random_local_transformation(
1749  rbxyz,
1750  max_translation,
1751  max_rotation)
1752 
1753  IMP.core.transform(rb, transformation)
1754 
1755  if avoidcollision_rb:
1756  mdl.update()
1757  npairs = len(
1758  gcpf.get_close_pairs(
1759  mdl,
1760  otherparticleindexes,
1761  rbindexes))
1762  if npairs == 0:
1763  niter = niterations
1764  else:
1765  niter += 1
1766  if verbose:
1767  print("shuffle_configuration: rigid body placed close to other %d particles, trying again..." % npairs)
1768  print("shuffle_configuration: rigid body name: " + rb.get_name())
1769  if niter == niterations:
1770  raise ValueError("tried the maximum number of iterations to avoid collisions, increase the distance cutoff")
1771  else:
1772  break
1773 
1774  print('shuffling', len(flexible_beads), 'flexible beads')
1775  for fb in flexible_beads:
1776  if avoidcollision_fb:
1777  fbindexes = IMP.get_indexes([fb])
1778  otherparticleindexes = list(
1779  set(allparticleindexes) - set(fbindexes))
1780  if len(otherparticleindexes) == 0:
1781  continue
1782  niter = 0
1783  while niter < niterations:
1784  fbxyz = IMP.core.XYZ(fb).get_coordinates()
1785  if bounding_box is not None:
1786  # overrides the perturbation
1787  translation = IMP.algebra.get_random_vector_in(bb)
1788  transformation = IMP.algebra.Transformation3D(translation-fbxyz)
1789  else:
1790  transformation = IMP.algebra.get_random_local_transformation(
1791  fbxyz,
1792  max_translation,
1793  max_rotation)
1794 
1795  if IMP.core.RigidBody.get_is_setup(fb): #for gaussians
1796  d=IMP.core.RigidBody(fb)
1797  else:
1798  d=IMP.core.XYZ(fb)
1799 
1800  IMP.core.transform(d, transformation)
1801 
1802  if avoidcollision_fb:
1803  mdl.update()
1804  npairs = len(
1805  gcpf.get_close_pairs(
1806  mdl,
1807  otherparticleindexes,
1808  fbindexes))
1809  if npairs == 0:
1810  niter = niterations
1811  else:
1812  niter += 1
1813  print("shuffle_configuration: floppy body placed close to other %d particles, trying again..." % npairs)
1814  if niter == niterations:
1815  raise ValueError("tried the maximum number of iterations to avoid collisions, increase the distance cutoff")
1816  else:
1817  break
1818 
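Example (a minimal sketch, not part of tools.py): a typical call to shuffle_configuration once a hierarchy has been built and its degrees of freedom defined. The hierarchy `root_hier` is assumed to exist already, and the numeric values are illustrative only.

    import IMP.pmi.tools

    # Randomize rigid-body positions and orientations inside a 100x100x100 box,
    # retrying a placement (up to 100 times) whenever it puts the body within
    # 5 units of any other particle. Flexible-bead collision checks stay off.
    IMP.pmi.tools.shuffle_configuration(
        root_hier,                      # hierarchy built elsewhere (assumed)
        max_translation=50.,            # size of the random displacement
        avoidcollision_rb=True,
        avoidcollision_fb=False,
        cutoff=5.0,
        niterations=100,
        bounding_box=((0., 0., 0.), (100., 100., 100.)))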
1819 class Colors(object):
1820  def __init__(self):
1821  self.colors={
1822  "reds":[("maroon","#800000",(128,0,0)),("dark red","#8B0000",(139,0,0)),
1823  ("brown","#A52A2A",(165,42,42)),("firebrick","#B22222",(178,34,34)),
1824  ("crimson","#DC143C",(220,20,60)),("red","#FF0000",(255,0,0)),
1825  ("tomato","#FF6347",(255,99,71)),("coral","#FF7F50",(255,127,80)),
1826  ("indian red","#CD5C5C",(205,92,92)),("light coral","#F08080",(240,128,128)),
1827  ("dark salmon","#E9967A",(233,150,122)),("salmon","#FA8072",(250,128,114)),
1828  ("light salmon","#FFA07A",(255,160,122)),("orange red","#FF4500",(255,69,0)),
1829  ("dark orange","#FF8C00",(255,140,0))],
1830  "yellows":[("orange","#FFA500",(255,165,0)),("gold","#FFD700",(255,215,0)),
1831  ("dark golden rod","#B8860B",(184,134,11)),("golden rod","#DAA520",(218,165,32)),
1832  ("pale golden rod","#EEE8AA",(238,232,170)),("dark khaki","#BDB76B",(189,183,107)),
1833  ("khaki","#F0E68C",(240,230,140)),("olive","#808000",(128,128,0)),
1834  ("yellow","#FFFF00",(255,255,0)),("antique white","#FAEBD7",(250,235,215)),
1835  ("beige","#F5F5DC",(245,245,220)),("bisque","#FFE4C4",(255,228,196)),
1836  ("blanched almond","#FFEBCD",(255,235,205)),("wheat","#F5DEB3",(245,222,179)),
1837  ("corn silk","#FFF8DC",(255,248,220)),("lemon chiffon","#FFFACD",(255,250,205)),
1838  ("light golden rod yellow","#FAFAD2",(250,250,210)),("light yellow","#FFFFE0",(255,255,224))],
1839  "greens":[("yellow green","#9ACD32",(154,205,50)),("dark olive green","#556B2F",(85,107,47)),
1840  ("olive drab","#6B8E23",(107,142,35)),("lawn green","#7CFC00",(124,252,0)),
1841  ("chart reuse","#7FFF00",(127,255,0)),("green yellow","#ADFF2F",(173,255,47)),
1842  ("dark green","#006400",(0,100,0)),("green","#008000",(0,128,0)),
1843  ("forest green","#228B22",(34,139,34)),("lime","#00FF00",(0,255,0)),
1844  ("lime green","#32CD32",(50,205,50)),("light green","#90EE90",(144,238,144)),
1845  ("pale green","#98FB98",(152,251,152)),("dark sea green","#8FBC8F",(143,188,143)),
1846  ("medium spring green","#00FA9A",(0,250,154)),("spring green","#00FF7F",(0,255,127)),
1847  ("sea green","#2E8B57",(46,139,87)),("medium aqua marine","#66CDAA",(102,205,170)),
1848  ("medium sea green","#3CB371",(60,179,113)),("light sea green","#20B2AA",(32,178,170)),
1849  ("dark slate gray","#2F4F4F",(47,79,79)),("teal","#008080",(0,128,128)),
1850  ("dark cyan","#008B8B",(0,139,139))],
1851  "blues":[("dark turquoise","#00CED1",(0,206,209)),
1852  ("turquoise","#40E0D0",(64,224,208)),("medium turquoise","#48D1CC",(72,209,204)),
1853  ("pale turquoise","#AFEEEE",(175,238,238)),("aqua marine","#7FFFD4",(127,255,212)),
1854  ("powder blue","#B0E0E6",(176,224,230)),("cadet blue","#5F9EA0",(95,158,160)),
1855  ("steel blue","#4682B4",(70,130,180)),("corn flower blue","#6495ED",(100,149,237)),
1856  ("deep sky blue","#00BFFF",(0,191,255)),("dodger blue","#1E90FF",(30,144,255)),
1857  ("light blue","#ADD8E6",(173,216,230)),("sky blue","#87CEEB",(135,206,235)),
1858  ("light sky blue","#87CEFA",(135,206,250)),("midnight blue","#191970",(25,25,112)),
1859  ("navy","#000080",(0,0,128)),("dark blue","#00008B",(0,0,139)),
1860  ("medium blue","#0000CD",(0,0,205)),("blue","#0000FF",(0,0,255)),("royal blue","#4169E1",(65,105,225)),
1861  ("aqua","#00FFFF",(0,255,255)),("cyan","#00FFFF",(0,255,255)),("light cyan","#E0FFFF",(224,255,255))],
1862  "violets":[("blue violet","#8A2BE2",(138,43,226)),("indigo","#4B0082",(75,0,130)),
1863  ("dark slate blue","#483D8B",(72,61,139)),("slate blue","#6A5ACD",(106,90,205)),
1864  ("medium slate blue","#7B68EE",(123,104,238)),("medium purple","#9370DB",(147,112,219)),
1865  ("dark magenta","#8B008B",(139,0,139)),("dark violet","#9400D3",(148,0,211)),
1866  ("dark orchid","#9932CC",(153,50,204)),("medium orchid","#BA55D3",(186,85,211)),
1867  ("purple","#800080",(128,0,128)),("thistle","#D8BFD8",(216,191,216)),
1868  ("plum","#DDA0DD",(221,160,221)),("violet","#EE82EE",(238,130,238)),
1869  ("magenta / fuchsia","#FF00FF",(255,0,255)),("orchid","#DA70D6",(218,112,214)),
1870  ("medium violet red","#C71585",(199,21,133)),("pale violet red","#DB7093",(219,112,147)),
1871  ("deep pink","#FF1493",(255,20,147)),("hot pink","#FF69B4",(255,105,180)),
1872  ("light pink","#FFB6C1",(255,182,193)),("pink","#FFC0CB",(255,192,203))],
1873  "browns":[("saddle brown","#8B4513",(139,69,19)),("sienna","#A0522D",(160,82,45)),
1874  ("chocolate","#D2691E",(210,105,30)),("peru","#CD853F",(205,133,63)),
1875  ("sandy brown","#F4A460",(244,164,96)),("burly wood","#DEB887",(222,184,135)),
1876  ("tan","#D2B48C",(210,180,140)),("rosy brown","#BC8F8F",(188,143,143)),
1877  ("moccasin","#FFE4B5",(255,228,181)),("navajo white","#FFDEAD",(255,222,173)),
1878  ("peach puff","#FFDAB9",(255,218,185)),("misty rose","#FFE4E1",(255,228,225)),
1879  ("lavender blush","#FFF0F5",(255,240,245)),("linen","#FAF0E6",(250,240,230)),
1880  ("old lace","#FDF5E6",(253,245,230)),("papaya whip","#FFEFD5",(255,239,213)),
1881  ("sea shell","#FFF5EE",(255,245,238))],
1882  "greys":[("black","#000000",(0,0,0)),("dim gray / dim grey","#696969",(105,105,105)),
1883  ("gray / grey","#808080",(128,128,128)),("dark gray / dark grey","#A9A9A9",(169,169,169)),
1884  ("silver","#C0C0C0",(192,192,192)),("light gray / light grey","#D3D3D3",(211,211,211)),
1885  ("gainsboro","#DCDCDC",(220,220,220)),("white smoke","#F5F5F5",(245,245,245)),
1886  ("white","#FFFFFF",(255,255,255))]}
1887 
1888  def assign_color_group(self,color_group,representation,component_names):
1889  for n,p in enumerate(component_names):
1890  s=IMP.atom.Selection(representation.prot,molecule=p)
1891  psel=s.get_selected_particles()
1892  ctuple=self.colors[color_group][n]
1893  print("Assigning "+p+" to color "+ctuple[0])
1894  c=ctuple[2]
1895  color=IMP.display.Color(float(c[0])/255,float(c[1])/255,float(c[2])/255)
1896  for part in psel:
1897  if IMP.display.Colored.get_is_setup(part):
1898  IMP.display.Colored(part).set_color(color)
1899  else:
1900  IMP.display.Colored.setup_particle(part, color)
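Example (a minimal sketch, not part of tools.py): looking up an entry in the color table and coloring components of a PMI representation. Here `simo` stands for an assumed IMP.pmi representation object (one with a `.prot` hierarchy), and the component names "Rpb1" and "Rpb2" are placeholders.

    from IMP.pmi.tools import Colors

    colors = Colors()
    # Each entry is a (name, hex code, (r, g, b)) tuple.
    name, hexcode, rgb = colors.colors["blues"][0]

    # Paint the listed components with successive entries of the "reds" group.
    colors.assign_color_group("reds", simo, ["Rpb1", "Rpb2"])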