3 """@namespace IMP.pmi.tools
4 Miscellaneous utilities.
7 from __future__
import print_function
15 from math
import log,pi,sqrt,exp
21 from collections
import defaultdict
23 from collections
import OrderedDict
25 from IMP.pmi._compat_collections
import OrderedDict
def _get_restraint_set_key():
    if not hasattr(_get_restraint_set_key, 'pmi_rs_key'):
        _get_restraint_set_key.pmi_rs_key = IMP.ModelKey("PMI restraints")
    return _get_restraint_set_key.pmi_rs_key
def _add_restraint_set(model, mk):
    rs = IMP.RestraintSet(model, "PMI restraints")
    model.add_data(mk, rs)
    return rs
38 """Add a PMI restraint to the model.
39 Since Model.add_restraint() no longer exists (in modern IMP restraints
40 should be added to a ScoringFunction instead) store them instead in
41 a RestraintSet, and keep a reference to it in the Model."""
42 mk = _get_restraint_set_key()
43 if model.get_has_data(mk):
44 rs = IMP.RestraintSet.get_from(model.get_data(mk))
46 rs = _add_restraint_set(model, mk)
47 rs.add_restraint(restraint)
50 """Get a RestraintSet containing all PMI restraints added to the model"""
51 mk = _get_restraint_set_key()
52 if not model.get_has_data(mk):
53 print(
"WARNING: no restraints added to model yet")
54 _add_restraint_set(model, mk)
55 return IMP.RestraintSet.get_from(model.get_data(mk))
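# Illustrative usage sketch (not part of the module API): restraints built by
# PMI code are registered on the model and later scored through the aggregated
# RestraintSet.  Here `r` stands for any IMP.Restraint you have constructed.
#
#   m = IMP.Model()
#   add_restraint_to_model(m, r)
#   sf = IMP.core.RestraintsScoringFunction([get_restraint_set(m)])
#   print(sf.evaluate(False))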
class Stopwatch(object):

    def __init__(self, isdelta=True):
        self.starttime = time.clock()
        self.label = "None"
        self.isdelta = isdelta

    def set_label(self, labelstr):
        self.label = labelstr

    def get_output(self):
        output = {}
        if self.isdelta:
            newtime = time.clock()
            output["Stopwatch_" + self.label +
                   "_delta_seconds"] = str(newtime - self.starttime)
            self.starttime = newtime
        else:
            output["Stopwatch_" + self.label +
                   "_elapsed_seconds"] = str(time.clock() - self.starttime)
        return output
class SetupNuisance(object):

    def __init__(self, m, initialvalue, minvalue, maxvalue, isoptimized=True):
        import IMP.isd

        nuisance = IMP.isd.Scale.setup_particle(IMP.Particle(m), initialvalue)
        if minvalue:
            nuisance.set_lower(minvalue)
        if maxvalue:
            nuisance.set_upper(maxvalue)

        nuisance.set_is_optimized(nuisance.get_nuisance_key(), isoptimized)
        self.nuisance = nuisance

    def get_particle(self):
        return self.nuisance
class SetupWeight(object):

    def __init__(self, m, isoptimized=True):
        import IMP.isd

        pw = IMP.Particle(m)
        self.weight = IMP.isd.Weight.setup_particle(pw)
        self.weight.set_weights_are_optimized(isoptimized)

    def get_particle(self):
        return self.weight
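# Illustrative usage sketch (assumes an existing IMP.Model `m`): the two helper
# classes above wrap IMP.isd decorators and hand back the decorated particle
# for use inside Bayesian restraints.
#
#   sigma = SetupNuisance(m, 1.0, 0.01, 100.0, isoptimized=True).get_particle()
#   weight = SetupWeight(m).get_particle()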
class ParticleToSampleFilter(object):
    def __init__(self, sampled_objects):
        self.sampled_objects = sampled_objects
        self.filters = []

    def add_filter(self, filter_string):
        self.filters.append(filter_string)

    def get_particles_to_sample(self):
        particles_to_sample = {}
        for so in self.sampled_objects:
            ps_dict = so.get_particles_to_sample()
            for key in ps_dict:
                for f in self.filters:
                    if f in key:
                        if key not in particles_to_sample:
                            particles_to_sample[key] = ps_dict[key]
                        else:
                            particles_to_sample[key] += ps_dict[key]
        return particles_to_sample
class ParticleToSampleList(object):

    def __init__(self, label="None"):

        self.dictionary_particle_type = {}
        self.dictionary_particle_transformation = {}
        self.dictionary_particle_name = {}
        self.label = label

    def add_particle(self, particle, particle_type,
                     particle_transformation, name):
        if particle_type not in ["Rigid_Bodies", "Floppy_Bodies",
                                 "Nuisances", "X_coord", "Weights"]:
            raise TypeError("not the right particle type")
        else:
            self.dictionary_particle_type[particle] = particle_type
            if particle_type == "Rigid_Bodies":
                if type(particle_transformation) == tuple \
                        and len(particle_transformation) == 2 \
                        and type(particle_transformation[0]) == float \
                        and type(particle_transformation[1]) == float:
                    self.dictionary_particle_transformation[
                        particle] = particle_transformation
                    self.dictionary_particle_name[particle] = name
                else:
                    raise TypeError("ParticleToSampleList: not the right transformation format for Rigid_Bodies, should be a tuple of floats")
            else:
                if type(particle_transformation) == float:
                    self.dictionary_particle_transformation[
                        particle] = particle_transformation
                    self.dictionary_particle_name[particle] = name
                else:
                    raise TypeError("ParticleToSampleList: not the right transformation format, should be a float")

    def get_particles_to_sample(self):
        ps = {}
        for particle in self.dictionary_particle_type:
            key = self.dictionary_particle_type[particle] + \
                "ParticleToSampleList_" + \
                self.dictionary_particle_name[particle] + "_" + self.label
            value = ([particle],
                     self.dictionary_particle_transformation[particle])
            ps[key] = value
        return ps
class Variance(object):

    def __init__(self, model, tau, niter, prot, th_profile, write_data=False):
        import IMP.saxs

        self.model = model
        self.write_data = write_data
        self.tau = tau
        self.niter = niter
        # select the atom particles from the model
        particles = IMP.atom.get_by_type(prot, IMP.atom.ATOM_TYPE)
        self.particles = particles
        # store reference coordinates and the theoretical profile
        self.refpos = [IMP.core.XYZ(p).get_coordinates() for p in particles]
        self.model_profile = th_profile
    def perturb_particles(self, perturb=True):
        for i, p in enumerate(self.particles):
            newpos = array(self.refpos[i])
            if perturb:
                newpos += numpy.random.normal(0, self.tau, 3)
            newpos = IMP.algebra.Vector3D(newpos)
            IMP.core.XYZ(p).set_coordinates(newpos)
    def get_profile(self):
        model_profile = self.model_profile
        p = model_profile.calculate_profile(self.particles, IMP.saxs.CA_ATOMS)
        return array([model_profile.get_intensity(i) for i in
                      range(model_profile.size())])
    def init_variances(self):
        # create placeholders
        N = self.model_profile.size()
        a = self.profiles[0][:]
        self.m = matrix(a).T  # Nx1
        self.V = self.m * self.m.T
        self.normm = linalg.norm(self.m)
        self.normV = linalg.norm(self.V)
    def update_variances(self):
        a = matrix(self.profiles[-1])  # 1xN
        n = float(len(self.profiles))
        self.m = a.T / n + (n - 1) / n * self.m
        self.V = a.T * a + self.V
        self.oldnormm = self.normm
        self.oldnormV = self.normV
        self.normm = linalg.norm(self.m)
        self.normV = linalg.norm(self.V)
        self.diffm = (self.oldnormm - self.normm) / self.oldnormm
        self.diffV = (self.oldnormV - self.normV) / self.oldnormV
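    # Worked check of the running-mean update above (illustrative, made-up
    # numbers): with one-point profiles a1=[2] and a2=[4], the first update
    # gives m = 2/1 = 2 and the second gives m = 4/2 + (1/2)*2 = 3, i.e. the
    # batch mean of [2, 4].  The recursion m_n = a_n/n + (n-1)/n * m_(n-1)
    # holds componentwise for the whole profile vector.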
    def get_direct_stats(self, a):
        nq = len(a[0])
        nprof = len(a)
        m = [0] * nq
        for prof in a:
            for q, I in enumerate(prof):
                m[q] += I
        m = array(m) / nprof
        V = matrix(a)
        V = V.T * V
        Sigma = (matrix(a - m))
        Sigma = Sigma.T * Sigma / (nprof - 1)
        mi = matrix(diag(1. / m))
        Sigmarel = mi.T * Sigma * mi
        return m, V, Sigma, Sigmarel
    def store_data(self):
        if not os.path.isdir('data'):
            os.mkdir('data')
        profiles = matrix(self.profiles)
        self.directm, self.directV, self.Sigma, self.Sigmarel = \
            self.get_direct_stats(array(profiles))
        directV = self.directV
        save('data/profiles', profiles)
        # absolute profile differences
        fl = open('data/profiles.dat', 'w')
        for i, l in enumerate(array(profiles).T):
            self.model_profile.get_q(i)
            fl.write('%s ' % i)
            for k in l:
                fl.write('%s ' % (k - self.directm[i]))
            fl.write('\n')
        # relative profile differences
        fl = open('data/profiles_rel.dat', 'w')
        for i, l in enumerate(array(profiles).T):
            self.model_profile.get_q(i)
            fl.write('%s ' % i)
            for k in l:
                fl.write('%s ' % ((k - self.directm[i]) / self.directm[i]))
            fl.write('\n')
        save('data/m', self.directm)
        save('data/V', self.directV)
        Sigma = self.Sigma
        save('data/Sigma', Sigma)
        # Sigma matrix
        fl = open('data/Sigma.dat', 'w')
        model_profile = self.model_profile
        for i in range(model_profile.size()):
            qi = model_profile.get_q(i)
            for j in range(model_profile.size()):
                qj = model_profile.get_q(j)
                vij = self.Sigma[i, j]
                fl.write('%s %s %s\n' % (qi, qj, vij))
            fl.write('\n')
        # Sigma eigenvalues
        fl = open('data/eigenvals', 'w')
        for i in linalg.eigvalsh(Sigma):
            fl.write('%s\n' % i)
        Sigmarel = self.Sigmarel
        save('data/Sigmarel', Sigmarel)
        # Sigmarel matrix
        fl = open('data/Sigmarel.dat', 'w')
        model_profile = self.model_profile
        for i in range(model_profile.size()):
            qi = model_profile.get_q(i)
            for j in range(model_profile.size()):
                qj = model_profile.get_q(j)
                vij = self.Sigmarel[i, j]
                fl.write('%s %s %s\n' % (qi, qj, vij))
            fl.write('\n')
        # Sigmarel eigenvalues
        fl = open('data/eigenvals_rel', 'w')
        for i in linalg.eigvalsh(Sigmarel):
            fl.write('%s\n' % i)
        # mean profile
        fl = open('data/mean.dat', 'w')
        for i in range(len(self.directm)):
            qi = self.model_profile.get_q(i)
            fl.write('%s ' % qi)
            fl.write('%s ' % self.directm[i])
            fl.write('%s ' % sqrt(self.Sigma[i, i]))
            fl.write('\n')
    def try_chol(self, jitter):
        Sigma = self.Sigma
        try:
            linalg.cholesky(Sigma + matrix(eye(len(Sigma))) * jitter)
        except linalg.LinAlgError:
            print("Decomposition failed with jitter =", jitter)
            return
        print("Successful decomposition with jitter =", jitter)

    def run(self):
        self.profiles = [self.get_profile()]
        for n in range(self.niter):
            self.perturb_particles()
            self.profiles.append(self.get_profile())
        if self.write_data:
            self.store_data()
    def get_cov(self, relative=True):
        if not relative:
            return self.Sigma
        else:
            return self.Sigmarel
def get_random_cross_link_dataset(representation,
                                  resolution=1.0,
                                  number_of_cross_links=10,
                                  ambiguity_probability=0.1,
                                  confidence_score_range=[0, 100],
                                  avoid_same_particles=False):
    '''Return a random cross-link dataset as a string.
    Every line is a residue pair, together with UniqueIdentifier
    and XL score.'''

    residue_pairs = get_random_residue_pairs(representation, resolution, number_of_cross_links, avoid_same_particles=avoid_same_particles)

    unique_identifier = 0
    cmin = float(min(confidence_score_range))
    cmax = float(max(confidence_score_range))

    dataset = "#\n"

    for (name1, r1, name2, r2) in residue_pairs:
        if random.random() > ambiguity_probability:
            unique_identifier += 1
        score = random.random() * (cmax - cmin) + cmin
        dataset += str(name1) + " " + str(name2) + " " + str(r1) + " " + \
            str(r2) + " " + str(score) + " " + str(unique_identifier) + "\n"

    return dataset
def get_cross_link_data(directory, filename, dist, omega, sigma,
                        don=None, doff=None, prior=0, type_of_profile="gofr"):

    import IMP.isd

    (distmin, distmax, ndist) = dist
    (omegamin, omegamax, nomega) = omega
    (sigmamin, sigmamax, nsigma) = sigma

    filen = IMP.isd.get_data_path("CrossLinkPMFs.dict")
    with open(filen) as xlpot:
        for line in xlpot:
            dictionary = eval(line)
            break

    xpot = dictionary[directory][filename]["distance"]
    pot = dictionary[directory][filename][type_of_profile]

    dist_grid = get_grid(distmin, distmax, ndist, False)
    omega_grid = get_log_grid(omegamin, omegamax, nomega)
    sigma_grid = get_log_grid(sigmamin, sigmamax, nsigma)

    if don is not None and doff is not None:
        xlmsdata = IMP.isd.CrossLinkData(
            dist_grid, omega_grid, sigma_grid,
            xpot, pot, don, doff, prior)
    else:
        xlmsdata = IMP.isd.CrossLinkData(
            dist_grid, omega_grid, sigma_grid,
            xpot, pot)
    return xlmsdata
def get_cross_link_data_from_length(length, dist, omega, sigma):
    (distmin, distmax, ndist) = dist
    (omegamin, omegamax, nomega) = omega
    (sigmamin, sigmamax, nsigma) = sigma

    dist_grid = get_grid(distmin, distmax, ndist, False)
    omega_grid = get_log_grid(omegamin, omegamax, nomega)
    sigma_grid = get_log_grid(sigmamin, sigmamax, nsigma)
def get_grid(gmin, gmax, ngrid, boundaries):
    grid = []
    dx = (gmax - gmin) / float(ngrid)
    for i in range(0, ngrid + 1):
        if not boundaries and i == 0:
            continue
        if not boundaries and i == ngrid:
            continue
        grid.append(gmin + float(i) * dx)
    return grid
def get_log_grid(gmin, gmax, ngrid):
    grid = []
    for i in range(0, ngrid + 1):
        grid.append(gmin * exp(float(i) / ngrid * log(gmax / gmin)))
    return grid
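# Quick illustration of the two grid helpers above (exact outputs):
#   get_grid(0.0, 10.0, 5, True)  -> [0.0, 2.0, 4.0, 6.0, 8.0, 10.0]
#   get_grid(0.0, 10.0, 5, False) -> [2.0, 4.0, 6.0, 8.0]
#   get_log_grid(1.0, 100.0, 2)   -> [1.0, 10.0, 100.0]  (even spacing in log space)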
def cross_link_db_filter_parser(inputstring):
    '''
    example '"{ID_Score}" > 28 AND "{Sample}" ==
     "%10_1%" OR ":Sample}" == "%10_2%" OR ":Sample}"
    == "%10_3%" OR ":Sample}" == "%8_1%" OR ":Sample}" == "%8_2%"'
    '''

    import pyparsing as pp

    operator = pp.Regex(">=|<=|!=|>|<|==|in").setName("operator")
    value = pp.QuotedString(
        '"') | pp.Regex(
        r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?")
    identifier = pp.Word(pp.alphas, pp.alphanums + "_")
    comparison_term = identifier | value
    condition = pp.Group(comparison_term + operator + comparison_term)

    expr = pp.operatorPrecedence(condition, [
        ("OR", 2, pp.opAssoc.LEFT, ),
        ("AND", 2, pp.opAssoc.LEFT, ),
    ])

    parsedstring = str(expr.parseString(inputstring)) \
        .replace("{", "float(entry['") \
        .replace("}", "'])") \
        .replace(":", "str(entry['") \
        .replace("}", "'])") \
        .replace("AND", "and") \
        .replace("OR", "or")

    return parsedstring
def open_file_or_inline_text(filename):
    try:
        fl = open(filename, "r")
    except IOError:
        fl = filename.split("\n")
    return fl
def get_drmsd(prot0, prot1):
    drmsd = 0.
    npairs = 0.
    for i in range(0, len(prot0) - 1):
        for j in range(i + 1, len(prot0)):
            dist0 = IMP.core.get_distance(prot0[i], prot0[j])
            dist1 = IMP.core.get_distance(prot1[i], prot1[j])
            drmsd += (dist0 - dist1) ** 2
            npairs += 1.
    return math.sqrt(drmsd / npairs)
def get_ids_from_fasta_file(fastafile):
    ids = []
    with open(fastafile) as ff:
        for line in ff:
            if line[0] == ">":
                ids.append(line[1:-1])
    return ids
def get_closest_residue_position(hier, resindex, terminus="N"):
    '''
    this function works with plain hierarchies, as read from the pdb,
    no multi-scale hierarchies
    '''
    p = []
    niter = 0
    while len(p) == 0:
        niter += 1
        sel = IMP.atom.Selection(hier, residue_index=resindex,
                                 atom_type=IMP.atom.AT_CA)
        if terminus == "N":
            resindex += 1
        if terminus == "C":
            resindex -= 1
        if niter >= 10000:
            print("get_closest_residue_position: exiting while loop without result")
            break
        p = sel.get_selected_particles()

    if len(p) == 1:
        return IMP.core.XYZ(p[0]).get_coordinates()
    elif len(p) == 0:
        print("get_closest_residue_position: got NO residues for hierarchy %s and residue %i" % (hier, resindex))
        raise Exception("get_closest_residue_position: got NO residues for hierarchy %s and residue %i" % (
            hier, resindex))
    else:
        raise ValueError("got multiple residues for hierarchy %s and residue %i; the list of particles is %s" % (hier, resindex, str([pp.get_name() for pp in p])))
def get_position_terminal_residue(hier, terminus="C", resolution=1):
    '''
    Get the xyz position of the terminal residue at the given resolution.
    @param hier hierarchy containing the terminal residue
    @param terminus either 'N' or 'C'
    @param resolution resolution to use.
    '''
    termresidue = None
    termparticle = None
    for p in IMP.atom.get_leaves(hier):
        if IMP.pmi.Resolution(p).get_resolution() == resolution:
            residues = get_residue_indexes(p)
            if terminus == "C":
                if termresidue is None:
                    termresidue = max(residues)
                    termparticle = p
                elif max(residues) >= termresidue:
                    termresidue = max(residues)
                    termparticle = p
            elif terminus == "N":
                if termresidue is None:
                    termresidue = min(residues)
                    termparticle = p
                elif min(residues) <= termresidue:
                    termresidue = min(residues)
                    termparticle = p
            else:
                raise ValueError("terminus argument should be either N or C")

    return IMP.core.XYZ(termparticle).get_coordinates()
def get_terminal_residue_position(hier, terminus="C", resolution=1):
    '''
    Get the xyz position of the terminal residue at the given resolution.
    @param hier hierarchy containing the terminal residue
    @param terminus either 'N' or 'C'
    @param resolution resolution to use.
    '''
    termresidue = None
    termparticle = None
    sel = IMP.atom.Selection(hier,
                             resolution=resolution,
                             representation_type=IMP.atom.BALLS)
    for p in sel.get_selected_particles():
        residues = get_residue_indexes(p)
        if terminus == "C":
            if termresidue is None:
                termresidue = max(residues)
                termparticle = p
            elif max(residues) >= termresidue:
                termresidue = max(residues)
                termparticle = p
        elif terminus == "N":
            if termresidue is None:
                termresidue = min(residues)
                termparticle = p
            elif min(residues) <= termresidue:
                termresidue = min(residues)
                termparticle = p
        else:
            raise ValueError("terminus argument should be either N or C")

    return IMP.core.XYZ(termparticle).get_coordinates()
def get_residue_gaps_in_hierarchy(hierarchy, start, end):
    '''
    Return the residue index gaps and contiguous segments in the hierarchy.

    @param hierarchy hierarchy to examine
    @param start first residue index
    @param end last residue index

    @return A list of lists of the form
    [[1,100,"cont"],[101,120,"gap"],[121,200,"cont"]]
    '''
    gaps = []
    for n, rindex in enumerate(range(start, end + 1)):
        sel = IMP.atom.Selection(hierarchy, residue_index=rindex,
                                 atom_type=IMP.atom.AT_CA)
        if len(sel.get_selected_particles()) == 0:
            # residue is missing: extend the current gap or open a new one
            if n == 0:
                rindexgap = start
                rindexcont = start - 1
            if rindexgap == rindex - 1:
                gaps[-1][1] += 1
            else:
                gaps.append([rindex, rindex, "gap"])
            rindexgap = rindex
        else:
            # residue is present: extend the current segment or open a new one
            if n == 0:
                rindexgap = start - 1
                rindexcont = start
            if rindexcont == rindex - 1:
                gaps[-1][1] += 1
            else:
                gaps.append([rindex, rindex, "cont"])
            rindexcont = rindex
    return gaps
class map(object):

    def __init__(self):
        self.map = {}

    def set_map_element(self, xvalue, yvalue):
        self.map[xvalue] = yvalue

    def get_map_element(self, invalue):
        # for float keys return the value of the closest key
        if type(invalue) == float:
            n = 0
            mindist = 1
            for x in self.map:
                dist = (invalue - x) * (invalue - x)
                if n == 0 or dist < mindist:
                    mindist = dist
                    minx = x
                n += 1
            return self.map[minx]
        elif type(invalue) == str:
            return self.map[invalue]
        else:
            raise TypeError("wrong type for map")
def select(representation,
           resolution=None,
           hierarchies=None,
           selection_arguments=None,
           name=None,
           name_is_ambiguous=False,
           first_residue=None,
           last_residue=None,
           residue=None,
           representation_type=None):
    '''
    this function uses representation=SimplifiedModel
    it returns the corresponding selected particles
    representation_type="Beads", "Res:X", "Densities", "Representation", "Molecule"
    '''

    if resolution is None:
        allparticles = IMP.atom.get_leaves(representation.prot)

    resolution_particles = None
    hierarchies_particles = None
    names_particles = None
    residue_range_particles = None
    residue_particles = None
    representation_type_particles = None

    if resolution is not None:
        resolution_particles = []
        hs = representation.get_hierarchies_at_given_resolution(resolution)
        for h in hs:
            resolution_particles += IMP.atom.get_leaves(h)

    if hierarchies is not None:
        hierarchies_particles = []
        for h in hierarchies:
            hierarchies_particles += IMP.atom.get_leaves(h)

    if name is not None:
        names_particles = []
        if name_is_ambiguous:
            for namekey in representation.hier_dict:
                if name in namekey:
                    names_particles += IMP.atom.get_leaves(
                        representation.hier_dict[namekey])
        elif name in representation.hier_dict:
            names_particles += IMP.atom.get_leaves(
                representation.hier_dict[name])
        else:
            print("select: component %s is not there" % name)

    if first_residue is not None and last_residue is not None:
        sel = IMP.atom.Selection(representation.prot,
                                 residue_indexes=range(first_residue, last_residue + 1))
        residue_range_particles = [IMP.atom.Hierarchy(p)
                                   for p in sel.get_selected_particles()]

    if residue is not None:
        sel = IMP.atom.Selection(representation.prot, residue_index=residue)
        residue_particles = [IMP.atom.Hierarchy(p)
                             for p in sel.get_selected_particles()]

    if representation_type is not None:
        representation_type_particles = []
        if representation_type == "Molecule":
            for name in representation.hier_representation:
                for repr_type in representation.hier_representation[name]:
                    if repr_type == "Beads" or "Res:" in repr_type:
                        h = representation.hier_representation[name][repr_type]
                        representation_type_particles += IMP.atom.get_leaves(h)

        elif representation_type == "PDB":
            for name in representation.hier_representation:
                for repr_type in representation.hier_representation[name]:
                    if repr_type == "Res:" in repr_type:
                        h = representation.hier_representation[name][repr_type]
                        representation_type_particles += IMP.atom.get_leaves(h)

        else:
            for name in representation.hier_representation:
                h = representation.hier_representation[
                    name][representation_type]
                representation_type_particles += IMP.atom.get_leaves(h)

    selections = [hierarchies_particles, names_particles,
                  residue_range_particles, residue_particles,
                  representation_type_particles]

    if resolution is None:
        selected_particles = set(allparticles)
    else:
        selected_particles = set(resolution_particles)

    for s in selections:
        if s is not None:
            selected_particles = (set(s) & selected_particles)

    return list(selected_particles)
def select_by_tuple(representation, tupleselection,
                    resolution=None, name_is_ambiguous=False):
    if isinstance(tupleselection, tuple) and len(tupleselection) == 3:
        particles = IMP.pmi.tools.select(representation, resolution=resolution,
                                         name=tupleselection[2],
                                         first_residue=tupleselection[0],
                                         last_residue=tupleselection[1],
                                         name_is_ambiguous=name_is_ambiguous)
    elif isinstance(tupleselection, str):
        particles = IMP.pmi.tools.select(representation, resolution=resolution,
                                         name=tupleselection,
                                         name_is_ambiguous=name_is_ambiguous)
    else:
        raise ValueError('you passed something bad to select_by_tuple()')
    # now order the particles by residue number
    particles = IMP.pmi.tools.sort_by_residues(particles)

    return particles
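# Illustrative usage sketch (hypothetical component name "ProtA"): a selection
# tuple is (first_residue, last_residue, component_name), so
#
#   ps = select_by_tuple(representation, (1, 100, "ProtA"), resolution=1)
#
# returns the resolution-1 particles covering residues 1-100 of "ProtA",
# ordered by residue index.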
def get_db_from_csv(csvfilename):
    import csv
    outputlist = []
    with open(csvfilename) as fh:
        csvr = csv.DictReader(fh)
        for l in csvr:
            outputlist.append(l)
    return outputlist
833 """Store the representations for a system."""
838 self.root_hierarchy_dict = {}
839 self.preroot_fragment_hierarchy_dict = {}
840 self.particle_to_name = {}
843 def add_name(self, name):
844 if name
not in self.db:
847 def add_residue_number(self, name, resn):
850 if resn
not in self.db[name]:
851 self.db[name][resn] = {}
853 def add_resolution(self, name, resn, resolution):
855 resolution = float(resolution)
857 self.add_residue_number(name, resn)
858 if resolution
not in self.db[name][resn]:
859 self.db[name][resn][resolution] = []
863 resolution = float(resolution)
865 self.add_residue_number(name, resn)
866 self.add_resolution(name, resn, resolution)
867 self.db[name][resn][resolution] += particles
869 (rh, prf) = self.get_root_hierarchy(p)
870 self.root_hierarchy_dict[p] = rh
871 self.preroot_fragment_hierarchy_dict[p] = prf
872 self.particle_to_name[p] = name
873 if self.model
is None:
874 self.model = particles[0].get_model()
880 names = list(self.db.keys())
886 resolution = float(resolution)
887 return self.db[name][resn][resolution]
889 def get_particles_at_closest_resolution(self, name, resn, resolution):
891 resolution = float(resolution)
892 closestres = min(self.get_residue_resolutions(name, resn),
893 key=
lambda x: abs(float(x) - float(resolution)))
894 return self.get_particles(name, resn, closestres)
896 def get_residue_resolutions(self, name, resn):
898 resolutions = list(self.db[name][resn].keys())
902 def get_molecule_resolutions(self, name):
904 for resn
in self.db[name]:
905 resolutions.update(list(self.db[name][resn].keys()))
909 def get_residue_numbers(self, name):
910 residue_numbers = list(self.db[name].keys())
911 residue_numbers.sort()
912 return residue_numbers
914 def get_particles_by_resolution(self, name, resolution):
915 resolution = float(resolution)
917 for resn
in self.get_residue_numbers(name):
918 result = self.get_particles_at_closest_resolution(
922 pstemp = [p
for p
in result
if p
not in particles]
926 def get_all_particles_by_resolution(self, resolution):
927 resolution = float(resolution)
929 for name
in self.get_names():
930 particles += self.get_particles_by_resolution(name, resolution)
933 def get_root_hierarchy(self, particle):
934 prerootfragment = particle
944 prerootfragment = particle
950 def get_all_root_hierarchies_by_resolution(self, resolution):
952 resolution = float(resolution)
953 particles = self.get_all_particles_by_resolution(resolution)
955 rh = self.root_hierarchy_dict[p]
956 if rh
not in hierarchies:
960 def get_preroot_fragments_by_resolution(self, name, resolution):
962 resolution = float(resolution)
963 particles = self.get_particles_by_resolution(name, resolution)
965 fr = self.preroot_fragment_hierarchy_dict[p]
966 if fr
not in fragments:
970 def show(self, name):
972 for resn
in self.get_residue_numbers(name):
974 for resolution
in self.get_residue_resolutions(name, resn):
975 print(
"----", resolution)
976 for p
in self.get_particles(name, resn, resolution):
977 print(
"--------", p.get_name())
def get_prot_name_from_particle(p, list_of_names):
    '''Return the component name provided a particle and a list of names'''
    root = p
    protname = root.get_name()
    is_a_bead = False
    while protname not in list_of_names:
        root0 = root.get_parent()
        if root0 == IMP.atom.Hierarchy():
            return (None, None)
        protname = root0.get_name()
        # flag particles that live under a "Beads" node
        if "Beads" in protname:
            is_a_bead = True
        root = root0
    return (protname, is_a_bead)
def get_residue_indexes(hier):
    '''
    Retrieve the residue indexes for the given particle.

    The particle must be an instance of Fragment,Residue or Atom
    or else returns an empty list
    '''
    resind = []
    if IMP.atom.Fragment.get_is_setup(hier):
        resind = IMP.atom.Fragment(hier).get_residue_indexes()
    elif IMP.atom.Residue.get_is_setup(hier):
        resind = [IMP.atom.Residue(hier).get_index()]
    elif IMP.atom.Atom.get_is_setup(hier):
        a = IMP.atom.Atom(hier)
        resind = [IMP.atom.Residue(a.get_parent()).get_index()]
    else:
        resind = []
    return resind
def sort_by_residues(particles):
    particles_residues = [(p, get_residue_indexes(p))
                          for p in particles]
    sorted_particles_residues = sorted(
        particles_residues,
        key=lambda tup: tup[1])
    particles = [p[0] for p in sorted_particles_residues]
    return particles
def get_residue_to_particle_map(particles):
    # returns a dictionary that maps (particle, residue indexes) to particles
    particles = sort_by_residues(particles)
    particles_residues = [(p, get_residue_indexes(p))
                          for p in particles]
    return dict(zip(particles_residues, particles))
1043 """Synchronize data over a parallel run"""
1044 from mpi4py
import MPI
1045 comm = MPI.COMM_WORLD
1046 rank = comm.Get_rank()
1047 number_of_processes = comm.size
1050 comm.send(data, dest=0, tag=11)
1053 for i
in range(1, number_of_processes):
1054 data_tmp = comm.recv(source=i, tag=11)
1055 if type(data) == list:
1057 elif type(data) == dict:
1058 data.update(data_tmp)
1060 raise TypeError(
"data not supported, use list or dictionaries")
1062 for i
in range(1, number_of_processes):
1063 comm.send(data, dest=i, tag=11)
1066 data = comm.recv(source=0, tag=11)
1070 """Synchronize data over a parallel run"""
1071 from mpi4py
import MPI
1072 comm = MPI.COMM_WORLD
1073 rank = comm.Get_rank()
1074 number_of_processes = comm.size
1077 comm.send(data, dest=0, tag=11)
1079 for i
in range(1, number_of_processes):
1080 data_tmp = comm.recv(source=i, tag=11)
1082 data[k]+=data_tmp[k]
1084 for i
in range(1, number_of_processes):
1085 comm.send(data, dest=i, tag=11)
1088 data = comm.recv(source=0, tag=11)
def sublist_iterator(l, lmin=None, lmax=None):
    '''
    Yield all sublists of length >= lmin and <= lmax
    '''
    if lmin is None:
        lmin = 0
    if lmax is None:
        lmax = len(l)
    n = len(l) + 1
    for i in range(n):
        for j in range(i + 1, n):
            if len(l[i:j]) <= lmax and len(l[i:j]) >= lmin:
                yield l[i:j]


def flatten_list(l):
    return [item for sublist in l for item in sublist]


def list_chunks_iterator(list, length):
    """ Yield successive length-sized chunks from a list.
    """
    for i in range(0, len(list), length):
        yield list[i:i + length]
def chunk_list_into_segments(seq, num):
    seq = list(seq)
    avg = len(seq) / float(num)
    out = []
    last = 0.0
    while last < len(seq):
        out.append(seq[int(last):int(last + avg)])
        last += avg

    return out
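# Quick illustration of the chunking helpers above (exact outputs):
#   list(list_chunks_iterator([1, 2, 3, 4, 5], 2))      -> [[1, 2], [3, 4], [5]]
#   chunk_list_into_segments([1, 2, 3, 4, 5, 6, 7], 3)  -> [[1, 2], [3, 4], [5, 6, 7]]
#   flatten_list([[1, 2], [3, 4]])                      -> [1, 2, 3, 4]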
def translate_hierarchy(hierarchy, translation_vector):
    '''
    Apply a translation to a hierarchy along the input vector.
    '''
    rbs = set()
    xyzs = set()
    if type(translation_vector) == list:
        transformation = IMP.algebra.Transformation3D(
            IMP.algebra.Vector3D(translation_vector))
    else:
        transformation = IMP.algebra.Transformation3D(translation_vector)
    for p in IMP.atom.get_leaves(hierarchy):
        if IMP.core.RigidBody.get_is_setup(p):
            rbs.add(IMP.core.RigidBody(p))
        elif IMP.core.RigidMember.get_is_setup(p):
            rb = IMP.core.RigidMember(p).get_rigid_body()
            rbs.add(rb)
        else:
            xyzs.add(p)
    for xyz in xyzs:
        IMP.core.transform(IMP.core.XYZ(xyz), transformation)
    for rb in rbs:
        IMP.core.transform(rb, transformation)


def translate_hierarchies(hierarchies, translation_vector):
    for h in hierarchies:
        IMP.pmi.tools.translate_hierarchy(h, translation_vector)


def translate_hierarchies_to_reference_frame(hierarchies):
    xc = 0
    yc = 0
    zc = 0
    nc = 0
    for h in hierarchies:
        for p in IMP.atom.get_leaves(h):
            coor = IMP.core.XYZ(p).get_coordinates()
            nc += 1
            xc += coor[0]
            yc += coor[1]
            zc += coor[2]
    xc = xc / nc
    yc = yc / nc
    zc = zc / nc
    IMP.pmi.tools.translate_hierarchies(hierarchies, (-xc, -yc, -zc))
def normal_density_function(expected_value, sigma, x):
    return (
        1 / math.sqrt(2 * math.pi) / sigma *
        math.exp(-(x - expected_value) ** 2 / 2 / sigma / sigma)
    )


def log_normal_density_function(expected_value, sigma, x):
    return (
        1 / math.sqrt(2 * math.pi) / sigma / x *
        math.exp(-(math.log(x / expected_value) ** 2 / 2 / sigma / sigma))
    )
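# Sanity check for the two densities above (exact values):
#   normal_density_function(0.0, 1.0, 0.0)     -> 1/sqrt(2*pi) ~= 0.3989
#   log_normal_density_function(1.0, 1.0, 1.0) -> 1/sqrt(2*pi) ~= 0.3989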
def get_random_residue_pairs(representation, resolution,
                             number,
                             max_distance=None,
                             avoid_same_particles=False,
                             names=None):

    particles = []
    if names is None:
        names = list(representation.hier_dict.keys())

    for name in names:
        prot = representation.hier_dict[name]
        particles += select(representation, name=name, resolution=resolution)
    random_residue_pairs = []
    while len(random_residue_pairs) <= number:
        p1 = random.choice(particles)
        p2 = random.choice(particles)
        if max_distance is not None and \
                IMP.core.get_distance(IMP.core.XYZ(p1), IMP.core.XYZ(p2)) > max_distance:
            continue
        r1 = random.choice(get_residue_indexes(p1))
        r2 = random.choice(get_residue_indexes(p2))
        if r1 == r2 and avoid_same_particles:
            continue
        name1 = representation.get_prot_name_from_particle(p1)
        name2 = representation.get_prot_name_from_particle(p2)
        random_residue_pairs.append((name1, r1, name2, r2))

    return random_residue_pairs
def get_random_data_point(
        expected_value,
        ntrials,
        sensitivity,
        sigma,
        outlierprob,
        begin_end_nbins_tuple,
        log=False,
        loggrid=False):

    begin = begin_end_nbins_tuple[0]
    end = begin_end_nbins_tuple[1]
    nbins = begin_end_nbins_tuple[2]

    if not loggrid:
        fmod_grid = get_grid(begin, end, nbins, True)
    else:
        fmod_grid = get_log_grid(begin, end, nbins)

    norm = 0
    cumul = []
    cumul.append(0)

    a = []
    for i in range(0, ntrials):
        a.append([random.random(), True])

    if sigma != 0.0:
        # build the (log-)normal cumulative distribution on the grid by
        # trapezoidal integration
        for j in range(1, len(fmod_grid)):
            fj = fmod_grid[j]
            fjm1 = fmod_grid[j - 1]
            df = fj - fjm1

            if not log:
                pj = normal_density_function(expected_value, sigma, fj)
                pjm1 = normal_density_function(expected_value, sigma, fjm1)
            else:
                pj = log_normal_density_function(expected_value, sigma, fj)
                pjm1 = log_normal_density_function(expected_value, sigma, fjm1)

            norm += (pj + pjm1) / 2.0 * df
            cumul.append(norm)

        random_points = []
        # invert the cumulative distribution to draw the random points
        for i in range(len(cumul)):
            for aa in a:
                if (aa[0] <= cumul[i] / norm and aa[1]):
                    random_points.append(
                        int(fmod_grid[i] / sensitivity) * sensitivity)
                    aa[1] = False
    else:
        random_points = [expected_value] * ntrials

    # replace a fraction of the points with uniform outliers
    for i in range(len(random_points)):
        if random.random() < outlierprob:
            a = random.uniform(begin, end)
            random_points[i] = int(a / sensitivity) * sensitivity
    print(random_points)
    '''
    for i in range(ntrials):
      if random.random() > OUTLIERPROB_:
        r=truncnorm.rvs(0.0,1.0,expected_value,BETA_)
        if r>1.0: print r,expected_value,BETA_
      else:
        r=random.random()
      random_points.append(int(r/sensitivity)*sensitivity)
    '''

    rmean = 0.
    rmean2 = 0.
    for r in random_points:
        rmean += r
        rmean2 += r * r

    rmean /= float(ntrials)
    rmean2 /= float(ntrials)
    stddev = math.sqrt(max(rmean2 - rmean * rmean, 0.))
    return rmean, stddev
def print_multicolumn(list_of_strings, ncolumns=2, truncate=40):

    l = list_of_strings

    cols = ncolumns
    # pad the list with empty entries
    for i in range(len(l) % cols):
        l.append(" ")

    split = [l[i:i + len(l) // cols] for i in range(0, len(l), len(l) // cols)]
    for row in zip(*split):
        print("".join(str.ljust(i, truncate) for i in row))
class ColorChange(object):
    '''Convert color codes between hexadecimal and rgb'''

    def __init__(self):
        self._NUMERALS = '0123456789abcdefABCDEF'
        self._HEXDEC = dict((v, int(v, 16)) for v in
                            (x + y for x in self._NUMERALS for y in self._NUMERALS))
        self.LOWERCASE, self.UPPERCASE = 'x', 'X'

    def rgb(self, triplet):
        return float(self._HEXDEC[triplet[0:2]]), float(self._HEXDEC[triplet[2:4]]), float(self._HEXDEC[triplet[4:6]])

    def triplet(self, rgb, lettercase=None):
        if lettercase is None:
            lettercase = self.LOWERCASE
        return format(rgb[0] << 16 | rgb[1] << 8 | rgb[2], '06' + lettercase)
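# Illustrative usage of ColorChange (exact values):
#
#   cc = ColorChange()
#   cc.rgb('ff0000')         # -> (255.0, 0.0, 0.0)
#   cc.triplet((255, 0, 0))  # -> 'ff0000'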
class OrderedSet(collections.MutableSet):

    def __init__(self, iterable=None):
        self.end = end = []
        end += [None, end, end]         # sentinel node for doubly linked list
        self.map = {}                   # key --> [key, prev, next]
        if iterable is not None:
            self |= iterable

    def __len__(self):
        return len(self.map)

    def __contains__(self, key):
        return key in self.map

    def add(self, key):
        if key not in self.map:
            end = self.end
            curr = end[1]
            curr[2] = end[1] = self.map[key] = [key, curr, end]

    def discard(self, key):
        if key in self.map:
            key, prev, next = self.map.pop(key)
            prev[2] = next
            next[1] = prev

    def __iter__(self):
        end = self.end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]

    def __reversed__(self):
        end = self.end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]

    def pop(self, last=True):
        if not self:
            raise KeyError('set is empty')
        if last:
            key = self.end[1][0]
        else:
            key = self.end[2][0]
        self.discard(key)
        return key

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, list(self))

    def __eq__(self, other):
        if isinstance(other, OrderedSet):
            return len(self) == len(other) and list(self) == list(other)
        return set(self) == set(other)
1411 """Store objects in order they were added, but with default type.
1412 Source: http://stackoverflow.com/a/4127426/2608793
1414 def __init__(self, *args, **kwargs):
1416 self.default_factory =
None
1418 if not (args[0]
is None or callable(args[0])):
1419 raise TypeError(
'first argument must be callable or None')
1420 self.default_factory = args[0]
1422 super(OrderedDefaultDict, self).__init__(*args, **kwargs)
1424 def __missing__ (self, key):
1425 if self.default_factory
is None:
1427 self[key] = default = self.default_factory()
1430 def __reduce__(self):
1431 args = (self.default_factory,)
if self.default_factory
else ()
1432 return self.__class__, args,
None,
None, self.iteritems()
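# Illustrative usage of OrderedDefaultDict: behaves like defaultdict(list)
# but preserves insertion order.
#
#   d = OrderedDefaultDict(list)
#   d['b'].append(1)
#   d['a'].append(2)
#   list(d.keys())  # -> ['b', 'a']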
1437 """Extract frame from RMF file and fill coordinates. Must be identical topology.
1438 @param hier The (System) hierarchy to fill (e.g. after you've built it)
1439 @param rmf_fn The file to extract from
1440 @param frame_num The frame number to extract
1442 rh = RMF.open_rmf_file_read_only(rmf_fn)
def input_adaptor(stuff,
                  pmi_resolution=0,
                  flatten=False,
                  selection_tuple=None):
    """Adapt things for PMI (degrees of freedom, restraints, ...)
    Returns list of list of hierarchies, separated into Molecules if possible.
    (iterable of ^2) hierarchy -> returns input as list of list of hierarchies, only one entry.
    (iterable of ^2) PMI::System/State/Molecule/TempResidue ->
        returns residue hierarchies, grouped in molecules, at requested resolution
    @param stuff Can be one of the following inputs:
        IMP Hierarchy, PMI System/State/Molecule/TempResidue, or a list/set (of list/set) of them.
        Must be uniform input, however. No mixing object types.
    @param pmi_resolution For selecting, only does it if you pass PMI objects. Set it to "all"
        if you want all resolutions!
    @param flatten Set to True if you just want all hierarchies in one list.
    \note since this relies on IMP::atom::Selection, this will not return any objects if they weren't built!
    But there should be no problem if you request unbuilt residues, they should be ignored.
    """
    import IMP.pmi.topology

    if stuff is None:
        return stuff

    def get_ok_iter(it):
        type_set = set(type(a) for a in it)
        if len(type_set) != 1:
            raise Exception('input_adaptor: can only pass one type of object at a time')
        xtp = type_set.pop()
        return xtp, list(it)

    if hasattr(stuff, '__iter__'):
        if len(stuff) == 0:
            return stuff
        tp, thelist = get_ok_iter(stuff)

        # iter of iter of should be ok
        if hasattr(next(iter(thelist)), '__iter__'):
            flatlist = [i for sublist in thelist for i in sublist]
            tp, thelist = get_ok_iter(flatlist)
        stuff = thelist
    else:
        tp = type(stuff)
        stuff = [stuff]

    pmi_input = False
    hier_list = []
    if tp in (IMP.pmi.topology.System, IMP.pmi.topology.State,
              IMP.pmi.topology.Molecule, IMP.pmi.topology.TempResidue):
        # for PMI objects, gather residue indexes per molecule and then select
        pmi_input = True
        indexes_per_mol = OrderedDefaultDict(list)
        if tp is IMP.pmi.topology.System:
            for system in stuff:
                for state in system.get_states():
                    mdict = state.get_molecules()
                    for molname in mdict:
                        for copy in mdict[molname]:
                            indexes_per_mol[copy] += [r.get_index() for r in copy.get_residues()]
        elif tp is IMP.pmi.topology.State:
            for state in stuff:
                mdict = state.get_molecules()
                for molname in mdict:
                    for copy in mdict[molname]:
                        indexes_per_mol[copy] += [r.get_index() for r in copy.get_residues()]
        elif tp is IMP.pmi.topology.Molecule:
            for molecule in stuff:
                indexes_per_mol[molecule] += [r.get_index() for r in molecule.get_residues()]
        elif tp is IMP.pmi.topology.TempResidue:
            for tempres in stuff:
                indexes_per_mol[tempres.get_molecule()].append(tempres.get_index())
        for mol in indexes_per_mol:
            if pmi_resolution == 'all':
                # starting from the molecule hierarchy, select at all resolutions
                ps = select_at_all_resolutions(mol.get_hierarchy(),
                                               residue_indexes=indexes_per_mol[mol])
            else:
                sel = IMP.atom.Selection(mol.get_hierarchy(),
                                         resolution=pmi_resolution,
                                         residue_indexes=indexes_per_mol[mol])
                ps = sel.get_selected_particles()
            hier_list.append([IMP.atom.Hierarchy(p) for p in ps])
    elif tp in (IMP.atom.Hierarchy, IMP.Particle):
        # plain IMP hierarchies (or their particles) are passed through,
        # grouped as a single list
        if all(IMP.atom.Hierarchy.get_is_setup(h) for h in stuff):
            hier_list = [[IMP.atom.Hierarchy(h) for h in stuff]]
        else:
            raise Exception('input_adaptor: you passed something of type', tp)
    else:
        raise Exception('input_adaptor: you passed something of type', tp)

    if flatten and pmi_input:
        return [h for sublist in hier_list for h in sublist]
    else:
        return hier_list
def get_residue_type_from_one_letter_code(code):
    threetoone = {'ALA': 'A', 'ARG': 'R', 'ASN': 'N', 'ASP': 'D',
                  'CYS': 'C', 'GLU': 'E', 'GLN': 'Q', 'GLY': 'G',
                  'HIS': 'H', 'ILE': 'I', 'LEU': 'L', 'LYS': 'K',
                  'MET': 'M', 'PHE': 'F', 'PRO': 'P', 'SER': 'S',
                  'THR': 'T', 'TRP': 'W', 'TYR': 'Y', 'VAL': 'V', 'UNK': 'X'}
    one_to_three = {}
    for k in threetoone:
        one_to_three[threetoone[k]] = k
    return IMP.atom.ResidueType(one_to_three[code])
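# Illustrative example: the helper above maps one-letter codes back to
# IMP.atom.ResidueType objects, e.g. 'M' -> ResidueType("MET") and
# 'X' -> ResidueType("UNK").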
1557 """ Just get the leaves from a list of hierarchies """
1558 lvs = list(itertools.chain.from_iterable(
IMP.atom.get_leaves(item)
for item
in list_of_hs))
1565 """Perform selection using the usual keywords but return ALL resolutions (BEADS and GAUSSIANS).
1566 Returns in flat list!
1571 if hier
is not None:
1574 print(
"WARNING: You passed nothing to select_at_all_resolutions()")
1581 raise Exception(
'select_at_all_resolutions: you have to pass an IMP Hierarchy')
1583 raise Exception(
'select_at_all_resolutions: you have to pass an IMP Hierarchy')
1584 if 'resolution' in kwargs
or 'representation_type' in kwargs:
1585 raise Exception(
"don't pass resolution or representation_type to this function")
1587 representation_type=IMP.atom.BALLS,**kwargs)
1589 representation_type=IMP.atom.DENSITIES,**kwargs)
1590 ret |= OrderedSet(selB.get_selected_particles())
1591 ret |= OrderedSet(selD.get_selected_particles())
1600 """Utility to retrieve particles from a hierarchy within a
1601 zone around a set of ps.
1602 @param hier The hierarchy in which to look for neighbors
1603 @param target_ps The particles for zoning
1604 @param sel_zone The maximum distance
1605 @param entire_residues If True, will grab entire residues
1606 @param exclude_backbone If True, will only return sidechain particles
1610 backbone_types=[
'C',
'N',
'CB',
'O']
1611 if exclude_backbone:
1613 for n
in backbone_types])
1614 test_ps = test_sel.get_selected_particles()
1615 nn = IMP.algebra.NearestNeighbor3D([
IMP.core.XYZ(p).get_coordinates()
1618 for target
in target_ps:
1619 zone|=set(nn.get_in_ball(
IMP.core.XYZ(target).get_coordinates(),sel_zone))
1620 zone_ps = [test_ps[z]
for z
in zone]
1625 zone_ps = [h.get_particle()
for h
in final_ps]
1630 """Returns unique objects in original order"""
1634 if not hasattr(hiers,
'__iter__'):
1641 rbs_ordered.append(rb)
1646 rbs_ordered.append(rb)
1650 return rbs_ordered,beads
def shuffle_configuration(root_hier,
                          max_translation=300., max_rotation=2.0 * pi,
                          avoidcollision_rb=True, avoidcollision_fb=False,
                          cutoff=10.0, niterations=100,
                          bounding_box=None,
                          excluded_rigid_bodies=[],
                          hierarchies_excluded_from_collision=[],
                          verbose=False):
    """Shuffle particles. Used to restart the optimization.
    The configuration of the system is initialized by placing each
    rigid body and each bead randomly in a box with a side of
    max_translation angstroms, and far enough from each other to
    prevent any steric clashes. The rigid bodies are also randomly rotated.
    @param root_hier The hierarchy to shuffle. Will find rigid bodies and flexible beads.
    @param max_translation Max translation (rbs and flexible beads)
    @param max_rotation Max rotation (rbs only)
    @param avoidcollision_rb check if the particle/rigid body was
           placed close to another particle; uses the optional
           arguments cutoff and niterations
    @param avoidcollision_fb Advanced. Generally you want this False because it's hard to shuffle beads.
    @param cutoff Distance less than this is a collision
    @param niterations How many times to try avoiding collision
    @param bounding_box Only shuffle particles within this box. Defined by ((x1,y1,z1),(x2,y2,z2)).
    @param excluded_rigid_bodies Don't shuffle these rigid body objects
    @param hierarchies_excluded_from_collision Don't count collision with these bodies
    @param verbose Give more output
    \note Best to only call this function after you've set up degrees of freedom
    """

    rigid_bodies, flexible_beads = get_rbs_and_beads(root_hier)

    if len(rigid_bodies) > 0:
        mdl = rigid_bodies[0].get_model()
    elif len(flexible_beads) > 0:
        mdl = flexible_beads[0].get_model()
    else:
        raise Exception("You passed something weird to shuffle_configuration")

    if len(rigid_bodies) == 0:
        print("shuffle_configuration: rigid bodies were not initialized")

    # gather particle indexes for the collision test
    gcpf = IMP.core.GridClosePairsFinder()
    gcpf.set_distance(cutoff)
    allparticleindexes = []
    hierarchies_excluded_from_collision_indexes = []

    for h in hierarchies_excluded_from_collision:
        for p in IMP.atom.get_leaves(h):
            hierarchies_excluded_from_collision_indexes.append(p.get_particle_index())

    for p in IMP.atom.get_leaves(root_hier):
        if IMP.core.XYZR.get_is_setup(p):
            allparticleindexes.append(p.get_particle_index())
        else:
            hierarchies_excluded_from_collision_indexes.append(p.get_particle_index())

    if not bounding_box is None:
        ((x1, y1, z1), (x2, y2, z2)) = bounding_box
        lb = IMP.algebra.Vector3D(x1, y1, z1)
        ub = IMP.algebra.Vector3D(x2, y2, z2)
        bb = IMP.algebra.BoundingBox3D(lb, ub)

    allparticleindexes = list(
        set(allparticleindexes) - set(hierarchies_excluded_from_collision_indexes))

    print('shuffling', len(rigid_bodies), 'rigid bodies')
    for rb in rigid_bodies:
        if rb not in excluded_rigid_bodies:
            if avoidcollision_rb:
                rbindexes = rb.get_member_particle_indexes()
                rbindexes = list(
                    set(rbindexes) - set(hierarchies_excluded_from_collision_indexes))
                otherparticleindexes = list(
                    set(allparticleindexes) - set(rbindexes))
                if len(otherparticleindexes) is None:
                    continue

            niter = 0
            while niter < niterations:
                rbxyz = (rb.get_x(), rb.get_y(), rb.get_z())

                # either draw a random placement in the bounding box or
                # perturb the current position locally
                if not bounding_box is None:
                    translation = IMP.algebra.get_random_vector_in(bb)
                    old_coord = IMP.core.XYZ(rb).get_coordinates()
                    rotation = IMP.algebra.get_random_rotation_3d()
                    transformation = IMP.algebra.Transformation3D(
                        rotation,
                        translation - old_coord)
                else:
                    transformation = IMP.algebra.get_random_local_transformation(
                        rbxyz,
                        max_translation,
                        max_rotation)

                IMP.core.transform(rb, transformation)

                if avoidcollision_rb:
                    mdl.update()
                    npairs = len(
                        gcpf.get_close_pairs(
                            mdl,
                            otherparticleindexes,
                            rbindexes))
                    if npairs == 0:
                        break
                    else:
                        niter += 1
                        if verbose:
                            print("shuffle_configuration: rigid body placed close to other %d particles, trying again..." % npairs)
                            print("shuffle_configuration: rigid body name: " + rb.get_name())
                        if niter == niterations:
                            raise ValueError("tried the maximum number of iterations to avoid collisions, increase the distance cutoff")
                else:
                    break

    print('shuffling', len(flexible_beads), 'flexible beads')
    for fb in flexible_beads:
        if avoidcollision_fb:
            fbindexes = IMP.get_indexes([fb])
            otherparticleindexes = list(
                set(allparticleindexes) - set(fbindexes))
            if len(otherparticleindexes) is None:
                continue

        niter = 0
        while niter < niterations:
            fbxyz = IMP.core.XYZ(fb).get_coordinates()
            if not bounding_box is None:
                translation = IMP.algebra.get_random_vector_in(bb)
                transformation = IMP.algebra.Transformation3D(translation - fbxyz)
            else:
                transformation = IMP.algebra.get_random_local_transformation(
                    fbxyz,
                    max_translation,
                    max_rotation)

            IMP.core.transform(IMP.core.XYZ(fb), transformation)

            if avoidcollision_fb:
                mdl.update()
                npairs = len(
                    gcpf.get_close_pairs(
                        mdl,
                        otherparticleindexes,
                        fbindexes))
                if npairs == 0:
                    break
                else:
                    niter += 1
                    print("shuffle_configuration: floppy body placed close to other %d particles, trying again..." % npairs)
                    if niter == niterations:
                        raise ValueError("tried the maximum number of iterations to avoid collisions, increase the distance cutoff")
            else:
                break
1822 "reds":[(
"maroon",
"#800000",(128,0,0)),(
"dark red",
"#8B0000",(139,0,0)),
1823 (
"brown",
"#A52A2A",(165,42,42)),(
"firebrick",
"#B22222",(178,34,34)),
1824 (
"crimson",
"#DC143C",(220,20,60)),(
"red",
"#FF0000",(255,0,0)),
1825 (
"tomato",
"#FF6347",(255,99,71)),(
"coral",
"#FF7F50",(255,127,80)),
1826 (
"indian red",
"#CD5C5C",(205,92,92)),(
"light coral",
"#F08080",(240,128,128)),
1827 (
"dark salmon",
"#E9967A",(233,150,122)),(
"salmon",
"#FA8072",(250,128,114)),
1828 (
"light salmon",
"#FFA07A",(255,160,122)),(
"orange red",
"#FF4500",(255,69,0)),
1829 (
"dark orange",
"#FF8C00",(255,140,0))],
1830 "yellows":[(
"orange",
"#FFA500",(255,165,0)),(
"gold",
"#FFD700",(255,215,0)),
1831 (
"dark golden rod",
"#B8860B",(184,134,11)),(
"golden rod",
"#DAA520",(218,165,32)),
1832 (
"pale golden rod",
"#EEE8AA",(238,232,170)),(
"dark khaki",
"#BDB76B",(189,183,107)),
1833 (
"khaki",
"#F0E68C",(240,230,140)),(
"olive",
"#808000",(128,128,0)),
1834 (
"yellow",
"#FFFF00",(255,255,0)),(
"antique white",
"#FAEBD7",(250,235,215)),
1835 (
"beige",
"#F5F5DC",(245,245,220)),(
"bisque",
"#FFE4C4",(255,228,196)),
1836 (
"blanched almond",
"#FFEBCD",(255,235,205)),(
"wheat",
"#F5DEB3",(245,222,179)),
1837 (
"corn silk",
"#FFF8DC",(255,248,220)),(
"lemon chiffon",
"#FFFACD",(255,250,205)),
1838 (
"light golden rod yellow",
"#FAFAD2",(250,250,210)),(
"light yellow",
"#FFFFE0",(255,255,224))],
1839 "greens":[(
"yellow green",
"#9ACD32",(154,205,50)),(
"dark olive green",
"#556B2F",(85,107,47)),
1840 (
"olive drab",
"#6B8E23",(107,142,35)),(
"lawn green",
"#7CFC00",(124,252,0)),
1841 (
"chart reuse",
"#7FFF00",(127,255,0)),(
"green yellow",
"#ADFF2F",(173,255,47)),
1842 (
"dark green",
"#006400",(0,100,0)),(
"green",
"#008000",(0,128,0)),
1843 (
"forest green",
"#228B22",(34,139,34)),(
"lime",
"#00FF00",(0,255,0)),
1844 (
"lime green",
"#32CD32",(50,205,50)),(
"light green",
"#90EE90",(144,238,144)),
1845 (
"pale green",
"#98FB98",(152,251,152)),(
"dark sea green",
"#8FBC8F",(143,188,143)),
1846 (
"medium spring green",
"#00FA9A",(0,250,154)),(
"spring green",
"#00FF7F",(0,255,127)),
1847 (
"sea green",
"#2E8B57",(46,139,87)),(
"medium aqua marine",
"#66CDAA",(102,205,170)),
1848 (
"medium sea green",
"#3CB371",(60,179,113)),(
"light sea green",
"#20B2AA",(32,178,170)),
1849 (
"dark slate gray",
"#2F4F4F",(47,79,79)),(
"teal",
"#008080",(0,128,128)),
1850 (
"dark cyan",
"#008B8B",(0,139,139))],
1851 "blues":[(
"dark turquoise",
"#00CED1",(0,206,209)),
1852 (
"turquoise",
"#40E0D0",(64,224,208)),(
"medium turquoise",
"#48D1CC",(72,209,204)),
1853 (
"pale turquoise",
"#AFEEEE",(175,238,238)),(
"aqua marine",
"#7FFFD4",(127,255,212)),
1854 (
"powder blue",
"#B0E0E6",(176,224,230)),(
"cadet blue",
"#5F9EA0",(95,158,160)),
1855 (
"steel blue",
"#4682B4",(70,130,180)),(
"corn flower blue",
"#6495ED",(100,149,237)),
1856 (
"deep sky blue",
"#00BFFF",(0,191,255)),(
"dodger blue",
"#1E90FF",(30,144,255)),
1857 (
"light blue",
"#ADD8E6",(173,216,230)),(
"sky blue",
"#87CEEB",(135,206,235)),
1858 (
"light sky blue",
"#87CEFA",(135,206,250)),(
"midnight blue",
"#191970",(25,25,112)),
1859 (
"navy",
"#000080",(0,0,128)),(
"dark blue",
"#00008B",(0,0,139)),
1860 (
"medium blue",
"#0000CD",(0,0,205)),(
"blue",
"#0000FF",(0,0,255)),(
"royal blue",
"#4169E1",(65,105,225)),
1861 (
"aqua",
"#00FFFF",(0,255,255)),(
"cyan",
"#00FFFF",(0,255,255)),(
"light cyan",
"#E0FFFF",(224,255,255))],
1862 "violets":[(
"blue violet",
"#8A2BE2",(138,43,226)),(
"indigo",
"#4B0082",(75,0,130)),
1863 (
"dark slate blue",
"#483D8B",(72,61,139)),(
"slate blue",
"#6A5ACD",(106,90,205)),
1864 (
"medium slate blue",
"#7B68EE",(123,104,238)),(
"medium purple",
"#9370DB",(147,112,219)),
1865 (
"dark magenta",
"#8B008B",(139,0,139)),(
"dark violet",
"#9400D3",(148,0,211)),
1866 (
"dark orchid",
"#9932CC",(153,50,204)),(
"medium orchid",
"#BA55D3",(186,85,211)),
1867 (
"purple",
"#800080",(128,0,128)),(
"thistle",
"#D8BFD8",(216,191,216)),
1868 (
"plum",
"#DDA0DD",(221,160,221)),(
"violet",
"#EE82EE",(238,130,238)),
1869 (
"magenta / fuchsia",
"#FF00FF",(255,0,255)),(
"orchid",
"#DA70D6",(218,112,214)),
1870 (
"medium violet red",
"#C71585",(199,21,133)),(
"pale violet red",
"#DB7093",(219,112,147)),
1871 (
"deep pink",
"#FF1493",(255,20,147)),(
"hot pink",
"#FF69B4",(255,105,180)),
1872 (
"light pink",
"#FFB6C1",(255,182,193)),(
"pink",
"#FFC0CB",(255,192,203))],
1873 "browns":[(
"saddle brown",
"#8B4513",(139,69,19)),(
"sienna",
"#A0522D",(160,82,45)),
1874 (
"chocolate",
"#D2691E",(210,105,30)),(
"peru",
"#CD853F",(205,133,63)),
1875 (
"sandy brown",
"#F4A460",(244,164,96)),(
"burly wood",
"#DEB887",(222,184,135)),
1876 (
"tan",
"#D2B48C",(210,180,140)),(
"rosy brown",
"#BC8F8F",(188,143,143)),
1877 (
"moccasin",
"#FFE4B5",(255,228,181)),(
"navajo white",
"#FFDEAD",(255,222,173)),
1878 (
"peach puff",
"#FFDAB9",(255,218,185)),(
"misty rose",
"#FFE4E1",(255,228,225)),
1879 (
"lavender blush",
"#FFF0F5",(255,240,245)),(
"linen",
"#FAF0E6",(250,240,230)),
1880 (
"old lace",
"#FDF5E6",(253,245,230)),(
"papaya whip",
"#FFEFD5",(255,239,213)),
1881 (
"sea shell",
"#FFF5EE",(255,245,238))],
1882 "greys":[(
"black",
"#000000",(0,0,0)),(
"dim gray / dim grey",
"#696969",(105,105,105)),
1883 (
"gray / grey",
"#808080",(128,128,128)),(
"dark gray / dark grey",
"#A9A9A9",(169,169,169)),
1884 (
"silver",
"#C0C0C0",(192,192,192)),(
"light gray / light grey",
"#D3D3D3",(211,211,211)),
1885 (
"gainsboro",
"#DCDCDC",(220,220,220)),(
"white smoke",
"#F5F5F5",(245,245,245)),
1886 (
"white",
"#FFFFFF",(255,255,255))]}
1888 def assign_color_group(self,color_group,representation,component_names):
1889 for n,p
in enumerate(component_names):
1891 psel=s.get_selected_particles()
1892 ctuple=self.colors[color_group][n]
1893 print(
"Assigning "+p+
" to color "+ctuple[0])