schwede / ProMod3

Commit abda7388, authored Aug 12, 2019 by Tauriello Gerardo

Merge branch 'release-2.0.0'

Parents: 4153444b, df7df159
Changes: 175 files
CHANGELOG
@@ -5,6 +5,24 @@
Changelog
================================================================================
Release 2.0.0
--------------------------------------------------------------------------------
* Generalize particle scoring and rotamer construction in sidechain module.
  This simplifies the addition of other scoring functions in the future.
  Be aware of breaking changes introduced in the process!
  (SCWRLRotamerConstructor -> SCWRL4RotamerConstructor, changed interface of
  Particle and RotamerConstructor classes).
* Enable possibility to use structural fragments in default modelling pipeline
  and build-model action
* Enable possibility to enforce full coverage models including termini without
  template coverage in default modelling pipeline and build-model action
* Modelling pipeline can track issues in the ModellingHandle object
* External example scripts can now be found in extras/external_scripts
* Improved support for recent compilers and libraries.
* Several minor bug fixes, improvements, and speed-ups
Release 1.3.0
--------------------------------------------------------------------------------
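For scripts that construct rotamers directly, the class rename in the 2.0.0 notes above is the main breaking change to adapt to. A minimal, hedged sketch (only the class name comes from the changelog entry; the no-argument construction is an assumption):

    # Hedged sketch of adapting a script to the 2.0.0 rename noted above.
    # Assumption: SCWRL4RotamerConstructor can be built with default arguments.
    from promod3 import sidechain

    # pre-2.0.0 scripts used: sidechain.SCWRLRotamerConstructor()
    rot_constructor = sidechain.SCWRL4RotamerConstructor()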
CMakeLists.txt
@@ -15,8 +15,8 @@ project(ProMod3 CXX C)
include(PROMOD3)

# versioning info
-set(PROMOD3_VERSION_MAJOR 1)
-set(PROMOD3_VERSION_MINOR 3)
+set(PROMOD3_VERSION_MAJOR 2)
+set(PROMOD3_VERSION_MINOR 0)
set(PROMOD3_VERSION_PATCH 0)
set(PROMOD3_VERSION_STRING ${PROMOD3_VERSION_MAJOR}.${PROMOD3_VERSION_MINOR})
set(PROMOD3_VERSION_STRING ${PROMOD3_VERSION_STRING}.${PROMOD3_VERSION_PATCH})
@@ -77,16 +77,23 @@ file(MAKE_DIRECTORY ${STAGE_DIR}
                   ${LIBEXEC_STAGE_PATH})

setup_compiler_flags()
-setup_boost()
+# Python needed before Boost
+find_package(Python 2.7 REQUIRED)
+# Split version string
+string(REPLACE "." ";" _python_version_list ${PYTHON_VERSION})
+list(GET _python_version_list 0 PYTHON_VERSION_MAJOR)
+list(GET _python_version_list 1 PYTHON_VERSION_MINOR)
+setup_boost()

if(NOT DISABLE_DOCUMENTATION)
  find_package(Sphinx ${PYTHON_VERSION} REQUIRED)
  set(PYTHON_DOC_URL "https://docs.python.org/${PYTHON_VERSION}")
-  # set this to the URL corresponding to the version of OST you are using
-  set(OST_DOC_URL "https://www.openstructure.org/docs/dev")
+  # this URL should always point to the latest version of OST
+  set(OST_DOC_URL "https://www.openstructure.org/docs")
endif()

-find_package(OPENSTRUCTURE 1.8 REQUIRED
+find_package(OPENSTRUCTURE 1.10.0 REQUIRED
             COMPONENTS io mol seq seq_alg mol_alg conop img mol_mm)

if(CMAKE_COMPILER_IS_GNUCXX)
actions/doc/index.rst
@@ -21,6 +21,7 @@ with
$ pm build-model [-h] (-f <FILE> | -c <FILE> | -j <OBJECT>|<FILE>)
                 (-p <FILE> | -e <FILE>) [-s <FILE>] [-o <FILENAME>]
                 [-r] [-t]

Example usage:
@@ -129,6 +130,23 @@ Example usage:
$ pm build-model -f aln.fasta -p tpl.pdb -s prof.hhm

A fast torsion angle based sampling is performed in case of Monte Carlo
sampling. You can enforce the usage of structural fragments with ``-r`` but
this increases runtime due to searching the required fragments. Setup of the
according :class:`promod3.modelling.FraggerHandle` objects is performed in the
:class:`~promod3.core.pm3argparse.PM3ArgumentParser` class as described in
detail :meth:`here <promod3.core.pm3argparse.PM3ArgumentParser.AddFragments>`.

The default modelling pipeline in ProMod3 is optimized to generate a gap-free
model of the region in the target sequence(s) that is covered with template
information. Terminal extensions without template coverage are neglected. You
can enforce a model of the full target sequence(s) by adding ``-t``. The
terminal parts will be modelled with a crude Monte Carlo approach. Be aware
that the accuracy of those termini is likely to be limited. Termini of length
1 won't be modelled.
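For example, requesting fragment-based sampling together with full-coverage termini could look like this (a sketch reusing the input files from the example above):

    $ pm build-model -f aln.fasta -p tpl.pdb -s prof.hhm -r -t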
Possible exit codes of the action:
- 0: all went well
@@ -190,3 +208,8 @@ Several flags control the modelling behaviour:
Dont do subrotamer optimization if flexible rotamer model is used

.. option:: -f, --energy_function

  The energy function to be used. Default is SCWRL4, can be any function
  supported by :meth:`promod3.modelling.ReconstructSidechains`.
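The same energy function choice can be made from a script; a hedged sketch based on the ReconstructSidechains call shown in actions/pm-build-sidechains further down, where the structure loading and file names are assumptions:

    # Hedged sketch: pick the sidechain energy function from a script.
    # energy_function mirrors the -f/--energy_function option; loading via
    # ost.io and the file names are assumptions, the default value "SCWRL4"
    # is taken from the option documentation above.
    from ost import io
    from promod3 import modelling

    prot = io.LoadPDB("input.pdb")
    # reconstructs sidechains in place on the loaded structure
    modelling.ReconstructSidechains(prot, energy_function="SCWRL4")
    io.SavePDB(prot, "input_with_sidechains.pdb")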
actions/pm-build-model
@@ -40,10 +40,17 @@ parser = pm3argparse.PM3ArgumentParser(__doc__, action=True)
parser.AddAlignment()
parser.AddStructure(attach_views=True)
parser.AddProfile()
parser.AddFragments()
parser.AssembleParser()
parser.add_argument('-o', '--model-file', metavar='<FILENAME>', type=str,
                    default='model.pdb', help='File to store model coordinates'+
                    ' (default: %(default)s).')
parser.add_argument('-t', '--model-termini', help="Enforce modelling of " +
                    "terminal stretches without template coverage with a " +
                    "crude Monte Carlo approach. The accuracy of those " +
                    "termini is likely to be limited.", action="store_true")

# lots of checking being done here -> see PM3ArgumentParser
opts = parser.Parse()
@@ -70,14 +77,24 @@ try:
    # pssm files would not be sufficient and we would be restricted to hhm.
    if len(opts.profiles) > 0:
        modelling.SetSequenceProfiles(mhandle, opts.profiles)

    # add fragment support for Monte Carlo sampling. The fragment search
    # is setup in the argument parser. If activated you get fragment support
    # in any case but for optimal performance you should provide profiles
    # in hhm format (for profile AND secondary structure information).
    if len(opts.fragger_handles) > 0:
        modelling.SetFraggerHandles(mhandle, opts.fragger_handles)

    # build final model
-   final_model = modelling.BuildFromRawModel(mhandle)
+   final_model = modelling.BuildFromRawModel(mhandle,
+                                             model_termini=opts.model_termini)
except Exception as ex:
    helper.MsgErrorAndExit("Failed to perform modelling! An exception of type "+
                           type(ex).__name__ + " occured: " + str(ex), 3)

# output
ost.PopVerbosityLevel()
-io.SavePDB(final_model, opts.model_file)
-if not os.path.isfile(opts.model_file):
+try:
+    io.SavePDB(final_model, opts.model_file)
+    if not os.path.isfile(opts.model_file):
+        raise IOError("Failed to write model file.")
+except Exception as ex:
+    helper.MsgErrorAndExit("Failed to write model file '%s'." % opts.model_file, 4)
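Outside the action, the new model_termini keyword can be passed to BuildFromRawModel directly; a minimal sketch, where only BuildFromRawModel and model_termini are taken from the code above and the raw-model setup and file names are assumptions:

    # Hedged sketch: run the default pipeline from a script and enforce
    # modelling of termini without template coverage via model_termini.
    from ost import io
    from promod3 import modelling

    aln = io.LoadAlignment("aln.fasta")      # target-template alignment (assumed file)
    tpl = io.LoadPDB("tpl.pdb")              # template structure (assumed file)
    aln.AttachView(1, tpl.CreateFullView())  # attach template to second sequence
    mhandle = modelling.BuildRawModel(aln)   # assumed raw-model entry point
    final_model = modelling.BuildFromRawModel(mhandle, model_termini=True)
    io.SavePDB(final_model, "model.pdb")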
actions/pm-build-rawmodel
@@ -58,6 +58,9 @@ except Exception as ex:
# output
ost.PopVerbosityLevel()
-io.SavePDB(mhandle.model, opts.model_file)
-if not os.path.isfile(opts.model_file):
+try:
+    io.SavePDB(mhandle.model, opts.model_file)
+    if not os.path.isfile(opts.model_file):
+        raise IOError("Failed to write model file.")
+except Exception as ex:
+    helper.MsgErrorAndExit("Failed to write model file '%s'." % opts.model_file, 4)
actions/pm-build-sidechains
@@ -54,6 +54,8 @@ parser.add_argument('-i', '--backbone-independent', action='store_true',
parser.add_argument('-s', '--no-subrotamer-optimization', action='store_true',
                    help='Dont do subrotamer optimization if flexible ' +
                         'rotamer model is used.')
parser.add_argument('-f', '--energy_function', action = 'store',
                    dest = "energy_function", default = "SCWRL4")

opts = parser.Parse()
@@ -80,7 +82,8 @@ modelling.ReconstructSidechains(prot, keep_sidechains=opts.keep_sidechains,
                                build_disulfids=opts.no_disulfids==False,
                                rotamer_model=rotamer_model,
                                rotamer_library=lib,
-                               optimize_subrotamers=opt_subrot)
+                               optimize_subrotamers=opt_subrot,
+                               energy_function = opts.energy_function)

# output
ost.PopVerbosityLevel()
cmake_support/PROMOD3.cmake
@@ -1009,8 +1009,28 @@ endmacro(setup_compiler_flags)
set(_BOOST_MIN_VERSION 1.53)

macro(setup_boost)
-  find_package(Boost ${_BOOST_MIN_VERSION} COMPONENTS python REQUIRED)
-  set(BOOST_PYTHON_LIBRARIES ${Boost_LIBRARIES})
+  # starting with CMake 3.11 we could use the following instead of the foreach
+  # find_package(Boost ${_BOOST_MIN_VERSION} COMPONENTS
+  # python${PYTHON_VERSION_MAJOR}${PYTHON_VERSION_MINOR} REQUIRED)
+  # set(BOOST_PYTHON_LIBRARIES ${Boost_LIBRARIES})
+  # see https://cmake.org/cmake/help/v3.11/module/FindBoost.html
+  foreach(_python_lib_name python
+                           python${PYTHON_VERSION_MAJOR}${PYTHON_VERSION_MINOR}
+                           python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}
+                           python${PYTHON_VERSION_MAJOR})
+    find_package(Boost ${_BOOST_MIN_VERSION} COMPONENTS ${_python_lib_name} QUIET)
+    if(Boost_FOUND)
+      message(STATUS "Found Boost package: " ${_python_lib_name})
+      set(BOOST_PYTHON_LIBRARIES ${Boost_LIBRARIES})
+      break()
+    else()
+      message(STATUS "Boost package not found: " ${_python_lib_name}
+                     ". Trying alternative names!")
+    endif()
+  endforeach(_python_lib_name)
+  if(NOT BOOST_PYTHON_LIBRARIES)
+    message(FATAL_ERROR "Failed to find any Boost Python library!")
+  endif()
  set(Boost_LIBRARIES)
  find_package(Boost ${_BOOST_MIN_VERSION} COMPONENTS unit_test_framework REQUIRED)
cmake_support/substitute.cmake
# force configure_file to update ${OUT_FILE}
# -> otherwise it can cause continuous rebuilds if file was just touched
file(REMOVE ${OUT_FILE})
configure_file(${INPUT_FILE} ${OUT_FILE} @ONLY)
\ No newline at end of file
container/Dockerfile
@@ -2,10 +2,10 @@ FROM ubuntu:18.04
# ARGUMENTS
###########
-ARG OPENSTRUCTURE_VERSION="1.9.0"
-ARG PROMOD_VERSION="1.3.0"
+ARG OPENSTRUCTURE_VERSION="1.10.0"
+ARG PROMOD_VERSION="2.0.0"
ARG SRC_FOLDER="/usr/local/src"
-ARG CPUS_FOR_MAKE=2
+ARG CPUS_FOR_MAKE=4
ARG COMPLIB_DIR="/usr/local/share/ost_complib"
ARG OPENMM_VERSION="7.1.1"
ARG OPENMM_INCLUDE_PATH="/usr/local/openmm/include/"
@@ -16,32 +16,34 @@ ENV DEBIAN_FRONTEND=noninteractive
# INSTALL SYSTEM DEPS
#####################
RUN apt-get update -y && apt-get install -y cmake \
        sip-dev \
        libtiff-dev \
        libfftw3-dev \
        libeigen3-dev \
        libboost-all-dev \
        libpng-dev \
        python2.7 \
        python-qt4 \
        g++ \
        sip-dev \
        libtiff-dev \
        libfftw3-dev \
        libeigen3-dev \
        libboost-all-dev \
        libpng-dev \
        python2.7 \
        python-pyqt5 \
        qt5-qmake \
        qtbase5-dev \
        python-qt4 \
        python-numpy \
        python-scipy \
        python-pandas \
        python-matplotlib \
        qt4-qtconfig \
        qt4-qmake \
        libqt4-dev \
        libpng-dev \
        wget \
        gfortran \
        python-pip \
        wget \
        gfortran \
        python-pip \
        python-sphinx \
        tar \
        libbz2-dev \
        doxygen \
        swig \
        clustalw \
        tar \
        libbz2-dev \
        doxygen \
        swig \
        clustalw \
        libsqlite3-dev \
        dssp \
        python-enum34 \
        locales && \
    # CLEANUP
    rm -rf /var/lib/apt/lists/*
@@ -102,7 +104,7 @@ RUN cd ${SRC_FOLDER} && \
    wget ftp://ftp.wwpdb.org/pub/pdb/data/monomers/components.cif.gz &&\
    stage/bin/chemdict_tool create components.cif.gz compounds.chemlib pdb && stage/bin/chemdict_tool update modules/conop/data/charmm.cif compounds.chemlib charmm &&\
    mkdir -p $COMPLIB_DIR && chmod a+rw -R $COMPLIB_DIR && mv compounds.chemlib $COMPLIB_DIR && \
-   # rerun cmake and specify compount lib location
+   # rerun cmake and specify compound lib location
    cmake .. -DCOMPOUND_LIB=$COMPLIB_DIR/compounds.chemlib && \
    # Build OST with compound library
    make -j ${CPUS_FOR_MAKE} && make check && make install && \
@@ -133,7 +135,8 @@ RUN cd ${SRC_FOLDER} && \
    -DPYTHON_LIBRARIES=/usr/lib/x86_64-linux-gnu/libpython2.7.so \
    -DOPTIMIZE=ON \
    -DENABLE_SSE=1 && \
-   make && make codetest && make install && \
+   make -j ${CPUS_FOR_MAKE} && make codetest && make doctest && \
+   make install && \
    # cleanup
    cd ${SRC_FOLDER} && rm ${SRC_FOLDER}/promod-${PROMOD_VERSION}.tar.gz && \
    rm -rf ${SRC_FOLDER}/promod-${PROMOD_VERSION}
core/CMakeLists.txt
add_subdirectory(init)
add_subdirectory(src)
add_subdirectory(pymod)
add_subdirectory(doc)
add_subdirectory(tests)
add_subdirectory(src)
core/pymod/pm3argparse.py
@@ -32,6 +32,7 @@ import ost
from ost import io, seq
from promod3.core import helper
from promod3 import loop, modelling

def _TmpForGZip(filename, suffix, msg_prefix):
    """Unpack a file to a tmp file if gzipped.
@@ -259,6 +260,18 @@ def _FetchProfileFromFile(filename):
                               str(exc), 52)
    return prof

def _FetchPsipredFromFile(filename):
    """Load psipred prediction from filename and return it."""
    argstr = filename + ": "
    helper.FileExists("Profile", 51, filename)
    try:
        pred = loop.PsipredPrediction.FromHHM(filename)
    except Exception, exc:
        helper.MsgErrorAndExit(argstr + ": failure to parse psipred " +
                               "prediction: " + str(exc), 56)
    return pred

def _GetChains(structures, structure_sources):
    """Get chain id to entity view (single chain) mapping (dict)."""
    # IDs: (file_base = base file name with no extensions)
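For reference, the loader wrapped by _FetchPsipredFromFile above can also be called directly; a one-line sketch with an assumed file name:

    from promod3 import loop
    pred = loop.PsipredPrediction.FromHHM("prof.hhm")  # assumed example path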
@@ -427,6 +440,8 @@ class PM3ArgumentParser(argparse.ArgumentParser):
            self._AssembleStructure()
        if 'PROFILE' in self.activate:
            self._AssembleProfile()
        if 'FRAGMENTS' in self.activate:
            self._AssembleFragments()

    def AddAlignment(self, allow_multitemplate=False):
        """Commandline options for alignments.
@@ -603,6 +618,51 @@ class PM3ArgumentParser(argparse.ArgumentParser):
        """
        self.activate.add('PROFILE')

    def AddFragments(self):
        """Commandline option for usage of Fragments

        Activate everything needed to setup
        :class:`promod3.modelling.FraggerHandle` objects in the argument parser.
        Command line arguments are then added in :meth:`AssembleParser` and the
        input is post processed and checked in :meth:`Parse`.

        Options/arguments added:

        * ``-r/--use-fragments`` - Boolean flag whether to setup fragger handles.

        Notes:

        * Fragger handles are setup to identify fragments in a
          :class:`promod3.loop.StructureDB`.
        * If no profiles are provided as additional argument
          (``-s/--seqprof <FILE>``), fragments are identified based on BLOSUM62
          sequence similarity.
        * If you provide profiles that are not in hhm format, fragments are
          identified based on BLOSUM62 sequence similarity, sequence profile
          scoring and structural profile scoring.
        * If you provide profiles in hhm format (optimal case), psipred
          predictions are fetched from there and fragments are identified based
          on secondary structure agreement, secondary structure dependent
          torsion probabilities, sequence profile scoring and structure
          profile scoring.

        Attributes added to the namespace returned by :meth:`Parse`:

        * :attr:`fragger_handles` - :class:`list` of
          :class:`promod3.modelling.FraggerHandle`, ordered to match the target
          sequences.

        Exit codes related to fragments input:

        * 56 - cannot read psipred prediction from hhm file
        """
        self.activate.add('FRAGMENTS')

    def _AssembleAlignment(self):
        """Actually add alignment arguments/options."""
        aln_grp = self.add_mutually_exclusive_group(required=True)
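Put together, an action script activates the fragment support added here roughly as follows (a condensed sketch of the pattern used in actions/pm-build-model above; the argument values are placeholders):

    # Hedged sketch of wiring up fragment support through PM3ArgumentParser.
    # The parser calls mirror actions/pm-build-model; values are placeholders.
    from promod3.core import pm3argparse

    parser = pm3argparse.PM3ArgumentParser(__doc__, action=True)
    parser.AddAlignment()
    parser.AddProfile()
    parser.AddFragments()      # adds the -r/--use-fragments flag
    parser.AssembleParser()
    opts = parser.Parse()

    # empty unless -r/--use-fragments was given on the command line
    for fragger in opts.fragger_handles:
        print(fragger.sequence)  # one handle per target chain sequence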
@@ -650,6 +710,15 @@ class PM3ArgumentParser(argparse.ArgumentParser):
                          ".hhm, .hhm.gz, .pssm, .pssm.gz",
                          action='append', default=list())

    def _AssembleFragments(self):
        self.add_argument('-r', '--use-fragments',
                          help="Use fragments instead of torsion angle " +
                               "based sampling for Monte Carlo approaches. " +
                               "For optimal performance you should provide " +
                               "sequence profiles in hhm format. (File " +
                               "extensions: .hhm or .hhm.gz). BUT: be aware of " +
                               "increased runtime.",
                          action="store_true")

class PM3OptionsNamespace(object):
    # class will grow, so for the moment pylint is ignored
    #pylint: disable=too-few-public-methods
self
.
_AttachViews
()
if
'PROFILE'
in
activated
:
self
.
_PostProcessProfile
()
if
'FRAGMENTS'
in
activated
:
self
.
_PostProcessFragments
()
def
_PostProcessAlignment
(
self
):
#pylint: disable=no-member
...
...
@@ -730,11 +801,11 @@ class PM3OptionsNamespace(object):
            # so not having any profile is fine
            return
-       loaded_profiles = list()
+       self.loaded_profiles = list()
        for src in self.seqprof:
-           loaded_profiles.append(_FetchProfileFromFile(src))
+           self.loaded_profiles.append(_FetchProfileFromFile(src))
-       prof_sequences = [p.sequence for p in loaded_profiles]
+       prof_sequences = [p.sequence for p in self.loaded_profiles]
        # check uniqueness of loaded profiles
        if len(set(prof_sequences)) != len(prof_sequences):
@@ -746,7 +817,7 @@ class PM3OptionsNamespace(object):
                         for aln in self.alignments]
        for s in trg_sequences:
            try:
-               self.profiles.append(loaded_profiles[prof_sequences.index(s)])
+               self.profiles.append(self.loaded_profiles[prof_sequences.index(s)])
            except Exception, exc:
                helper.MsgErrorAndExit("Could not find profile with sequence " +
                                       "that exactly matches trg seq: " + s, 55)
@@ -758,6 +829,80 @@ class PM3OptionsNamespace(object):
            helper.MsgErrorAndExit("Could not map every profile to a target " +
                                   "sequence", 53)

    def _PostProcessFragments(self):
        self.fragger_handles = list()
        if not self.use_fragments:
            # no fragments requested, so lets just return
            return
        trg_sequences = [aln.GetSequence(0).GetGaplessString() \
                         for aln in self.alignments]
        # we only want to setup a Fragger for every unique target sequence
        unique_trg_sequences = list(set(trg_sequences))
        # already setup variables, fill later if required data is present
        profiles = [None] * len(unique_trg_sequences)
        psipred_predictions = [None] * len(unique_trg_sequences)
        ts_coil = None
        ts_helix = None
        ts_extended = None
        # a structure db we need anyway. Load once and assign the same to all
        # fraggers to avoid memory explosion
        structure_db = loop.LoadStructureDB()
        # load the profiles
        if hasattr(self, "profiles") and len(self.profiles) > 0:
            profile_dict = dict()
            for p in self.loaded_profiles:
                profile_dict[p.sequence] = p
            # as we already mapped the profiles in _PostProcessProfiles,
            # the following is guaranteed to find the right profile
            # for every unique target sequence
            for s_idx, s in enumerate(unique_trg_sequences):
                profiles[s_idx] = profile_dict[s]
        # For the psipred predictions we have to go back to the
        # input files. If they all end with .hhm or hhm.gz we're ready to go
        file_endings_ok = True
        for src in self.seqprof:
            if not (src.endswith(".hhm") or src.endswith(".hhm.gz")):
                file_endings_ok = False
                break
        if file_endings_ok:
            # lets load the torsion samplers now as they are only required
            # if we also add psipred handlers
            ts_coil = loop.LoadTorsionSamplerCoil()
            ts_extended = loop.LoadTorsionSamplerExtended()
            ts_helix = loop.LoadTorsionSamplerHelical()
            # to get the right filenames we use the sequences of the
            # loaded profiles that are in the same order as self.seqprof
            profile_sequences = [p.sequence for p in self.loaded_profiles]
            for s_idx, s in enumerate(unique_trg_sequences):
                fn = self.seqprof[profile_sequences.index(s)]
                psipred_predictions[s_idx] = _FetchPsipredFromFile(fn)
        # setup one fragger handle for each unique sequence
        fraggers = list()
        for i in range(len(unique_trg_sequences)):
            fraggers.append(modelling.FraggerHandle(unique_trg_sequences[i],
                                                    profile=profiles[i],
                                                    psipred_pred=psipred_predictions[i],
                                                    rmsd_thresh=0.02,
                                                    structure_db=structure_db,
                                                    torsion_sampler_coil=ts_coil,
                                                    torsion_sampler_helix=ts_helix,
                                                    torsion_sampler_extended=ts_extended))
        # map them to the chains
        for s in trg_sequences:
            self.fragger_handles.append(fraggers[unique_trg_sequences.index(s)])

# LocalWords:  param attr prog argparse ArgumentParser bool sys os init str
# LocalWords:  progattr descattr argpinit argv formatter meth args namespace
# LocalWords:  ArgumentDefaultsHelpFormatter sysargv AssembleParser fasta io
core/tests/test_pm3argparse.py
@@ -1225,6 +1225,71 @@ class PM3ArgParseTests(unittest.TestCase):
                         opts.profiles[1].sequence)

    def testFraggerNotRequested(self):
        parser = pm3argparse.PM3ArgumentParser(__doc__, action=False)
        parser.AddAlignment()
        parser.AddFragments()
        parser.AssembleParser()
        opts = parser.Parse(['-f', 'data/aln_tpl/1crn.fasta',
                             '-f', 'data/aln_tpl/5ua4_B.fasta'])
        self.assertEqual(len(opts.alignments), 2)
        self.assertEqual(len(opts.fragger_handles), 0)

    def testFraggerAttachedWithoutProfile(self):
        parser = pm3argparse.PM3ArgumentParser(__doc__, action=False)
        parser.AddAlignment()
        parser.AddFragments()
        parser.AssembleParser()
        opts = parser.Parse(['-f', 'data/aln_tpl/1crn.fasta',
                             '-f', 'data/aln_tpl/5ua4_B.fasta',
                             '--use-fragments'])
        self.assertEqual(len(opts.alignments), 2)
        self.assertEqual(len(opts.fragger_handles), 2)
        self.assertEqual(opts.alignments[0].GetSequence(0).GetGaplessString(),
                         opts.fragger_handles[0].sequence)
        self.assertEqual(opts.alignments[1].GetSequence(0).GetGaplessString(),
                         opts.fragger_handles[1].sequence)
        # most of the stuff in the fragger handles should be None
        self.assertIsNone(opts.fragger_handles[0].profile)
        self.assertIsNone(opts.fragger_handles[0].psipred_pred)
        self.assertIsNone(opts.fragger_handles[0].torsion_sampler_coil)
        self.assertIsNone(opts.fragger_handles[0].torsion_sampler_helix)
        self.assertIsNone(opts.fragger_handles[0].torsion_sampler_extended)
        self.assertIsNone(opts.fragger_handles[1].profile)
        self.assertIsNone(opts.fragger_handles[1].psipred_pred)
        self.assertIsNone(opts.fragger_handles[1].torsion_sampler_coil)
        self.assertIsNone(opts.fragger_handles[1].torsion_sampler_helix)
        self.assertIsNone(opts.fragger_handles[1].torsion_sampler_extended)

    def testFraggerAttachedWithProfile(self):
        parser = pm3argparse.PM3ArgumentParser(__doc__, action=False)
        parser.AddAlignment()
        parser.AddProfile()
        parser.AddFragments()
        parser.AssembleParser()
        opts = parser.Parse(['-f', 'data/aln_tpl/1crn.fasta',
                             '-f', 'data/aln_tpl/5ua4_B.fasta',
                             '-s', 'data/aln_tpl/1crn.hhm',
                             '-s', 'data/aln_tpl/5ua4_B.hhm',
                             '--use-fragments'])
        self.assertEqual(len(opts.alignments), 2)
        self.assertEqual(len(opts.fragger_handles), 2)
        self.assertEqual(opts.alignments[0].GetSequence(0).GetGaplessString(),
                         opts.fragger_handles[0].sequence)
        self.assertEqual(opts.alignments[1].GetSequence(0).GetGaplessString(),
                         opts.fragger_handles[1].sequence)
        # most of the stuff in the fragger handles should be set
        self.assertIsNotNone(opts.fragger_handles[0].profile)
        self.assertIsNotNone(opts.fragger_handles[0].psipred_pred)
        self.assertIsNotNone(opts.fragger_handles[0].torsion_sampler_coil)
        self.assertIsNotNone(opts.fragger_handles[0].torsion_sampler_helix)
        self.assertIsNotNone(opts.fragger_handles[0].torsion_sampler_extended)
        self.assertIsNotNone(opts.fragger_handles[1].profile)
        self.assertIsNotNone(opts.fragger_handles[1].psipred_pred)
        self.assertIsNotNone(opts.fragger_handles[1].torsion_sampler_coil)
        self.assertIsNotNone(opts.fragger_handles[1].torsion_sampler_helix)
        self.assertIsNotNone(opts.fragger_handles[1].torsion_sampler_extended)

    # test options: --disable-aln check (for amino acids)
    # test options: --disable-input-checks (for all)
    # test option: --disable-mm-check (macromolecule)
doc/CMakeLists.txt
@@ -12,6 +12,7 @@ dev_setup.rst
users.rst
buildsystem.rst
contributing.rst
user_contributions.rst
gettingstarted.rst
portableIO.rst
references.rst
...
# add dependencies from doctests
list
(
APPEND _DOC_MODULE_DEPS
"
${
PM3_DOC_DEPS_doctests
}
"
)
# ensure that we wait for all modules to be done (solves parallel compile issue)
# -> since this is a custom target, it will not trigger a rebuild
# -> this is intended as rebuild is only needed for files in _DOC_MODULE_DEPS
add_custom_target
(
doc_wait_for_modules DEPENDS
${
PM3_PYMODULES
}
)
# add custom target also for all custom commands (solves parallel compile issue)
# -> https://samthursfield.wordpress.com/2015/11/21/cmake-dependencies-between-targets-and-files-and-custom-commands/
add_custom_target
(
doc_wait_for_rst DEPENDS
"
${
_SPHINX_CONF_PY
}
"
${
_RST_DEPS
}
)
# create targets for sphinx