diff --git a/CHANGELOG.txt b/CHANGELOG.txt
index 00bd9b3e2765afaf9c8de816bfbbc5882b8c2eca..d468d0dfb1d0b75d18530c1488c50cf70b725f16 100644
--- a/CHANGELOG.txt
+++ b/CHANGELOG.txt
@@ -1,17 +1,21 @@
-Changes in Release <RELEASE NUMBER>
+Changes in Release 1.8.0
 --------------------------------------------------------------------------------
 
-  * nonstandard C++ module was moved from ost.conop to ost.mol.alg. This implies
-    change in the API. Mapping functions CopyResidue, CopyConserved and
-    CopyNonConserved that were previousely imported from ost.conop are now
-    to be imported from ost.mol.alg.
+  * Introduced recipes to generate Docker and Singularity images.
+  * Moved "nonstandard" functions from ost.conop to ost.mol.alg. Mapping
+    functions CopyResidue, CopyConserved and CopyNonConserved that were
+    previously imported from ost.conop must now be imported from ost.mol.alg.
   * Removed habit of changing secondary structure of entities when loading
     from mmCIF PDB files. Before, OST would turn secondary structure 'EEH'
     into 'ECH' to make it look nicer in DNG. Now, 'EEH' stays 'EEH'.
   * Added Molck API to the ost.mol.alg module.
   * Extended lDDT API in ost.mol.alg module to reproduce functionality of lddt
-    binary.
+    binary and fixed issues in its stereo-chemistry checks.
   * Added `actions` interface including one action to compare structures.
+  * Updated HHblits binding (minor changes for optional arguments).
+  * Added functionality to find the optimal membrane position of a protein.
+  * Support for recent compilers which use C++11 by default.
+  * Several minor bug fixes, improvements, and speed-ups.
 
 Changes in Release 1.7.1
 --------------------------------------------------------------------------------
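
The API move noted in the 1.8.0 entry above boils down to an import change; a
minimal sketch (the function names come from the changelog, the old import is
shown only for comparison):

  # Up to OST 1.7.x the mapping functions lived in ost.conop:
  #   from ost.conop import CopyResidue, CopyConserved, CopyNonConserved
  # From OST 1.8.0 on they are imported from ost.mol.alg:
  from ost.mol.alg import CopyResidue, CopyConserved, CopyNonConserved
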
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 4327ea12f5523885cc462554bce4c7efcb93eab0..74a3348880146b72ffb3f3e045ef4502110eea98 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -6,8 +6,8 @@ cmake_minimum_required(VERSION 2.6.4 FATAL_ERROR)
 project(OpenStructure CXX C)
 set (CMAKE_EXPORT_COMPILE_COMMANDS 1)
 set (OST_VERSION_MAJOR 1)
-set (OST_VERSION_MINOR 7)
-set (OST_VERSION_PATCH 1)
+set (OST_VERSION_MINOR 8)
+set (OST_VERSION_PATCH 0)
 set (OST_VERSION_STRING ${OST_VERSION_MAJOR}.${OST_VERSION_MINOR}.${OST_VERSION_PATCH} )
 set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${PROJECT_SOURCE_DIR}/cmake_support)
 include(OST)
diff --git a/actions/ost-compare-structures b/actions/ost-compare-structures
index b4a192631f961f54e07059a56057c839f2651007..ff579be64c219ea4029389fb6be4c23eda099d13 100644
--- a/actions/ost-compare-structures
+++ b/actions/ost-compare-structures
@@ -13,8 +13,10 @@ eg.
       --remove oxt hyd \\
       --map-nonstandard-residues
 
-If desired one can recreate what CAMEO is calculating. CAMEO calls lddt binary
-as follows:
+Here we describe how the parameters can be set to mimic a CAMEO evaluation
+(as of August 2018).
+
+CAMEO calls the lddt binary as follows:
 
   lddt \\
       -p <PARAMETER FILE> \\
diff --git a/doc/conf/conf.py b/doc/conf/conf.py
index 9ddc034d552ed0d85a7f74aa500d22ed415ecd68..78949cd7a85e80047bb397debf755fd8385827ee 100644
--- a/doc/conf/conf.py
+++ b/doc/conf/conf.py
@@ -42,7 +42,7 @@ master_doc = 'index'
 
 # General information about the project.
 project = u'OpenStructure'
-copyright = u'2011, OpenStructure authors'
+copyright = u'2018, OpenStructure authors'
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
diff --git a/docker/Dockerfile b/docker/Dockerfile
index c3fa238362271d281a3416bac646b77ea1ea6be5..996b7fc8b87adb21b37c209ef48d267706d469c0 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -95,7 +95,7 @@ RUN cd ${SRC_FOLDER} && \
 # COMPILE AND INSTALL DSSP
 ##########################
 RUN cd ${SRC_FOLDER} && \
-    wget ftp://ftp.cmbi.ru.nl/pub/software/dssp/dssp-${DSSP_VERSION}.tgz && \
+    wget ftp://ftp.cmbi.umcn.nl/pub/molbio/software/dssp-2/dssp-${DSSP_VERSION}.tgz && \
     tar -xvzf dssp-${DSSP_VERSION}.tgz && \
     cd dssp-${DSSP_VERSION} && \
     make -j ${CPUS_FOR_MAKE} && \
@@ -168,7 +168,6 @@ WORKDIR /home
 # ENVIRONMENT
 ##############################################################################
 ENV OST_ROOT="/usr/local"
-ENV OPENSTRUCTURE_VERSION="1.7.1"
 ENV PYTHONPATH="/usr/local/lib64/python2.7/site-packages:${PYTHONPATH}"
 ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib64"
 ENV QT_X11_NO_MITSHM=1
diff --git a/docker/README.rst b/docker/README.rst
index 4700eb156cad0663893a957ff0eb42aafbffac7e..ebed4e994c2aac8045b0f1171fb38948bb19d849 100644
--- a/docker/README.rst
+++ b/docker/README.rst
@@ -4,7 +4,7 @@ OST Docker
 .. note::
 
   For many docker installations it is required to run docker commands as root. As
-  this depends on set up, we skip the `sudo` in all commands.
+  this depends on the setup, we skip the ``sudo`` in all commands.
 
 Build Docker image
 ------------------
@@ -23,15 +23,19 @@ or if you downloaded the Dockerfile directly:
 
   docker build --tag <IMAGE NAME> --build-arg OPENSTRUCTURE_VERSION=<VERSION> -f <DOCKERFILE NAME> <PATH TO DOCKERFILE DIR>
 
-You can chose any image name (tag) eg. ost. The `OPENSTRUCTURE_VERSION`
+You can choose any image name (tag), e.g. ``ost``. The ``OPENSTRUCTURE_VERSION``
 build argument is mandatory and the image will not build without it. See
 `CHANGELOG <https://git.scicore.unibas.ch/schwede/openstructure/blob/master/CHANGELOG.txt>`_
-for current list of available releases.
+for the current list of available releases. This is not expected to work for
+versions much older than the most recent one, since the dependencies might
+have changed, but it should work for the last few versions. If you need the
+recipe for an older version, we suggest retrieving the corresponding recipe
+from the git history.
 
 Testing the image
 -----------------
 
-One can find a exemplary script (`test_docker.py`) in the downloaded directory.
+One can find an example script (``test_docker.py``) in the downloaded directory.
 To run it do:
 
 .. code-block::
@@ -39,7 +43,7 @@ To run it do:
   cd <PATH TO OST>/docker
   docker run --rm -v $(pwd):/home <IMAGE NAME> test_docker.py
 
-As the last line you should see `OST is working!`.
+As the last line you should see ``OST is working!``.
 
 Run script and action with OST
 ------------------------------
@@ -61,7 +65,7 @@ Run script and action with OST
 
   .. code-block:: bash
 
-    docker run --rm -v /home/user:/home <IMAGE NAME> home/script.py /home/pdbs/struct.pdb
+    docker run --rm -v /home/user:/home <IMAGE NAME> /home/script.py /home/pdbs/struct.pdb
   
   An easy solution to mount a CWD is to use $(pwd) command in the -v option
   of the Docker. For an example see the action exemplary run.
@@ -83,7 +87,7 @@ To run chosen action do:
     docker run --rm <IMAGE NAME> <ACTION NAME>
 
  
-Here is an example run of compare-structures action mimicking CAMEO evaluation:
+Here is an example run of the compare-structures action:
 
 .. code-block::
 
@@ -123,7 +127,7 @@ In order to run OST script do:
 Run ost with utility command
 ###############################
 
-One can also use provided utility bash script `run_docker_ost` to run basic
+One can also use the provided utility bash script ``run_docker_ost`` to run basic
 scripts and actions:
 
 .. code-block:: bash
@@ -137,7 +141,7 @@ options. It is useful to link the command to the binary directory eg. in linux:
 
   ln -s <PATH TO OST>/docker/run_docker_ost /usr/bin/run_docker_ost
 
-In order to run an exemplary script (`test_docker.py`) do:
+In order to run the example script (``test_docker.py``) do:
 
 .. code-block::
 
@@ -170,13 +174,13 @@ the entrypoint:
 
 .. code-block::
 
-  sudo docker run --rm -ti --entrypoint <COMMAND> <IMAGE NAME> [COMMAND OPTIONS]
+  docker run --rm -ti --entrypoint <COMMAND> <IMAGE NAME> [COMMAND OPTIONS]
 
 Eg. to run molck type:
 
 .. code-block::
 
-  sudo docker run --rm -ti --entrypoint molck <IMAGE NAME> --help
+  docker run --rm -ti --entrypoint molck <IMAGE NAME> --help
 
 .. note::
 
diff --git a/modules/base/src/test_utils/compare_files.cc b/modules/base/src/test_utils/compare_files.cc
index 904b14a7fa75dde9bf4edcf3cab176966f312dc1..13dac63adcd4a2f8244303f66d187cadade08ba3 100644
--- a/modules/base/src/test_utils/compare_files.cc
+++ b/modules/base/src/test_utils/compare_files.cc
@@ -37,17 +37,18 @@ bool compare_files(const String& test, const String& gold_standard)
   }
   String test_line, gold_line;
   while (true) {
-    bool test_end=std::getline(test_stream, test_line) != 0;
-    bool gold_end=std::getline(gold_stream, gold_line) != 0;
-    if (!(test_end || gold_end)) {
+    bool test_read = static_cast<bool>(std::getline(test_stream, test_line));
+    bool gold_read = static_cast<bool>(std::getline(gold_stream, gold_line));
+    if (!test_read && !gold_read) {
+      // nothing left to read in either file
       return true;
     }
-    if (!test_end) {
+    if (gold_read && !test_read) {
       std::cerr << gold_standard << " contains additional line(s):"
                 << std::endl << gold_line << std::endl;
       return false;
     }
-    if (!gold_end) {
+    if (test_read && !gold_read) {
       std::cerr << test << " contains additional line(s):"
                 << std::endl << test_line << std::endl;
       return false;
diff --git a/modules/bindings/doc/hhblits.rst b/modules/bindings/doc/hhblits.rst
index c822d5bd0641d1fc6a863066f4196e3fe77f467a..9ffb1c0c6f5d81b2a35b4299e54a30165bf656e1 100644
--- a/modules/bindings/doc/hhblits.rst
+++ b/modules/bindings/doc/hhblits.rst
@@ -1,9 +1,6 @@
 :mod:`~ost.bindings.hhblits` - Search related sequences in databases
 ================================================================================
 
-.. module:: ost.bindings.hhblits
-  :synopsis: Search related sequences in databases
-
 Introduction
 --------------------------------------------------------------------------------
 
@@ -15,7 +12,7 @@ one is provided, queried with a sequence profile. The latter one needs to be
 calculated before the actual search. In very simple words, HHblits is using
 per-sequence scoring functions to be more sensitive, in this particular case
 Hidden Markov models. The software suite needed for HHblits can be found
-`here <http://toolkit.tuebingen.mpg.de/hhblits>`_.
+`here <http://wwwuser.gwdg.de/~compbiol/data/hhsuite/releases/all/>`_.
 
 
 Examples
@@ -59,6 +56,9 @@ First query by sequence:
   for hit in hits:
       print hit.aln
 
+  # cleanup
+  hh.Cleanup()
+
 Very similar going by file:
 
 .. code-block:: python
@@ -84,6 +84,9 @@ Very similar going by file:
   for hit in hits:
       print hit.aln
 
+  # cleanup
+  hh.Cleanup()
+
 The alignments produced by HHblits are sometimes slightly better than by BLAST,
 so one may want to extract them:
 
@@ -105,24 +108,15 @@ so one may want to extract them:
 
   print output['msa']
 
+  # cleanup
+  hh.Cleanup()
+
 
 Binding API
 --------------------------------------------------------------------------------
 
-.. autoclass:: ost.bindings.hhblits.HHblits
+.. automodule:: ost.bindings.hhblits
+   :synopsis: Search related sequences in databases
    :members:
 
-.. autoclass:: ost.bindings.hhblits.HHblitsHit
-
-.. autoclass:: ost.bindings.hhblits.HHblitsHeader
-
-.. autofunction:: ost.bindings.hhblits.ParseHHblitsOutput
-
-.. autofunction:: ost.bindings.hhblits.ParseA3M
-
-.. autofunction:: ost.bindings.hhblits.ParseHeaderLine
-
-.. autofunction:: ost.bindings.hhblits.ParseHHM
-
-.. autofunction:: ost.bindings.hhblits.EstimateMemConsumption
 ..  LocalWords:  HHblits homologs
diff --git a/modules/bindings/pymod/hhblits.py b/modules/bindings/pymod/hhblits.py
index dddaa7404688f414eaa1a3915c6141b049ff52ce..0a1045171c167a5e24e958d4256739443b37970b 100644
--- a/modules/bindings/pymod/hhblits.py
+++ b/modules/bindings/pymod/hhblits.py
@@ -1,4 +1,4 @@
-'''HHblits wrapper.
+'''HHblits wrapper classes and functions.
 '''
 
 import subprocess
@@ -124,8 +124,8 @@ def ParseHeaderLine(line):
     :param line: Line from the output header.
     :type line: :class:`str`
 
-    :return: Hit information
-    :rtype: :class:`HHblitsHit`
+    :return: Hit information and query/template offsets
+    :rtype: (:class:`HHblitsHit`, (:class:`int`, :class:`int`))
     '''
     for i in range(0, len(line)):
         if line[i].isdigit():
@@ -147,15 +147,14 @@ def ParseHeaderLine(line):
 
 def ParseHHblitsOutput(output):
     """
-    Parses the HHblits output and returns a tuple of :class:`HHblitsHeader` and
-    a list of :class:`HHblitsHit` instances.
+    Parses the HHblits output as produced by :meth:`HHblits.Search` and returns
+    the header of the search results and a list of hits.
 
-    :param output: output of a :meth:`HHblits.Search`, needs to be iteratable,
-                   e.g. an open file handle
-    :type output: :class:`file`/ iteratable
+    :param output: Iterable containing the lines of the HHblits output file
+    :type output: iterable (e.g. an open file handle)
 
     :return: a tuple of the header of the search results and the hits
-    :rtype: (:class:`HHblitsHeader`, :class:`HHblitsHit`)
+    :rtype: (:class:`HHblitsHeader`, :class:`list` of :class:`HHblitsHit`)
     """
     lines = iter(output)
     def _ParseHeaderSection(lines):
@@ -216,6 +215,12 @@ def ParseHHblitsOutput(output):
             return seq.CreateAlignment(s1, s2)
         try:
             while True:
+                # Lines which we are interested in:
+                # - "Done!" -> end of list
+                # - "No ..." -> next item in list
+                # - "T <hit_id> <start> <data> <end>"
+                # - "Q <query_id> <start> <data> <end>"
+                # -> rest is to be skipped
                 line = lines.next()
                 if len(line.strip()) == 0:
                     continue
@@ -239,19 +244,30 @@ def ParseHHblitsOutput(output):
                     lines.next()
                     continue
                 assert entry_index != None
+                # Skip all "T ..." and "Q ..." lines except the ones we want
                 if line[1:].startswith(' Consensus'):
                     continue
                 if line[1:].startswith(' ss_pred'):
                     continue
                 if line[1:].startswith(' ss_conf'):
                     continue
+                if line[1:].startswith(' ss_dssp'):
+                    continue
                 if line.startswith('T '):
                     end_pos = line.find(' ', 22)
-                    assert end_pos != -1
+                    # this can fail if we didn't skip all other "T ..." lines
+                    if end_pos == -1:
+                        error_str = "Unparsable line '%s' for entry No %d" \
+                                    % (line.strip(), entry_index + 1)
+                        raise AssertionError(error_str)
                     templ_str += line[22:end_pos]
                 if line.startswith('Q '):
                     end_pos = line.find(' ', 22)
-                    assert end_pos != -1
+                    # this can fail if we didn't skip all other "Q ..." lines
+                    if end_pos == -1:
+                        error_str = "Unparsable line '%s' for entry No %d" \
+                                    % (line.strip(), entry_index + 1)
+                        raise AssertionError(error_str)
                     query_str += line[22:end_pos]
         except StopIteration:
             if len(query_str) > 0:
@@ -271,10 +287,10 @@ def ParseHHblitsOutput(output):
 def ParseA3M(a3m_file):
     '''
     Parse secondary structure information and the multiple sequence alignment 
-    out of an A3M file.
+    out of an A3M file as produced by :meth:`HHblits.BuildQueryMSA`.
     
-    :param a3m_file: Iteratable containing the lines of the A3M file
-    :type a3m_file: iteratable, e.g. an opened file
+    :param a3m_file: Iterable containing the lines of the A3M file
+    :type a3m_file: iterable (e.g. an open file handle)
     
     :return: Dictionary containing "ss_pred" (:class:`list`), "ss_conf"
              (:class:`list`) and "msa" (:class:`~ost.seq.AlignmentHandle`).
@@ -323,36 +339,36 @@ def ParseA3M(a3m_file):
         t = msa_seq[0]
         al = seq.AlignmentList()
         for i in range(1, len(msa_seq)):
-          qs = ''
-          ts = ''
-          k = 0
-          for c in msa_seq[i]:
-            if c.islower():
-              qs += '-'
-              ts += c.upper()
-            else:
-              qs += t[k]
-              ts += c
-              k += 1
-          nl = seq.CreateAlignment(seq.CreateSequence(msa_head[0], qs), 
-                                 seq.CreateSequence(msa_head[i], ts))
-          al.append(nl)
+            qs = ''
+            ts = ''
+            k = 0
+            for c in msa_seq[i]:
+                if c.islower():
+                    qs += '-'
+                    ts += c.upper()
+                else:
+                    qs += t[k]
+                    ts += c
+                    k += 1
+            nl = seq.CreateAlignment(seq.CreateSequence(msa_head[0], qs), 
+                                     seq.CreateSequence(msa_head[i], ts))
+            al.append(nl)
         profile_dict['msa'] = seq.alg.MergePairwiseAlignments(\
-                                                al,
-                                                seq.CreateSequence(msa_head[0],
-                                                                   t))
+            al, seq.CreateSequence(msa_head[0], t))
     return profile_dict
 
 
 def ParseHHM(profile):
-    '''Parse secondary structure information and the MSA out of an HHM profile.
+    '''
+    Parse secondary structure information and the MSA out of an HHM profile as
+    produced by :meth:`HHblits.A3MToProfile`.
 
     :param profile: Opened file handle holding the profile.
     :type profile: :class:`file`
 
     :return: Dictionary containing "ss_pred" (:class:`list`), "ss_conf"
              (:class:`list`), "msa" (:class:`~ost.seq.AlignmentHandle`) and
-             "consensus" (~ost.seq.SequenceHandle).
+             "consensus" (:class:`~ost.seq.SequenceHandle`).
     '''
     profile_dict = dict()
     state = 'NONE'
@@ -423,25 +439,13 @@ def ParseHHM(profile):
                                      seq.CreateSequence(msa_head[i], ts))
             al.append(nl)
         profile_dict['msa'] = seq.alg.MergePairwiseAlignments(\
-                                            al,
-                                            seq.CreateSequence(msa_head[0], t))
-      #print profile_dict['msa'].ToString(80)
+            al, seq.CreateSequence(msa_head[0], t))
+        #print profile_dict['msa'].ToString(80)
     # Consensus
     profile_dict['consensus'] = seq.CreateSequence('Consensus', consensus_txt)
 
     return profile_dict
 
-def EstimateMemConsumption():
-    """
-    Estimate the memory needed by HHblits. By default it uses not more than 3G.
-    Also for small sequences it already uses quite some memnmory (46AA, 1.48G).
-    And since the memory consumption could depend on the iterative search runs,
-    how many hits are found in each step, we just go with 4G, here.
-
-    :return: Assumed memory consumtion
-    :rtype: (:class:`float`, :class:`str`)
-    """
-    return 4.0, 'G'
 
 class HHblits:
     """
@@ -461,7 +465,6 @@ class HHblits:
     :param working_dir: Directory for temporary files. Will be created if not
                         present but **not** automatically deleted.
     :type working_dir: :class:`str`
-
     """
     OUTPUT_PREFIX = 'query_hhblits'
     def __init__(self, query, hhsuite_root, hhblits_bin=None, working_dir=None):
@@ -474,6 +477,8 @@ class HHblits:
             self.hhblits_bin = settings.Locate('hhblits',
                                                explicit_file_name=hhblits_bin)
             self.bin_dir = os.path.dirname(self.hhblits_bin)
+            # guess root folder (note: this may fail in the future)
+            self.hhsuite_root = os.path.dirname(self.bin_dir)
         self.hhlib_dir = os.path.join(self.hhsuite_root, 'lib', 'hh')
         if working_dir:
             self.needs_cleanup = False
@@ -501,63 +506,63 @@ class HHblits:
                 self.working_dir = tmp_dir.dirname
                 self.filename = tmp_dir.files[0]
 
-    def Cleanup(self):
-        """Delete temporary data.
+    def BuildQueryMSA(self, nrdb, options={}, a3m_file=None):
+        """Builds the MSA for the query sequence.
 
-        Delete temporary data if no working dir was given. Controlled by
-        :attr:`needs_cleanup`.
-        """
-        if self.needs_cleanup and os.path.exists(self.working_dir):
-            shutil.rmtree(self.working_dir)
+        This function directly uses hhblits of hhtools. While in theory it would
+        be possible to do this by PSI-blasting on our own, hhblits is supposed
+        to be faster. Also it is supposed to prevent alignment corruption. The
+        alignment corruption is caused by low-scoring terminal alignments that
+        draw the sequences found by PSI-blast away from the optimum. By removing
+        these low scoring ends, part of the alignment corruption can be
+        suppressed.
 
-    def BuildQueryMSA(self, nrdb, iterations=1, mact=None, cpu=1):
-        """Builds the MSA for the query sequence
+        hhblits does **not** call PSIPRED on the MSA to predict the secondary
+        structure of the query sequence. This is done by addss.pl of hhtools.
+        The predicted secondary structure is stored together with the sequences
+        identified by hhblits.
 
-        This function directly uses hhblits of hhtools. While in theory it
-        would be possible to do this by PSI-blasting on our own, hhblits is
-        supposed to be faster. Also it is supposed to prevent alignment
-        corruption. The alignment corruption is caused by low-scoring terminal
-        alignments that draw the sequences found by PSI-blast away from the
-        optimum. By removing these low scoring ends, part of the alignment
-        corruption can be suppressed. hhblits does **not** call PSIPRED on the
-        MSA to predict the secondary structure of the query sequence. This is
-        done by addss.pl of hhtools. The predicted secondary structure is
-        stored together with the sequences identified by hhblits.
+        The produced A3M file can be parsed by :func:`ParseA3M`. If the file was
+        already produced, hhblits is not called again and the existing file path
+        is returned.
 
         :param nrdb: Database to be align against; has to be an hhblits database
         :type nrdb: :class:`str`
 
-        :param iterations: Number of hhblits iterations
-        :type iterations: :class:`int`
-
-        :param mact: ``-mact`` of hhblits
-        :type mact: :class:`float`
+        :param options: Dictionary of options to *hhblits*, one "-" is added in
+                        front of every key. Boolean True values add the flag
+                        without a value. Merged with default options {'cpu': 1,
+                        'n': 1}, where 'n' defines the number of iterations.
+        :type options: :class:`dict`
 
-        :param cpu: ``-cpu`` of hhblits
-        :type cpu: :class:`int`
+        :param a3m_file: Path to the A3M file to be used (optional)
+        :type a3m_file: :class:`str`
 
-        :return: the path to the MSA file
+        :return: The path to the A3M file containing the MSA
         :rtype: :class:`str`
         """
-        a3m_file = '%s.a3m' % os.path.splitext(self.filename)[0]
+        if a3m_file is None:
+            a3m_file = '%s.a3m' % os.path.splitext(self.filename)[0]
+        if os.path.exists(a3m_file):
+            ost.LogInfo('Reusing already existing query alignment (%s)' % a3m_file)
+            return a3m_file
         ost.LogInfo('Using hhblits from "%s"' % self.hhsuite_root)
         full_nrdb = os.path.join(os.path.abspath(os.path.split(nrdb)[0]),
                                  os.path.split(nrdb)[1])
         # create MSA
-        hhblits_cmd = '%s -e 0.001 -cpu %d -i %s -oa3m %s -d %s -n %d' % \
-                      (self.hhblits_bin, cpu, self.filename, a3m_file,
-                       full_nrdb, iterations)
-        if mact:
-            hhblits_cmd += '-mact %f' % mact
+        opts = {'cpu' : 1, # no. of cpus used
+                'n'   : 1}   # no. of iterations
+        opts.update(options)
+        opt_cmd, _ = _ParseOptions(opts)
+        hhblits_cmd = '%s -e 0.001 -i %s -oa3m %s -d %s %s' % \
+                      (self.hhblits_bin, self.filename, a3m_file, full_nrdb,
+                       opt_cmd)
         job = subprocess.Popen(hhblits_cmd, shell=True, cwd=self.working_dir,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         sout, _ = job.communicate()
-        #lines = sout.splitlines()
-        #for l in lines:
-        #    print l.strip()
-        #lines = serr.splitlines()
-        #for l in lines:
-        #    print l.strip()
+        lines = sout.splitlines()
+        for line in lines:
+            ost.LogVerbose(line.strip())
         if not os.path.exists(a3m_file):
             ost.LogWarning('Building query profile failed, no output')
             return a3m_file
@@ -580,7 +585,7 @@ class HHblits:
             if 'error' in line.lower():
                 ost.LogWarning('Predicting secondary structure for MSA '+
                                '(%s) failed, on command: %s' % (a3m_file, line))
-            return a3m_file
+                return a3m_file
         return a3m_file
 
     def A3MToProfile(self, a3m_file, hhm_file=None):
@@ -588,13 +593,18 @@ class HHblits:
         Converts the A3M alignment file to a hhm profile. If hhm_file is not
         given, the output file will be set to <:attr:`a3m_file`-basename>.hhm.
 
-        :param a3m_file: input MSA
+        The produced hhm file can be parsed by :func:`ParseHHM`.
+
+        If the file was already produced, the existing file path is returned
+        without recomputing it.
+
+        :param a3m_file: Path to input MSA as produced by :meth:`BuildQueryMSA`
         :type a3m_file: :class:`str`
 
-        :param hhm_file: output file name 
+        :param hhm_file: Desired output file name 
         :type hhm_file: :class:`str`
 
-        :return: the path to the profile
+        :return: Path to the profile file
         :rtype: :class:`str`
         """
         hhmake = os.path.join(self.bin_dir, 'hhmake')
@@ -609,24 +619,27 @@ class HHblits:
             raise IOError('could not convert a3m to hhm file')
         return hhm_file
 
-
     def A3MToCS(self, a3m_file, cs_file=None, options={}):
         """
         Converts the A3M alignment file to a column state sequence file. If
         cs_file is not given, the output file will be set to
         <:attr:`a3m_file`-basename>.seq219.
 
-        :param a3m_file: A3M file to be converted
+        If the file was already produced, the existing file path is returned
+        without recomputing it.
+
+        :param a3m_file: Path to input MSA as produced by :meth:`BuildQueryMSA`
         :type a3m_file: :class:`str`
 
-        :param cs_file: output file name (may be omitted)
+        :param cs_file: Output file name (may be omitted)
         :type cs_file: :class:`str`
 
-        :param options: dictionary of options to *cstranslate*, must come with
-                        the right amount of '-' in front.
+        :param options: Dictionary of options to *cstranslate*, one "-" is added
+                        in front of every key. Boolean True values add the flag
+                        without a value.
         :type options: :class:`dict`
 
-        :return: the path to the column state sequence file
+        :return: Path to the column state sequence file
         :rtype: :class:`str`
         """
         cstranslate = os.path.join(self.hhlib_dir, 'bin', 'cstranslate')
@@ -634,31 +647,36 @@ class HHblits:
             cs_file = '%s.seq219' % os.path.splitext(a3m_file)[0]
         if os.path.exists(cs_file):
             return cs_file
-        opt_cmd = list()
-        for k, val in options.iteritems():
-            if type(val) == type(True):
-                if val == True:
-                    opt_cmd.append('%s' % str(k))
-            else:
-                opt_cmd.append('%s %s' % (str(k), str(val)))
-        opt_cmd = ' '.join(opt_cmd)
-        cs_cmd = '%s -i %s -o %s %s' % (cstranslate, a3m_file, cs_file, opt_cmd)
+        opt_cmd, _ = _ParseOptions(options)
+        cs_cmd = '%s -i %s -o %s %s' % (
+            cstranslate,
+            os.path.abspath(a3m_file),
+            os.path.abspath(cs_file),
+            opt_cmd)
         ost.LogVerbose('converting %s to %s' % (a3m_file, cs_file))
-        job = subprocess.Popen(cs_cmd, shell=True,
+        job = subprocess.Popen(cs_cmd, shell=True, cwd=self.working_dir,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         sout, _ = job.communicate()
-        #lines = serr.splitlines()
-        #for l in lines:
-        #    print l
         lines = sout.splitlines()
         for line in lines:
-            if line in 'Wrote abstract state sequence to %s' % cs_file:
+            if 'Wrote abstract state sequence to' in line:
                 return cs_file
         ost.LogWarning('Creating column state sequence file (%s) failed' % \
                        cs_file)
 
+    def Cleanup(self):
+        """Delete temporary data.
+
+        Delete temporary data if no working dir was given. Controlled by
+        :attr:`needs_cleanup`.
+        """
+        if self.needs_cleanup and os.path.exists(self.working_dir):
+            shutil.rmtree(self.working_dir)
+
     def CleanupFailed(self):
         '''In case something went wrong, call to make sure everything is clean.
+
+        This will delete the working dir independently of :attr:`needs_cleanup`.
         '''
         store_needs_cleanup = self.needs_cleanup
         self.needs_cleanup = True
@@ -667,50 +685,41 @@ class HHblits:
 
     def Search(self, a3m_file, database, options={}, prefix=''):
         """
-        Searches for templates in the given database. Before running the
-        search, the hhm file is copied. This makes it possible to launch
-        several hhblits instances at once. Upon success, the filename of the
-        result file is returned. This file may be parsed with
-        :func:`ParseHHblitsOutput`.
+        Searches for templates in the given database. Before running the search,
+        the hhm file is copied. This makes it possible to launch several hhblits
+        instances at once. Upon success, the filename of the result file is
+        returned. This file may be parsed with :func:`ParseHHblitsOutput`.
 
-        :param a3m_file: input MSA file
+        :param a3m_file: Path to input MSA as produced by :meth:`BuildQueryMSA`
         :type a3m_file: :class:`str`
 
-        :param database: search database, needs to be the common prefix of the
+        :param database: Search database, needs to be the common prefix of the
                          database files
         :type database: :class:`str`
 
-        :param options: dictionary of options, must come with the right amount
-                        of '-' in front.
+        :param options: Dictionary of options to *hhblits*, one "-" is added in
+                        front of every key. Boolean True values add the flag
+                        without a value. Merged with default options {'cpu': 1,
+                        'n': 1}, where 'n' defines the number of iterations.
         :type options: :class:`dict`
 
-        :param prefix: prefix to the result file
+        :param prefix: Prefix to the result file
         :type prefix: :class:`str`
 
-        :return: the path to the result file
+        :return: The path to the result file
         :rtype: :class:`str`
         """
         opts = {'cpu' : 1, # no. of cpus used
                 'n'   : 1}   # no. of iterations
         opts.update(options)
-        opt_cmd = []
-        opt_str = []
-        for k, val in opts.iteritems():
-            if type(val) == type(True):
-                if val == True:
-                    opt_cmd.append('-%s' % str(k))
-                    opt_str.append(str(k))
-            else:
-                opt_cmd.append('-%s %s' % (str(k), str(val)))
-                opt_str.append('%s%s' % (str(k), str(val)))
-        opt_cmd = ' '.join(opt_cmd)
-        opt_str = '_'.join(opt_str)
+        opt_cmd, opt_str = _ParseOptions(opts)
         base = os.path.basename(os.path.splitext(a3m_file)[0])
         hhr_file = '%s%s_%s.hhr' % (prefix, base, opt_str)
         hhr_file = os.path.join(self.working_dir, hhr_file)
-        search_cmd = '%s %s -e 0.001 -Z 10000 -B 10000 -i %s -o %s -d %s'%(
+        search_cmd = '%s %s -e 0.001 -Z 10000 -B 10000 -i %s -o %s -d %s' % (
             self.hhblits_bin,
-            opt_cmd, os.path.abspath(a3m_file),
+            opt_cmd,
+            os.path.abspath(a3m_file),
             hhr_file,
             os.path.join(os.path.abspath(os.path.split(database)[0]),
                          os.path.split(database)[1]))
@@ -722,20 +731,44 @@ class HHblits:
         if job.returncode != 0:
             lines = sout.splitlines()
             for line in lines:
-                print line.strip()
+                ost.LogError(line.strip())
             lines = serr.splitlines()
             for line in lines:
-                print line.strip()
+                ost.LogError(line.strip())
             return None
         return hhr_file
 
 
-__all__ = ['HHblits', 'HHblitsHit', 'HHblitsHeader', 'ParseHeaderLine',
+def _ParseOptions(opts):
+    """
+    :return: Tuple of strings (opt_cmd, opt_str), where opt_cmd can be
+             passed to the command line ("-" added in front of keys, options
+             separated by spaces) and opt_str (options separated by "_")
+             can be used for file names.
+    :param opts: Dictionary of options, one "-" is added in front of every
+                 key. Boolean True values add the flag without a value.
+    """
+    opt_cmd = list()
+    opt_str = list()
+    for k, val in opts.iteritems():
+        if type(val) == type(True):
+            if val == True:
+                opt_cmd.append('-%s' % str(k))
+                opt_str.append(str(k))
+        else:
+            opt_cmd.append('-%s %s' % (str(k), str(val)))
+            opt_str.append('%s%s' % (str(k), str(val)))
+    opt_cmd = ' '.join(opt_cmd)
+    opt_str = '_'.join(opt_str)
+    return opt_cmd, opt_str
+
+
+__all__ = ['HHblits', 'HHblitsHit', 'HHblitsHeader',
            'ParseHHblitsOutput', 'ParseA3M', 'ParseHHM',
-           'EstimateMemConsumption']
+           'ParseHeaderLine']
 
-#  LocalWords:  HHblits MSA hhblits hhtools PSIPRED addss param nrdb str mact
+#  LocalWords:  HHblits MSA hhblits hhtools PSIPRED addss param nrdb str
 #  LocalWords:  cpu hhm func ParseHHblitsOutput ss pred conf msa hhsuite dir
 #  LocalWords:  attr basename rtype cstranslate tuple HHblitsHeader meth aln
-#  LocalWords:  HHblitsHit iteratable evalue pvalue neff hmms datetime
+#  LocalWords:  HHblitsHit iterable evalue pvalue neff hmms datetime
 #  LocalWords:  whitespace whitespaces
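
A short usage sketch of the reworked binding above. The hhsuite root, database
prefixes and the query sequence are placeholders, and the option keys shown are
only examples of how the options dictionary is translated (one "-" is prepended
per key, a boolean True adds a bare flag, and the defaults {'cpu': 1, 'n': 1}
are merged in):

  from ost import seq
  from ost.bindings import hhblits

  query = seq.CreateSequence('query', 'MRIILLGAPGAGKGTQAQFIM')
  hh = hhblits.HHblits(query, '/path/to/hhsuite')

  # build the query MSA; hhblits options are passed as a dictionary
  a3m_file = hh.BuildQueryMSA(nrdb='/path/to/nr_database',
                              options={'cpu': 4, 'n': 2})

  # search a template database and parse the result file
  hhr_file = hh.Search(a3m_file, '/path/to/template_database',
                       options={'cpu': 4})
  if hhr_file is not None:
      with open(hhr_file) as hhr:
          header, hits = hhblits.ParseHHblitsOutput(hhr)

  # remove temporary data (only done if no working_dir was given)
  hh.Cleanup()
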
diff --git a/modules/bindings/pymod/utils.py b/modules/bindings/pymod/utils.py
index 02edd598f9b664c165f385b92429b619d7bf09fd..e476d04a9a752c64f86cf0f0e4d0a4418cfa405c 100644
--- a/modules/bindings/pymod/utils.py
+++ b/modules/bindings/pymod/utils.py
@@ -29,7 +29,7 @@ def SaveToTempDir(objects, seq_format='fasta', structure_format='pdb'):
       file_names.append(name)
       continue
     if isinstance(obj, mol.EntityView) or isinstance(obj, mol.EntityHandle):
-      name=os.path.join(tmp_dir_name, tmp_dir_name, 'mol%02d.pdb' % (index+1))
+      name=os.path.join(tmp_dir_name, 'mol%02d.pdb' % (index+1))
       io.SaveEntity(obj, name, structure_format)
       file_names.append(name)
       continue
diff --git a/modules/bindings/tests/test_hhblits.py b/modules/bindings/tests/test_hhblits.py
index c396528b8c623d620e07f7611431760afcfd1725..2ba099ced0c4e7f5dd7d0856431cd48d22a326b2 100644
--- a/modules/bindings/tests/test_hhblits.py
+++ b/modules/bindings/tests/test_hhblits.py
@@ -196,7 +196,7 @@ class TestHHblitsBindings(unittest.TestCase):
         _, self.tmpfile = tempfile.mkstemp(suffix='.seq219')
         os.remove(self.tmpfile)
         csfile = self.hh.A3MToCS("testfiles/testali.a3m",
-                                 cs_file=self.tmpfile, options={'--alphabet' :
+                                 cs_file=self.tmpfile, options={'-alphabet' :
                                                 os.path.join(self.hh.hhlib_dir,
                                                              'data',
                                                              'cs219.lib')})
@@ -211,7 +211,7 @@ class TestHHblitsBindings(unittest.TestCase):
                                        'TSKYR')
         self.hh = hhblits.HHblits(query_seq, self.hhroot)
         csfile = self.hh.A3MToCS("testfiles/testali.a3m",
-                                 options={'--alphabet' :
+                                 options={'-alphabet' :
                                           os.path.join(self.hh.hhlib_dir,
                                                        'data',
                                                        'cs219.lib')})
@@ -228,7 +228,7 @@ class TestHHblitsBindings(unittest.TestCase):
         self.hh = hhblits.HHblits(query_seq, self.hhroot)
         csfile = self.hh.A3MToCS("testfiles/testali.a3m",
                                  cs_file='testfiles/test.seq219',
-                                 options={'--alphabet' :
+                                 options={'-alphabet' :
                                           os.path.join(self.hh.hhlib_dir,
                                                        'data',
                                                        'cs219.lib')})
@@ -305,6 +305,17 @@ class TestHHblitsBindings(unittest.TestCase):
                          'HHHHHHHHHHHHCC')
         self.assertEqual(prof['msa'].GetCount(), 253)
 
+    def testParseHeader(self):
+        header_line = '  1 814cbc1899f35c872169524af30fc2 100.0  5E-100' + \
+                      '  5E-104  710.5  34.1  277    3-293     2-280 (281)'
+        hit, offset = hhblits.ParseHeaderLine(header_line)
+        self.assertEqual(hit.hit_id, '814cbc1899f35c872169524af30fc2')
+        self.assertAlmostEqual(hit.evalue, 0)
+        self.assertAlmostEqual(hit.prob, 100.0)
+        self.assertAlmostEqual(hit.pvalue, 0)
+        self.assertAlmostEqual(hit.score, 710.5)
+        self.assertAlmostEqual(hit.ss_score, 34.1)
+
     def testParseHHblitsOutput(self):
         header, hits = hhblits.ParseHHblitsOutput(open("testfiles/test.hhr"))
         self.assertEqual(header.query, 'Test')
@@ -385,8 +396,6 @@ class TestHHblitsBindings(unittest.TestCase):
                          'Test                VDPVNFKLLSHCLLVTLAAHL\ne69e1ac0'+
                          'a4b2554d... ATPEQAQLVHKEIRKIVKDTC\n')
 
-# ParseHHblitsOutput
-
 if __name__ == "__main__":
     hhsuite_root_dir =  os.getenv('EBROOTHHMINSUITE')
     if not hhsuite_root_dir:
diff --git a/modules/config/base.hh b/modules/config/base.hh
index 2019437b5b47828492ec8de2422a77287d650092..b10638e7ede8f4a6f301aad7db6dd8d05b243bc0 100644
--- a/modules/config/base.hh
+++ b/modules/config/base.hh
@@ -51,28 +51,15 @@ typedef unsigned int uint;
 typedef std::complex<Real> Complex;
 typedef unsigned short Word;
 
+typedef std::string String;
 
+// NOTE: Before OST 1.8, we used to have round and rint functions defined here.
+// -> round and rint have been available in all relevant compilers for years.
+// -> Tested for GCC 4.1.2 - 9.0.0, clang 3.3.0 - 8.0.0, MSVC 2015 - 2017 using
+//    godbolt.org. In all cases, a call with a float argument is not cast to
+//    double but kept as float, which is the desired behaviour for performance.
 
-#ifndef round_function
-#define round_function
-#ifndef round
-inline Real round( Real d )
-{
-  return floor(d+Real(0.5));
-}
-#endif
-#endif
-
-#ifndef rint_function
-#define rint_function
-#ifndef rint
-inline Real rint(Real d)
-{
-  return floor(d+Real(0.5));
-}
-#endif
-#endif
-
+// NOTE: OST has not been tested with MSVC for a very long time!
 #if _MSC_VER
 #pragma warning(disable:4251)
 #pragma warning(disable:4275)
@@ -104,7 +91,4 @@ inline double log2( double n )
 #endif
 
 
-typedef std::string String;
-
-
 #endif
diff --git a/modules/conop/doc/compoundlib.rst b/modules/conop/doc/compoundlib.rst
index caca8445b454408a8b103e96129e847e71045d55..41f859e7a8787499553fad1b1639e0827379c60c 100644
--- a/modules/conop/doc/compoundlib.rst
+++ b/modules/conop/doc/compoundlib.rst
@@ -45,7 +45,7 @@ build the compound library manually.
       write mode, the programs can deadlock.
     :type readonly: :class:`bool`
     
-    :returns: The loaded compound lib
+    :returns: The loaded compound lib or None if it failed.
     
   .. staticmethod:: Create(database)
     
diff --git a/modules/conop/src/compound_lib.cc b/modules/conop/src/compound_lib.cc
index e2cf22263aa88071b425f686cd0d7a9666ef112c..3509db72e85d6c81db125ee910931ed7b435eea5 100644
--- a/modules/conop/src/compound_lib.cc
+++ b/modules/conop/src/compound_lib.cc
@@ -361,17 +361,20 @@ CompoundLibPtr CompoundLib::Load(const String& database, bool readonly)
                             static_cast<int>(aq.length()),
                             &stmt, NULL);
   lib->chem_type_available_ = retval==SQLITE_OK;
+  sqlite3_finalize(stmt);
   aq="SELECT name FROM chem_compounds LIMIT 1";
   retval=sqlite3_prepare_v2(lib->conn_, aq.c_str(),
                             static_cast<int>(aq.length()),
                             &stmt, NULL);
   lib->name_available_ = retval==SQLITE_OK;
+  sqlite3_finalize(stmt);
   // check if InChIs are available
   aq="SELECT inchi_code FROM chem_compounds LIMIT 1";
   retval=sqlite3_prepare_v2(lib->conn_, aq.c_str(),
                             static_cast<int>(aq.length()),
                             &stmt, NULL);
   lib->inchi_available_ = retval==SQLITE_OK;
+  sqlite3_finalize(stmt);
 
   lib->creation_date_ = lib->GetCreationDate();
   lib->ost_version_used_ = lib->GetOSTVersionUsed();
@@ -428,7 +431,7 @@ void CompoundLib::LoadBondsFromDB(CompoundPtr comp, int pk) const {
   } else {
     LOG_ERROR(sqlite3_errmsg(conn_));
   } 
-  sqlite3_finalize(stmt);  
+  sqlite3_finalize(stmt);
 }
 
 CompoundPtr CompoundLib::FindCompound(const String& id, 
@@ -522,7 +525,11 @@ CompoundLib::CompoundLib():
 
 CompoundLib::~CompoundLib() {
   if (conn_) {
-    sqlite3_close(conn_);
+    int retval = sqlite3_close(conn_);
+    if (retval != SQLITE_OK) {
+      LOG_ERROR("Problem while closing SQLite db for CompoundLib: "
+                << sqlite3_errmsg(conn_));
+    }
   }
 }
 }}
diff --git a/modules/conop/src/rule_based.cc b/modules/conop/src/rule_based.cc
index 6e8b712fe63fdff02fec5836fd39316583dacc35..fe2013e254b80f5b4ff5b52474ff672873adb72c 100644
--- a/modules/conop/src/rule_based.cc
+++ b/modules/conop/src/rule_based.cc
@@ -19,6 +19,7 @@
 #include <limits>
 #include <ost/log.hh>
 #include <ost/profile.hh>
+#include <ost/message.hh>
 #include <ost/mol/xcs_editor.hh>
 #include <ost/mol/bond_handle.hh>
 #include <ost/mol/torsion_handle.hh>
@@ -30,8 +31,6 @@
 
 namespace ost { namespace conop {
 
-
-
 void RuleBasedProcessor::DoProcess(DiagnosticsPtr diags, 
                                    mol::EntityHandle ent) const
 {
@@ -199,6 +198,11 @@ String RuleBasedProcessor::ToString() const {
   return ss.str();
 }
 
-
+void RuleBasedProcessor::_CheckLib() const {
+  if (!lib_) {
+    throw Error("Cannot initialize RuleBasedProcessor without a valid "
+                "CompoundLib object!");
+  }
+}
 
 }}
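
From the Python side, the new check behaves roughly as sketched below (assuming
the usual ost.conop bindings; the compound library path is a placeholder). The
point of _CheckLib() is that an invalid library now fails at construction time
instead of causing errors later during processing:

  from ost import conop

  # CompoundLib.Load returns None if loading failed
  lib = conop.CompoundLib.Load('/path/to/compounds.chemlib')
  if lib is None:
      raise RuntimeError('could not load the compound library')

  # with a valid library this succeeds; an invalid/empty library now
  # raises an error right here rather than later during processing
  processor = conop.RuleBasedProcessor(lib)
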
diff --git a/modules/conop/src/rule_based.hh b/modules/conop/src/rule_based.hh
index 3b91771a29c027f9cbd503c3927f3a13b58add34..719ba78030003cae44dfa881d8339757b3008102 100644
--- a/modules/conop/src/rule_based.hh
+++ b/modules/conop/src/rule_based.hh
@@ -40,13 +40,19 @@ public:
     lib_(compound_lib), fix_element_(true), strict_hydrogens_(false), 
     unk_res_treatment_(CONOP_WARN), unk_atom_treatment_(CONOP_WARN)
   {
+    _CheckLib();
   }
 
-  RuleBasedProcessor(CompoundLibPtr compound_lib, bool fe, bool sh, ConopAction ur, 
-                     ConopAction ua, bool bf, bool at, bool cn, bool aa, ConopAction zo): 
+  RuleBasedProcessor(CompoundLibPtr compound_lib, bool fe, bool sh,
+                     ConopAction ur, ConopAction ua, bool bf, bool at, bool cn,
+                     bool aa, ConopAction zo): 
     Processor(bf, at, cn, aa, zo), lib_(compound_lib), fix_element_(fe), 
     strict_hydrogens_(sh), unk_res_treatment_(ur), 
-    unk_atom_treatment_(ua) {}
+    unk_atom_treatment_(ua)
+  {
+    _CheckLib();
+  }
+
   ConopAction GetUnkResidueTreatment() const {
     return unk_res_treatment_;
   }
@@ -91,6 +97,8 @@ protected:
   virtual void DoProcess(DiagnosticsPtr diags, 
                          mol::EntityHandle ent) const;
 private:
+  void _CheckLib() const;
+
   CompoundLibPtr lib_;
   bool fix_element_;
   bool strict_hydrogens_;
diff --git a/modules/conop/tests/test_rule_based_conop.cc b/modules/conop/tests/test_rule_based_conop.cc
index 6444695d9f85cece9e06c9b4c6153ec8645963ba..1ce4ebe2436ff5f3d148a6272d3cb7b73cefb481 100644
--- a/modules/conop/tests/test_rule_based_conop.cc
+++ b/modules/conop/tests/test_rule_based_conop.cc
@@ -50,9 +50,24 @@ CompoundLibPtr load_lib()
 
 BOOST_AUTO_TEST_SUITE(conop);
 
+BOOST_AUTO_TEST_CASE(rule_based_init_check)
+{
+  CompoundLibPtr lib; // null ptr is return value when library loading failed
+  BOOST_CHECK_THROW(RuleBasedProcessor rbc1(lib), ost::Error);
+  BOOST_CHECK_THROW(RuleBasedProcessor rbc2(lib, true, false, CONOP_WARN,
+                                            CONOP_WARN, false, true, true, true,
+                                            CONOP_WARN), ost::Error);
+  lib = load_lib();
+  if (!lib) { return; }
+  BOOST_CHECK_NO_THROW(RuleBasedProcessor rbc3(lib));
+  BOOST_CHECK_NO_THROW(RuleBasedProcessor rbc4(lib, true, false, CONOP_WARN,
+                                               CONOP_WARN, false, true, true,
+                                               true, CONOP_WARN));
+}
+
 BOOST_AUTO_TEST_CASE(rule_based_set_get_flags)
 {
-   CompoundLibPtr lib=load_lib(); 
+   CompoundLibPtr lib = load_lib();
    if (!lib) { return; }
    RuleBasedProcessor rbc(lib);
    // check the defaults
@@ -76,7 +91,7 @@ BOOST_AUTO_TEST_CASE(rule_based_set_get_flags)
 
 BOOST_AUTO_TEST_CASE(rule_based_connect)
 {
-  CompoundLibPtr lib=load_lib();
+  CompoundLibPtr lib = load_lib();
   if (!lib) { return; }
   RuleBasedProcessor rbc(lib);
   EntityHandle ent = CreateEntity();
@@ -94,7 +109,7 @@ BOOST_AUTO_TEST_CASE(rule_based_connect)
 
 BOOST_AUTO_TEST_CASE(rule_based_unk_atoms)
 {
-  CompoundLibPtr lib = load_lib(); 
+  CompoundLibPtr lib = load_lib();
   if (!lib) { return; }
   RuleBasedProcessor rbc(lib);
   EntityHandle ent = CreateEntity();
@@ -125,7 +140,7 @@ BOOST_AUTO_TEST_CASE(rule_based_unk_atoms)
 
 BOOST_AUTO_TEST_CASE(guesses_elements_of_unknown_atoms)
 {
-  CompoundLibPtr lib = load_lib(); 
+  CompoundLibPtr lib = load_lib();
   if (!lib) { return; }
   RuleBasedProcessor rbc(lib);
   EntityHandle ent = CreateEntity();
@@ -142,7 +157,7 @@ BOOST_AUTO_TEST_CASE(guesses_elements_of_unknown_atoms)
 
 BOOST_AUTO_TEST_CASE(fills_properties_of_unknown_residues)
 {
-  CompoundLibPtr lib = load_lib(); 
+  CompoundLibPtr lib = load_lib();
   if (!lib) { return; }
   RuleBasedProcessor rbc(lib);
   EntityHandle ent = CreateEntity();
@@ -160,7 +175,7 @@ BOOST_AUTO_TEST_CASE(fills_properties_of_unknown_residues)
 BOOST_AUTO_TEST_CASE(connects_atoms_of_unknown_residues_based_on_distance) 
 {
 
-  CompoundLibPtr lib = load_lib(); 
+  CompoundLibPtr lib = load_lib();
   if (!lib) { return; }
   RuleBasedProcessor rbc(lib);
   EntityHandle ent = CreateEntity();
@@ -177,7 +192,7 @@ BOOST_AUTO_TEST_CASE(connects_atoms_of_unknown_residues_based_on_distance)
 
 BOOST_AUTO_TEST_CASE(rule_based_unk_res)
 {
-  CompoundLibPtr lib = load_lib(); 
+  CompoundLibPtr lib = load_lib();
   if (!lib) { return; }
   RuleBasedProcessor rbc(lib);
   EntityHandle ent = CreateEntity();
diff --git a/modules/doc/actions.rst b/modules/doc/actions.rst
index fb2f882d86d64700652cdf26921ecbd133ec277a..4ff4afc6aabf48fc88e8fb75ec9a717e51a19f5a 100644
--- a/modules/doc/actions.rst
+++ b/modules/doc/actions.rst
@@ -109,7 +109,12 @@ Example usage:
 
   $ curl https://www.cameo3d.org/static/data/modeling/2018.03.03/5X7J_B/bu_target_01.pdb > reference.pdb
   $ curl https://www.cameo3d.org/static/data/modeling/2018.03.03/5X7J_B/servers/server11/oligo_model-1/superposed_oligo_model-1.pdb > model.pdb
-  $ $OST_ROOT/bin/ost compare-structures --model model.pdb --reference reference.pdb --output output.json --qs-score --residue-number-alignment --lddt --structural-checks --consistency-checks --inclusion-radius 15.0 --bond-tolerance 15.0 --angle-tolerance 15.0 --molck --remove oxt hyd unk --clean-element-column --map-nonstandard-residues
+  $ $OST_ROOT/bin/ost compare-structures \
+        --model model.pdb --reference reference.pdb --output output.json \
+        --qs-score --residue-number-alignment --lddt --structural-checks \
+        --consistency-checks --inclusion-radius 15.0 --bond-tolerance 15.0 \
+        --angle-tolerance 15.0 --molck --remove oxt hyd unk \
+        --clean-element-column --map-nonstandard-residues
 
   ################################################################################
   Reading input files (fault_tolerant=False)
@@ -251,8 +256,8 @@ In the example above the output file looks as follows:
 
   {
       "result": {
-          "": {
-              "": {
+          "model.pdb": {
+              "reference.pdb": {
                   "info": {
                       "residue_names_consistent": true, 
                       "mapping": {
@@ -345,12 +350,13 @@ In the example above the output file looks as follows:
       }
   }
 
-If only all the structures are clean one can omit all the checking steps and
+If all the structures are clean, one can omit all the checking steps and
 calculate eg. QS-score directly:
 
 .. code:: console
 
-  $OST_ROOT/bin/ost compare-structures --model model.pdb --reference reference.pdb --output output_qs.json --qs-score --residue-number-alignment
+  $ $OST_ROOT/bin/ost compare-structures --model model.pdb --reference reference.pdb --output output_qs.json --qs-score --residue-number-alignment
+
   ################################################################################
   Reading input files (fault_tolerant=False)
    --> reading model from model.pdb
@@ -359,7 +365,6 @@ calculate eg. QS-score directly:
   imported 3 chains, 408 residues, 3011 atoms; with 0 helices and 0 strands
   ################################################################################
   Comparing model.pdb to reference.pdb
-  Chains removed from reference.pdb: _
   Chains in reference.pdb: AB
   Chains in model.pdb: AB
   Chemically equivalent chain-groups in reference.pdb: [['B', 'A']]
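
To illustrate the JSON layout shown in the excerpt above, a small sketch for
reading the output file (only the keys visible in the excerpt are assumed; the
scores stored below "info" depend on the options that were selected):

  import json

  with open('output.json') as json_file:
      data = json.load(json_file)

  # results are keyed by model and reference file name
  result = data['result']['model.pdb']['reference.pdb']
  print(result['info']['residue_names_consistent'])
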
diff --git a/modules/doc/contributing.rst b/modules/doc/contributing.rst
index c171b7c0ddf1304876a2a78e60d063ff6fc63cb2..21f56abca9215334d2379e0811954735a3803b17 100644
--- a/modules/doc/contributing.rst
+++ b/modules/doc/contributing.rst
@@ -143,7 +143,7 @@ To get the new action recognised by ``make`` to be placed in
 :file:`stage/libexec/openstructure`, it has to be registered with ``cmake`` in
 :file:`actions/CMakeLists.txt`:
 
-.. code-block:: console
+.. code-block:: cmake
   :linenos:
 
    add_custom_target(actions ALL)
@@ -165,10 +165,10 @@ directory. There are only two really important points:
   environment your action is running in. Actions are called by :file:`ost`,
   that's enough to get everything just right.
 
-* The action of your action happens in the ost branch of the script.
+* The code of your action belongs in the :attr:`__main__` branch of the script.
   Your action will have own function definitions, variables and all the bells
-  and whistles. Hiding behind ost keeps everything separated and makes
-  things easier when it gets to debugging. So just after
+  and whistles. Hiding behind :attr:`__main__` keeps everything separated and
+  makes things easier when it comes to debugging. So just after
 
   .. code-block:: python
 
diff --git a/modules/doc/install.rst b/modules/doc/install.rst
index 585dafe3cfd87a45afcd88c6cccec4213a83a125..b6664cfb40580cca4d28a2271ac3cd34af8c05d7 100644
--- a/modules/doc/install.rst
+++ b/modules/doc/install.rst
@@ -4,14 +4,22 @@ Installing OpenStructure From Source
 Brief Overview
 --------------------------------------------------------------------------------
 
-Compiling OpenStructure consists of several steps that are described below in
-more detail. In essence, these steps are:
+For a simple and portable way to use OpenStructure we recommend using a
+container solution. We provide recipes to build images for
+`Docker <https://www.docker.com/>`_ and
+`Singularity <https://www.sylabs.io/guides/2.5.1/user-guide>`_.
+The latest recipes and instructions can be found on our GitLab site
+(`Docker instructions <https://git.scicore.unibas.ch/schwede/openstructure/tree/develop/docker>`_ and
+`Singularity instructions <https://git.scicore.unibas.ch/schwede/openstructure/tree/develop/singularity>`_).
+
+If you wish to compile OpenStructure outside of a container, you need to follow
+the steps which we describe in detail below. In essence, these steps are:
 
 * Installing the Dependencies
 * Checking out the source code from GIT
 * Configuring the build with cmake
 * Compiling an Linking
- 
+
 
 Installing the Dependencies
 --------------------------------------------------------------------------------
@@ -299,16 +307,8 @@ from source.
 
 On some Linux distributions, there are issues with Qt4 and hence it may not be
 possible to build OpenStructure with GUI support at all. This is for instance
-known to be an issue with boost versions >= 1.62.
-
-An additional problem arises for gcc versions >= 6. There an extra flag is
-required to use the C++98 standard:
-
-.. code-block:: bash
-
-  cmake . -DOPTIMIZE=ON -DENABLE_INFO=OFF -DCMAKE_CXX_FLAGS='-std=c++98'
-
-We hope to support Qt5 and C++11 in the next OpenStructure release.
+known to be an issue with boost versions >= 1.62. We hope to support Qt5 in the
+next OpenStructure release.
 
 
 **Ubuntu 16.04 with GUI**
@@ -342,12 +342,11 @@ All the dependencies can be installed from the package manager as follows:
   sudo dnf install cmake eigen3-devel boost-devel libpng-devel python2-devel \
                    fftw-devel libtiff-devel
 
-Fedora 26 has gcc 7 and boost 1.63 by default. Hence, we will need to disable
-Qt4, the GUI and add the extra flag described above:
+Here, we compile a version without GUI as follows:
 
 .. code-block:: bash
 
-  cmake . -DOPTIMIZE=ON -DENABLE_INFO=OFF -DCMAKE_CXX_FLAGS='-std=c++98'
+  cmake . -DOPTIMIZE=ON -DENABLE_INFO=OFF
 
 
 **macOS with Homebrew without GUI**
diff --git a/modules/gfx/src/scene.cc b/modules/gfx/src/scene.cc
index 5c1e9cbb7149115b219ceccc8e88698fa5e70e97..98b47c9cbb16ed2477cb5fb0fd001bcaee8f0c57 100644
--- a/modules/gfx/src/scene.cc
+++ b/modules/gfx/src/scene.cc
@@ -1097,7 +1097,7 @@ bool Scene::HasNode(const String& name) const
 {
   FindNode fn(name);
   this->Apply(fn);
-  return fn.node;
+  return static_cast<bool>(fn.node);
 }
 
 void Scene::Apply(const InputEvent& e, bool request_redraw)
diff --git a/modules/gfx/src/texture.hh b/modules/gfx/src/texture.hh
index aa3fec45f8f0a5d288d977dd290e40210a9c5253..cf25b9d4f83a8c86d442d4b82db0ac0346d8b433 100644
--- a/modules/gfx/src/texture.hh
+++ b/modules/gfx/src/texture.hh
@@ -51,7 +51,7 @@ public:
 
   Texture(const Bitmap& b);
 
-  bool IsValid() const {return d_;}
+  bool IsValid() const {return static_cast<bool>(d_);}
 
   float* data() {return &d_[0];}
   
diff --git a/modules/gui/src/data_viewer/data_viewer.hh b/modules/gui/src/data_viewer/data_viewer.hh
index ca6893edc34895fde6df19e2b1344ea11affcb98..c1f18847ec36c270de0d93183342c8427dd604a1 100644
--- a/modules/gui/src/data_viewer/data_viewer.hh
+++ b/modules/gui/src/data_viewer/data_viewer.hh
@@ -25,6 +25,8 @@
 #ifndef IMG_GUI_DATA_VIEWER_H
 #define IMG_GUI_DATA_VIEWER_H
 
+#ifndef Q_MOC_RUN
+
 #include <map>
 
 #include <ost/base.hh>
@@ -42,6 +44,9 @@
 #include "fft_panel.hh"
 #include <ost/gui/module_config.hh>
 
+#endif
+
+
 //fw decl
 class QLabel;
 
diff --git a/modules/gui/src/data_viewer/data_viewer_panel_base.hh b/modules/gui/src/data_viewer/data_viewer_panel_base.hh
index 699efd372cad4dc5b0a9f26a205b4487f5b33168..be3ac007ec7550c4242e8cbc607654c5eca12fea 100644
--- a/modules/gui/src/data_viewer/data_viewer_panel_base.hh
+++ b/modules/gui/src/data_viewer/data_viewer_panel_base.hh
@@ -26,10 +26,10 @@
 #define DATA_VIEWER_PANEL_BASE_HH_
 
 #include <map>    
-#include <boost/shared_ptr.hpp>
-
 
+#ifndef Q_MOC_RUN
 
+#include <boost/shared_ptr.hpp>
 #include <ost/base.hh>
 #include <ost/img/extent.hh>
 #include <ost/img/data_observer.hh>
@@ -41,6 +41,8 @@
 
 #include <ost/img/normalizer_impl.hh>
 
+#endif
+
 #include <QWidget>
 #include <QCursor>
 #include <QMenu>
diff --git a/modules/gui/src/data_viewer/fft_panel.hh b/modules/gui/src/data_viewer/fft_panel.hh
index 75a30fed87e8a1018887d10a56fd7e0465674ce0..ac0df0f56e39d148ab16735fe6e7fe3ce082244e 100644
--- a/modules/gui/src/data_viewer/fft_panel.hh
+++ b/modules/gui/src/data_viewer/fft_panel.hh
@@ -24,10 +24,14 @@
   Author: Andreas Schenk
 */
 
+#ifndef Q_MOC_RUN
+
 #include <ost/gui/module_config.hh>
 #include <ost/img/data_observer.hh>
 #include "data_viewer_panel_base.hh"
 
+#endif
+
 namespace ost { namespace img { namespace gui {
 
 class ParentDataObserver: public DataObserver
diff --git a/modules/gui/src/file_type_dialog.hh b/modules/gui/src/file_type_dialog.hh
index f1fe6ce5e5fc6e856b15c091b65d03e41309bda2..270d35131d96f77ebfee4de64420abe4f933590d 100644
--- a/modules/gui/src/file_type_dialog.hh
+++ b/modules/gui/src/file_type_dialog.hh
@@ -25,7 +25,7 @@
 
 #include <ost/gui/module_config.hh>
 
-
+#ifndef Q_MOC_RUN
 
 #include <ost/io/mol/entity_io_handler.hh>
 #include <ost/io/seq/sequence_io_handler.hh>
@@ -34,6 +34,8 @@
 #include <ost/io/img/map_io_handler.hh>
 #endif
 
+#endif
+
 #include <QDialog>
 #include <QMetaType>
 
diff --git a/modules/img/base/src/base.hh b/modules/img/base/src/base.hh
index 0122a07371705cdc89808f6f3966ab67cb9e2e6a..f5d73f5f4f1aa18f897d6e4710d5108ae7380bbc 100644
--- a/modules/img/base/src/base.hh
+++ b/modules/img/base/src/base.hh
@@ -44,21 +44,11 @@
 #pragma warning(disable:4231)
 #endif
 
-
 #ifdef IRIX
-inline Real round(Real x) {return rint(x);}
-inline float roundf(float x) {return (float)rint((Real)x);}
 using  std::cos;
 using  std::sin;
 #endif
 
-#ifndef round_function
-#define round_function
-#ifndef round
-inline int round(Real x) {return floor(x+0.5);}
-#endif
-#endif
-
 namespace ost { namespace img {
 
   // String is not always predefined
diff --git a/modules/img/base/src/image_handle.cc b/modules/img/base/src/image_handle.cc
index c0059be0b852490c5e50306fffab6fca18f23208..fe84239cd624c3c3e6584151ddff9ecf1dffca3d 100644
--- a/modules/img/base/src/image_handle.cc
+++ b/modules/img/base/src/image_handle.cc
@@ -153,7 +153,7 @@ void ImageHandle::Reset(const Extent &e, DataType type,DataDomain dom)
 
 bool ImageHandle::IsValid() const
 {
-  return impl_;
+  return static_cast<bool>(impl_);
 }
 
 long ImageHandle::MemSize() const
diff --git a/modules/info/src/info_handle.cc b/modules/info/src/info_handle.cc
index 9b6ecbc49a7dac0caddd7347743f9dd634cdee2c..263ae222c9ed261b0de6061631dfb94e98753bee 100644
--- a/modules/info/src/info_handle.cc
+++ b/modules/info/src/info_handle.cc
@@ -74,7 +74,7 @@ void InfoHandle::Export(const String& file) const
 
 bool InfoHandle::IsValid() const
 {
-  return impl_;
+  return static_cast<bool>(impl_);
 }
 
 
diff --git a/modules/io/doc/io.rst b/modules/io/doc/io.rst
index 580c6650a8ead19afc4d372bc65b106fdb7027f2..ccff89f86ea08263adbed07e6bf45d598541594a 100644
--- a/modules/io/doc/io.rst
+++ b/modules/io/doc/io.rst
@@ -142,8 +142,8 @@ Loading sequence or alignment files
     myseq = io.LoadSequence('seq.fasta')
     # for obtaining a SequenceList
     seqlist = io.LoadSequenceList('seqs.fasta')
-    # or for multiple aligned fasta files use
-    aln = io.LoadAlignment('algnm.aln',format="clustal")
+    # or for multiple alignments (here from CLUSTAL)
+    aln = io.LoadAlignment('algnm.aln', format="clustal")
     
   For a list of file formats supported by :func:`LoadSequence` see
   :doc:`sequence_formats`.
@@ -212,11 +212,11 @@ Saving Sequence Data
   .. code-block:: python
 
     # recognizes FASTA file by file extension
-    io.SaveSequence(myseq,'seq.fasta')
+    io.SaveSequence(myseq, 'seq.fasta')
     # for saving a SequenceList
-    io.SaveSequenceList(seqlist,'seqlist.fasta')
-    # or multiple aligned fasta files
-    io.SaveAlignment(aln,'algnm.aln',format="clustal")
+    io.SaveSequenceList(seqlist, 'seqlist.fasta')
+    # or for multiple alignments (here in FASTA format)
+    io.SaveAlignment(aln, 'aln.fasta')
     
   For a list of file formats supported by :func:`SaveSequence` see
   :doc:`sequence_formats`.
diff --git a/modules/io/doc/mmcif.rst b/modules/io/doc/mmcif.rst
index 23dc32f461a08cf1633362e60bb3b9c54ecb09af..479085543c91e249c98731dcbf613c21548b5565 100644
--- a/modules/io/doc/mmcif.rst
+++ b/modules/io/doc/mmcif.rst
@@ -373,6 +373,33 @@ of the annotation available.
     Also available as :meth:`GetTitle`. May also be modified by
     :meth:`SetTitle`.
 
+  .. attribute:: book_publisher
+
+    Name of publisher of the citation, relevant for books and book chapters.
+
+    Also available as :meth:`GetBookPublisher` and :meth:`SetBookPublisher`.
+
+  .. attribute:: book_publisher_city
+
+    City of the publisher of the citation, relevant for books and book
+    chapters.
+
+    Also available as :meth:`GetBookPublisherCity` and
+    :meth:`SetBookPublisherCity`.
+
+  .. attribute:: citation_type
+
+    Defines where a citation was published: journal, book or unknown.
+
+    Also available as :meth:`GetCitationType`. May also be modified by
+    :meth:`SetCitationType` with values from :class:`MMCifInfoCType`. For
+    convenience, the setters :meth:`SetCitationTypeJournal`,
+    :meth:`SetCitationTypeBook` and :meth:`SetCitationTypeUnknown` exist.
+
+    For checking the type of a citation, :meth:`IsCitationTypeJournal`,
+    :meth:`IsCitationTypeBook` and :meth:`IsCitationTypeUnknown` can be used.
+
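+    A minimal usage sketch from Python (assuming ``from ost import io``; a
+    default-constructed citation starts with type unknown, as in the unit
+    tests):
+
+    .. code-block:: python
+
+      cit = io.MMCifInfoCitation()
+      # freshly created citations are of unknown type
+      assert cit.IsCitationTypeUnknown()
+      cit.SetCitationTypeBook()
+      assert cit.citation_type == io.MMCifInfoCType.Book
+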
   .. attribute:: authors
 
     Stores a :class:`~ost.StringList` of authors.
@@ -460,6 +487,54 @@ of the annotation available.
 
     See :attr:`title`
 
+  .. method:: GetBookPublisher
+
+    See :attr:`book_publisher`
+
+  .. method:: SetBookPublisher
+
+    See :attr:`book_publisher`
+
+  .. method:: GetBookPublisherCity
+
+    See :attr:`book_publisher_city`
+
+  .. method:: SetBookPublisherCity
+
+    See :attr:`book_publisher_city`
+
+  .. method:: GetCitationType()
+
+    See :attr:`citation_type`
+
+  .. method:: SetCitationType(publication_type)
+
+    See :attr:`citation_type`
+
+  .. method:: SetCitationTypeJournal()
+
+    See :attr:`citation_type`
+
+  .. method:: SetCitationTypeBook()
+
+    See :attr:`citation_type`
+
+  .. method:: SetCitationTypeUnknown()
+
+    See :attr:`citation_type`
+
+  .. method:: IsCitationTypeJournal()
+
+    See :attr:`citation_type`
+
+  .. method:: IsCitationTypeBook()
+
+    See :attr:`citation_type`
+
+  .. method:: IsCitationTypeUnknown()
+
+    See :attr:`citation_type`
+
   .. method:: GetAuthorList()
 
     See :attr:`authors`
diff --git a/modules/io/pymod/export_mmcif_io.cc b/modules/io/pymod/export_mmcif_io.cc
index dd90843e39fa7a32ed661975b208ea6325e7f7db..806031da6e317f6496775e0f0b0c1ef6d613b5ed 100644
--- a/modules/io/pymod/export_mmcif_io.cc
+++ b/modules/io/pymod/export_mmcif_io.cc
@@ -49,6 +49,12 @@ void export_mmcif_io()
                                    return_value_policy<copy_const_reference>()))
     ;
 
+  enum_<MMCifInfoCitation::MMCifInfoCType>("MMCifInfoCType")
+    .value("Journal", MMCifInfoCitation::JOURNAL)
+    .value("Book", MMCifInfoCitation::BOOK)
+    .value("Unknown", MMCifInfoCitation::UNKNOWN)
+  ;
+
   class_<MMCifInfoCitation>("MMCifInfoCitation", init<>())
     .def("SetID", &MMCifInfoCitation::SetID)
     .def("GetID", &MMCifInfoCitation::GetID)
@@ -72,6 +78,18 @@ void export_mmcif_io()
     .def("GetYear", &MMCifInfoCitation::GetYear)
     .def("SetTitle", &MMCifInfoCitation::SetTitle)
     .def("GetTitle", &MMCifInfoCitation::GetTitle)
+    .def("SetBookPublisher", &MMCifInfoCitation::SetBookPublisher)
+    .def("GetBookPublisher", &MMCifInfoCitation::GetBookPublisher)
+    .def("SetBookPublisherCity", &MMCifInfoCitation::SetBookPublisherCity)
+    .def("GetBookPublisherCity", &MMCifInfoCitation::GetBookPublisherCity)
+    .def("SetCitationType", &MMCifInfoCitation::SetCitationType)
+    .def("SetCitationTypeJournal", &MMCifInfoCitation::SetCitationTypeJournal)
+    .def("SetCitationTypeBook", &MMCifInfoCitation::SetCitationTypeBook)
+    .def("SetCitationTypeUnknown", &MMCifInfoCitation::SetCitationTypeUnknown)
+    .def("GetCitationType", &MMCifInfoCitation::GetCitationType)
+    .def("IsCitationTypeJournal", &MMCifInfoCitation::IsCitationTypeJournal)
+    .def("IsCitationTypeBook", &MMCifInfoCitation::IsCitationTypeBook)
+    .def("IsCitationTypeUnknown", &MMCifInfoCitation::IsCitationTypeUnknown)
     .def("SetAuthorList", &MMCifInfoCitation::SetAuthorList)
     .def("GetAuthorList", make_function(&MMCifInfoCitation::GetAuthorList,
                                    return_value_policy<copy_const_reference>()))
@@ -94,9 +112,18 @@ void export_mmcif_io()
                   &MMCifInfoCitation::SetYear)
     .add_property("title", &MMCifInfoCitation::GetTitle,
                   &MMCifInfoCitation::SetTitle)
+    .add_property("book_publisher", &MMCifInfoCitation::GetBookPublisher,
+                  &MMCifInfoCitation::SetBookPublisher)
+    .add_property("book_publisher_city",
+                  &MMCifInfoCitation::GetBookPublisherCity,
+                  &MMCifInfoCitation::SetBookPublisherCity)
+    .add_property("citation_type", &MMCifInfoCitation::GetCitationType,
+                  &MMCifInfoCitation::SetCitationType)
     .add_property("authors", make_function(&MMCifInfoCitation::GetAuthorList,
                                    return_value_policy<copy_const_reference>()),
                   &MMCifInfoCitation::SetAuthorList)
+    .def("__eq__", &MMCifInfoCitation::operator==) 
+    .def("__ne__", &MMCifInfoCitation::operator!=)
   ;
 
   class_<std::vector<MMCifInfoCitation> >("MMCifInfoCitationList", init<>())
diff --git a/modules/io/pymod/wrap_io.cc b/modules/io/pymod/wrap_io.cc
index a5abde8cc5966a3ddd7bc21b41416117f663641f..55e0dd61c3c2a0646bc63c4e810cf05a3fc50343 100644
--- a/modules/io/pymod/wrap_io.cc
+++ b/modules/io/pymod/wrap_io.cc
@@ -45,12 +45,6 @@ BOOST_PYTHON_FUNCTION_OVERLOADS(load_mentity_ov,LoadManagedEntity,2,3);
 BOOST_PYTHON_FUNCTION_OVERLOADS(load_surface_ov,LoadSurface,1,2);
 BOOST_PYTHON_FUNCTION_OVERLOADS(load_msurface_ov,LoadManagedSurface,2,3);
 
-BOOST_PYTHON_FUNCTION_OVERLOADS(load_alignment_ov,
-                                LoadAlignment, 1, 2)
-                                
-BOOST_PYTHON_FUNCTION_OVERLOADS(save_alignment_ov,
-                                SaveAlignment, 2, 3)
-
 
 void save_ent_view(const mol::EntityView& en, const String& filename,
                    const String& format="auto") {
@@ -95,25 +89,31 @@ BOOST_PYTHON_MODULE(_ost_io)
       save_entity_handle_ov(args("entity", "filename", "format")));
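+  // The alignment and sequence I/O functions below now take explicit keyword
+  // arguments with a default format of "auto", replacing the removed
+  // BOOST_PYTHON_FUNCTION_OVERLOADS macros.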
 
   def("LoadAlignment", &LoadAlignment,
-      load_alignment_ov(args("filename", "format")));
+      (arg("filename"), arg("format")="auto"));
   def("AlignmentFromString", &AlignmentFromString);
   def("AlignmentFromStream", &AlignmentFromStream);
   def("AlignmentToString", &AlignmentToString);
-  def("LoadSequenceList", &LoadSequenceList, arg("format")="auto");
-  def("LoadSequence", &LoadSequence, arg("format")="auto");
+  def("LoadSequenceList", &LoadSequenceList,
+      (arg("filename"), arg("format")="auto"));
+  def("LoadSequence", &LoadSequence,
+      (arg("filename"), arg("format")="auto"));
   def("SequenceListFromString", &SequenceListFromString);
   def("SequenceFromString", &SequenceFromString);  
-  def("SaveAlignment", &SaveAlignment, arg("format")="auto");
+  def("SaveAlignment", &SaveAlignment,
+      (arg("aln"), arg("filename"), arg("format")="auto"));
   
-  def("LoadSequenceProfile", &LoadSequenceProfile, arg("format")="auto");
+  def("LoadSequenceProfile", &LoadSequenceProfile,
+      (arg("filename"), arg("format")="auto"));
 
   def("LoadSurface",LoadSurface,load_surface_ov());
   def("LoadManagedSurface",LoadManagedSurface,load_msurface_ov());
 
   def("SequenceToString", &SequenceToString);
   def("SequenceListToString", &SequenceListToString); 
-  def("SaveSequenceList", &SaveSequenceList, arg("format")="auto");
-  def("SaveSequence", &SaveSequence, arg("format")="auto");
+  def("SaveSequenceList", &SaveSequenceList,
+      (arg("seq_list"), arg("filename"), arg("format")="auto"));
+  def("SaveSequence", &SaveSequence,
+      (arg("sequence"), arg("filename"), arg("format")="auto"));
   def("LoadSDF", &LoadSDF);
 
   def("LoadCRD", &LoadCRD);
diff --git a/modules/io/src/mol/mmcif_info.hh b/modules/io/src/mol/mmcif_info.hh
index 0a00a838258de949f47c1796a9c70c93b284ef1e..68d60318f9fe1328eca94b7fc33daca14236749e 100644
--- a/modules/io/src/mol/mmcif_info.hh
+++ b/modules/io/src/mol/mmcif_info.hh
@@ -390,11 +390,17 @@ private:
 
 class DLLEXPORT_OST_IO MMCifInfoCitation {
 public:
+  /// \enum types of citations
+  typedef enum {
+    JOURNAL,
+    BOOK,
+    UNKNOWN
+  } MMCifInfoCType;
+
   /// \brief Create a citation.
   MMCifInfoCitation(): id_(""), where_(UNKNOWN), cas_(""), published_in_(""),
     volume_(""), page_first_(""), page_last_(""), doi_(""), pubmed_(0),
-    year_(0), title_("") {};
-
+    year_(0), title_(""), book_publisher_(""), book_publisher_city_("") {};
   /// \brief Set ID
   ///
   /// \param id ID
@@ -463,12 +469,35 @@ public:
   /// \return last page
   String GetPageLast() const { return page_last_; }
 
+  /// \brief Set the publisher for a book
+  ///
+  /// \param publisher
+  void SetBookPublisher(String publisher) { book_publisher_ = publisher; }
+
+  /// \brief Get the publisher of a book
+  ///
+  /// \return publisher
+  String GetBookPublisher() const { return book_publisher_; }
+
+  /// \brief Set the publisher city for a book
+  ///
+  /// \param publisher_city
+  void SetBookPublisherCity(String publisher_city) {
+    book_publisher_city_ = publisher_city;
+  }
+
+  /// \brief Get the publisher city of a book
+  ///
+  /// \return publisher_city
+  String GetBookPublisherCity() const { return book_publisher_city_; }
+
   /// \brief Set the DOI of a document
   ///
   /// \param doi
   void SetDOI(String doi) { doi_ = doi; }
 
-
   /// \brief Get the DOI of a document
   ///
   /// \return DOI
@@ -506,6 +535,54 @@ public:
   /// \return title
   String GetTitle() const { return title_; }
 
+  /// \brief Set the type of a publication
+  ///
+  /// \param publication_type
+  void SetCitationType(MMCifInfoCType publication_type) {
+    where_ = publication_type;
+  }
+
+  /// \brief Set the type of a publication to journal
+  void SetCitationTypeJournal() {
+    where_ = MMCifInfoCitation::JOURNAL;
+  }
+
+  /// \brief Set the type of a publication to book
+  void SetCitationTypeBook() {
+    where_ = MMCifInfoCitation::BOOK;
+  }
+
+  /// \brief Set the type of a publication to unknown
+  void SetCitationTypeUnknown() {
+    where_ = MMCifInfoCitation::UNKNOWN;
+  }
+
+  /// \brief Get the type of a publication
+  ///
+  /// \return citation type
+  MMCifInfoCType GetCitationType() const { return where_; }
+
+  /// \brief Check a citation to be published in a journal
+  ///
+  /// \return true or false
+  bool IsCitationTypeJournal() const {
+    return where_ == MMCifInfoCitation::JOURNAL;
+  }
+
+  /// \brief Check a citation to be published in a book
+  ///
+  /// \return true or false
+  bool IsCitationTypeBook() const {
+    return where_ == MMCifInfoCitation::BOOK;
+  }
+
+  /// \brief Check if the citation type is unknown
+  ///
+  /// \return true or false
+  bool IsCitationTypeUnknown() const {
+    return where_ == MMCifInfoCitation::UNKNOWN;
+  }
+
   /// \brief Set the list of authors
   ///
   /// \param list
@@ -562,6 +639,18 @@ public:
         StringRef(cit.title_.c_str(), cit.title_.length())) {
       return false;
     }
+    if (StringRef(this->book_publisher_.c_str(),
+                  this->book_publisher_.length()) !=
+        StringRef(cit.book_publisher_.c_str(),
+                  cit.book_publisher_.length())) {
+      return false;
+    }
+    if (StringRef(this->book_publisher_city_.c_str(),
+                  this->book_publisher_city_.length()) !=
+        StringRef(cit.book_publisher_city_.c_str(),
+                  cit.book_publisher_city_.length())) {
+      return false;
+    }
     if (this->authors_ != cit.authors_) {
       return false;
     }
@@ -574,26 +663,21 @@ public:
   }
 
 private:
-  /// \enum types of citations
-  typedef enum {
-    JOURNAL,
-    BOOK,
-    UNKNOWN
-  } MMCifInfoCType;
-
-  String              id_;           ///< internal identifier
-  MMCifInfoCType      where_;        ///< journal or book?
-  String              cas_;          ///< CAS identifier
-  String              isbn_;         ///< ISBN no. of medium
-  String              published_in_; ///< book title or full journal name
-  String              volume_;       ///< journal volume
-  String              page_first_;   ///< first page
-  String              page_last_;    ///< last page
-  String              doi_;          ///< DOI identifier
-  int                 pubmed_;       ///< accession no.
-  int                 year_;         ///< year of publication
-  String              title_;        ///< title of the publication
-  std::vector<String> authors_;       ///< author information
+  String              id_;                  ///< internal identifier
+  MMCifInfoCType      where_;               ///< journal or book?
+  String              cas_;                 ///< CAS identifier
+  String              isbn_;                ///< ISBN no. of medium
+  String              published_in_;        ///< book title or journal name
+  String              volume_;              ///< journal volume
+  String              page_first_;          ///< first page
+  String              page_last_;           ///< last page
+  String              doi_;                 ///< DOI identifier
+  int                 pubmed_;              ///< accession no.
+  int                 year_;                ///< year of publication
+  String              title_;               ///< title of the publication
+  String              book_publisher_;      ///< name of publisher
+  String              book_publisher_city_; ///< location of publisher
+  std::vector<String> authors_;             ///< author information
 };
 
 /// \brief container class for information on obsolete entries
diff --git a/modules/io/src/mol/mmcif_reader.cc b/modules/io/src/mol/mmcif_reader.cc
index 905e28442bab8bd68300426bdeb99df8c6d4cb0e..5ed09e1edbec1cb892781eeffc5cd54feccb5d80 100644
--- a/modules/io/src/mol/mmcif_reader.cc
+++ b/modules/io/src/mol/mmcif_reader.cc
@@ -97,26 +97,12 @@ void MMCifReader::SetRestrictChains(const String& restrict_chains)
   restrict_chains_ = restrict_chains;
 }
 
-bool MMCifReader::IsValidPDBIdent(const StringRef& pdbid)
-{
-  if (pdbid.length() == PDBID_LEN && isdigit(pdbid[0])) {
-    return true;
-  }
-  return false;
-}
-
 bool MMCifReader::OnBeginData(const StringRef& data_name) 
 {
   LOG_DEBUG("MCIFFReader: " << profile_);
   Profile profile_import("MMCifReader::OnBeginData");
 
-  // check for PDB id
-  if (!this->IsValidPDBIdent(data_name)) {
-    throw IOException(this->FormatDiagnostic(STAR_DIAG_ERROR,
-                         "No valid PDB id found for data block, read instead \'"
-                                             + data_name.str() + "\'",
-                                             this->GetCurrentLinenum()));
-  }
+  // IDs in mmCIF files can be any string, so no restrictions here
 
   this->ClearState();
 
@@ -191,6 +177,8 @@ bool MMCifReader::OnBeginLoop(const StarLoopDesc& header)
     indices_[ABSTRACT_ID_CAS]         = header.GetIndex("abstract_id_CAS");
     indices_[BOOK_ID_ISBN]            = header.GetIndex("book_id_ISBN");
     indices_[BOOK_TITLE]              = header.GetIndex("book_title");
+    indices_[BOOK_PUBLISHER]          = header.GetIndex("book_publisher");
+    indices_[BOOK_PUBLISHER_CITY]     = header.GetIndex("book_publisher_city");
     indices_[JOURNAL_ABBREV]          = header.GetIndex("journal_abbrev");
     indices_[YEAR]                    = header.GetIndex("year");
     indices_[TITLE]                   = header.GetIndex("title");
@@ -509,6 +497,9 @@ void MMCifReader::ParseAndAddAtom(const std::vector<StringRef>& columns)
 
   if(!curr_residue_) { // unit test
     update_residue=true;
+    subst_res_id_ = cif_chain_name +
+                    columns[indices_[AUTH_SEQ_ID]].str() +
+                    columns[indices_[PDBX_PDB_INS_CODE]].str();
   } else if (!valid_res_num) {
     if (indices_[AUTH_SEQ_ID] != -1 &&
         indices_[PDBX_PDB_INS_CODE] != -1) {
@@ -794,31 +785,35 @@ void MMCifReader::ParseCitation(const std::vector<StringRef>& columns)
       cit.SetISBN(columns[indices_[BOOK_ID_ISBN]].str());
     }
   }
+  if (indices_[JOURNAL_ABBREV] != -1) {
+    if ((columns[indices_[JOURNAL_ABBREV]] != StringRef(".", 1)) &&
+        (columns[indices_[JOURNAL_ABBREV]][0] != '?')) {
+      cit.SetPublishedIn(columns[indices_[JOURNAL_ABBREV]].str());
+      cit.SetCitationTypeJournal();
+    }
+  }
   if (indices_[BOOK_TITLE] != -1) {
     // this is only set in few PDB entries and RCSB overrides it with
     // the journal_abbrev for their citations
     // -> as of August 1, 2017, 5 entries known: 5b1j, 5b1k, 5fax, 5fbz, 5ffn
     //    -> all those have journal_abbrev set
     if ((columns[indices_[BOOK_TITLE]] != StringRef(".", 1)) &&
-        (columns[indices_[BOOK_TITLE]][0]!='?')) {
+        (columns[indices_[BOOK_TITLE]][0] != '?')) {
+      // This will override published_in if already set by journal_abbrev. We
+      // consider this OK for now since usually the book title is copied to
+      // the journal_abbrev attribute.
       cit.SetPublishedIn(columns[indices_[BOOK_TITLE]].str());
-    }
-  }
-  if (indices_[JOURNAL_ABBREV] != -1) {
-    if (columns[indices_[JOURNAL_ABBREV]] != StringRef(".", 1)) {
-      const String journal_abbrev = columns[indices_[JOURNAL_ABBREV]].str();
-      const String published_in = cit.GetPublishedIn();
-      if (published_in.length() > 0 && published_in != journal_abbrev) {
-        LOG_WARNING(this->FormatDiagnostic(STAR_DIAG_WARNING,
-                                           "The 'published_in' field was "
-                                           "already set by citation.book_title "
-                                           "'" + published_in + "'! "
-                                           "This will be overwritten by "
-                                           "citation.journal_abbrev '" +
-                                           journal_abbrev + "'.",
-                                           this->GetCurrentLinenum()));
+      cit.SetCitationTypeBook();
+      
+      // In theory, book_publisher and book_publisher_city are only set for
+      // books and book chapters, so we only try to fetch them if the citation
+      // type points to book.
+      if (indices_[BOOK_PUBLISHER] != -1) {
+        cit.SetBookPublisher(columns[indices_[BOOK_PUBLISHER]].str());
+      }
+      if (indices_[BOOK_PUBLISHER_CITY] != -1) {
+        cit.SetBookPublisherCity(columns[indices_[BOOK_PUBLISHER_CITY]].str());
       }
-      cit.SetPublishedIn(journal_abbrev);
     }
   }
   if (indices_[JOURNAL_VOLUME] != -1) {
diff --git a/modules/io/src/mol/mmcif_reader.hh b/modules/io/src/mol/mmcif_reader.hh
index edd249f38755077ac6b4669cec838de3d69e04c0..4a58d567b866815f7f27ff3ff720f718dd36ab2f 100644
--- a/modules/io/src/mol/mmcif_reader.hh
+++ b/modules/io/src/mol/mmcif_reader.hh
@@ -187,13 +187,6 @@ protected:
      }
   } // tested
 
-  /// \brief Check a PDB id to be of length 4 and start with a digit
-  ///
-  /// \param pdbid putative PDB id
-  ///
-  /// \return true for a valid id, false otherwise
-  bool IsValidPDBIdent(const StringRef& pdbid);
-
   /// \brief fetch values identifying atoms
   ///
   /// \param[in]  columns data row
@@ -353,8 +346,7 @@ protected:
 private:
   /// \enum magic numbers of this class
   typedef enum {
-    PDBID_LEN=4,         ///< length of a PDB id
-    MAX_ITEMS_IN_ROW=18, ///< count for possible items in a loop row
+    MAX_ITEMS_IN_ROW=18 ///< count for possible items in a loop row
   } MMCifMagicNos;
 
   /// \enum items of the atom_site category
@@ -400,6 +392,8 @@ private:
     ABSTRACT_ID_CAS,              ///< CAS identifier
     BOOK_ID_ISBN,                 ///< ISBN code assigned, if book cited
     BOOK_TITLE,                   ///< title of book storing the citation
+    BOOK_PUBLISHER,               ///< name of publisher of a book
+    BOOK_PUBLISHER_CITY,          ///< location of a publisher of a book
     JOURNAL_ABBREV,               ///< abbreviated journal title for articles
     JOURNAL_VOLUME,               ///< volume of cited journal
     PAGE_FIRST,                   ///< first page of citation
diff --git a/modules/io/tests/test_io_mmcif.py b/modules/io/tests/test_io_mmcif.py
index c7cb807bcca47a281aaa0b9e46b3a41724a6dcf9..dfd12ee30a384b35ca5bcf0e545c9a3528624396 100644
--- a/modules/io/tests/test_io_mmcif.py
+++ b/modules/io/tests/test_io_mmcif.py
@@ -40,6 +40,15 @@ class TestMMCifInfo(unittest.TestCase):
     # test title setting/ getting
     c.SetTitle('Foo')
     self.assertEquals(c.GetTitle(), 'Foo')
+    # test book_publisher set & get
+    c.SetBookPublisher("Hugo")
+    self.assertEquals(c.GetBookPublisher(), "Hugo")
+    # test book_publisher_city set & get
+    c.SetBookPublisherCity("Basel")
+    self.assertEquals(c.book_publisher_city, "Basel")
+    # test citation type
+    self.assertTrue(c.IsCitationTypeUnknown())
+    self.assertEquals(c.citation_type, io.MMCifInfoCType.Unknown)
     # test authors setting/ getting
     s = ost.StringList()
     s.append('Foo')
diff --git a/modules/io/tests/test_mmcif_info.cc b/modules/io/tests/test_mmcif_info.cc
index 2eb111336ddca9de0379430b91ba5b9906dfad9d..6707ff930cf29eded775b6f85d3ad57c0dc19cb4 100644
--- a/modules/io/tests/test_mmcif_info.cc
+++ b/modules/io/tests/test_mmcif_info.cc
@@ -72,7 +72,10 @@ BOOST_AUTO_TEST_CASE(mmcif_info_citation)
   cit.SetPubMed(815);
   cit.SetYear(815);
   cit.SetTitle("Foo");
+  cit.SetBookPublisher("Brackelmann and Sons");
+  cit.SetBookPublisherCity("Stenkelfeld");
   cit.SetAuthorList(author_list);
+  cit.SetCitationType(MMCifInfoCitation::JOURNAL);
   author_list.clear();
 
   BOOST_CHECK(cit.GetID() == "ID");
@@ -86,9 +89,23 @@ BOOST_AUTO_TEST_CASE(mmcif_info_citation)
   BOOST_CHECK(cit.GetPubMed() == 815);
   BOOST_CHECK(cit.GetYear() == 815);
   BOOST_CHECK(cit.GetTitle() == "Foo");
+  BOOST_CHECK(cit.GetBookPublisher() == "Brackelmann and Sons");
+  BOOST_CHECK(cit.GetBookPublisherCity() == "Stenkelfeld");
+  BOOST_CHECK(cit.GetCitationType() == MMCifInfoCitation::JOURNAL);
+  BOOST_CHECK(cit.IsCitationTypeJournal() == true);
+  BOOST_CHECK(cit.IsCitationTypeBook() == false);
+  BOOST_CHECK(cit.IsCitationTypeUnknown() == false);
   author_list = cit.GetAuthorList();
   BOOST_CHECK(author_list.back() == "Kabel, H.");
 
+  // checking all possible variants of citation type
+  cit.SetCitationTypeJournal();
+  BOOST_CHECK(cit.IsCitationTypeJournal() == true);
+  cit.SetCitationTypeBook();
+  BOOST_CHECK(cit.IsCitationTypeBook() == true);
+  cit.SetCitationTypeUnknown();
+  BOOST_CHECK(cit.IsCitationTypeUnknown() == true);
+
   BOOST_TEST_MESSAGE("  done.");
   BOOST_TEST_MESSAGE("  trying to add everything to an info object");
   MMCifInfo info = MMCifInfo();
diff --git a/modules/io/tests/test_mmcif_reader.cc b/modules/io/tests/test_mmcif_reader.cc
index 2cfac7fcef03a65e57d20a6658ad07f90b339ab6..df82b4ec85df1c455da8a5aa09652ac58a4b77b3 100644
--- a/modules/io/tests/test_mmcif_reader.cc
+++ b/modules/io/tests/test_mmcif_reader.cc
@@ -51,7 +51,6 @@ public:
 
   using MMCifReader::OnBeginLoop;
   using MMCifReader::OnEndData;
-  using MMCifReader::IsValidPDBIdent;
   using MMCifReader::ParseAtomIdent;
   using MMCifReader::ParseAndAddAtom;
   using MMCifReader::ParseEntity;
@@ -113,29 +112,6 @@ conop::CompoundLibPtr SetDefaultCompoundLib() {
 
 BOOST_AUTO_TEST_SUITE( io );
 
-BOOST_AUTO_TEST_CASE(mmcif_isvalidpdbident)
-{
-  mol::EntityHandle eh=mol::CreateEntity();
-
-  // on changing the tests for a PDB id in mmcif files, extend this unit test
-  BOOST_TEST_MESSAGE("  Running mmcif_isvalidpdbident tests...");
-  std::ifstream s("testfiles/mmcif/atom_site.mmcif");
-  TestMMCifReaderProtected tmmcif_p(s, eh);
-  StringRef id = StringRef("1FOO", 4);
-  BOOST_TEST_MESSAGE("    Testing valid id ('"+ id.str() +"')...");
-  BOOST_CHECK(tmmcif_p.IsValidPDBIdent(id));
-  BOOST_TEST_MESSAGE("    done.");
-  id = StringRef("this is to long for a PDB id", 28);
-  BOOST_TEST_MESSAGE("    Testing oversized PDB id ('"+ id.str() +"')...");
-  BOOST_CHECK(!tmmcif_p.IsValidPDBIdent(id));
-  BOOST_TEST_MESSAGE("    done.");
-  id = StringRef("nFOO", 4);
-  BOOST_TEST_MESSAGE("    Testing PDB id with missing number ('"
-                     + id.str() + "')...");
-  BOOST_CHECK(!tmmcif_p.IsValidPDBIdent(id));
-  BOOST_TEST_MESSAGE("    done.");
-}
-
 BOOST_AUTO_TEST_CASE(mmcif_trystoreidx)
 {
   mol::EntityHandle eh = mol::CreateEntity();
@@ -583,14 +559,18 @@ BOOST_AUTO_TEST_CASE(mmcif_citation_tests)
   tmmcif_h.SetCategory(StringRef("citation", 8));
   tmmcif_h.Add(StringRef("id", 2));
   tmmcif_h.Add(StringRef("year", 4));
+  tmmcif_h.Add(StringRef("book_publisher_city", 19));
   tmmcif_h.Add(StringRef("book_title", 10));
+  tmmcif_h.Add(StringRef("book_publisher", 14));
   tmmcif_h.Add(StringRef("journal_abbrev", 14));
   tmmcif_p.OnBeginLoop(tmmcif_h);
 
   // ensure that we use book_title if no journal given (no RCSB use of this)
   columns.push_back(StringRef("Foo", 3));
   columns.push_back(StringRef("1979", 4));
+  columns.push_back(StringRef("The restaurant", 14));
   columns.push_back(StringRef("The Guide", 9));
+  columns.push_back(StringRef("Doug", 4));
   columns.push_back(StringRef(".", 1));
 
   BOOST_CHECK_NO_THROW(tmmcif_p.ParseCitation(columns));
@@ -598,27 +578,33 @@ BOOST_AUTO_TEST_CASE(mmcif_citation_tests)
   BOOST_CHECK_EQUAL(cit.GetID(), String("Foo"));
   BOOST_CHECK_EQUAL(cit.GetYear(), 1979);
   BOOST_CHECK_EQUAL(cit.GetPublishedIn(), String("The Guide"));
+  BOOST_CHECK_EQUAL(cit.GetBookPublisher(), String("Doug"));
+  BOOST_CHECK_EQUAL(cit.GetBookPublisherCity(), String("The restaurant"));
+  BOOST_CHECK_EQUAL(cit.IsCitationTypeBook(), true);
 
   // ensure that we override book_title if not properly given
   columns.pop_back();
   columns.pop_back();
+  columns.pop_back();
   columns.push_back(StringRef(".", 1));
+  columns.push_back(StringRef("Doug", 4));
   columns.push_back(StringRef("Hitch", 5));
 
   BOOST_CHECK_NO_THROW(tmmcif_p.ParseCitation(columns));
   BOOST_CHECK_EQUAL(tmmcif_p.GetInfo().GetCitations().back().GetPublishedIn(),
                     String("Hitch"));
 
-  // ensure that we override book_title if journal given
-  // (def. behavior on RCSB webpage)
+  // ensure that we override journal if book_title given
+  columns.pop_back();
   columns.pop_back();
   columns.pop_back();
   columns.push_back(StringRef("The Guide", 9));
+  columns.push_back(StringRef("Doug", 4));
   columns.push_back(StringRef("Hitch", 5));
 
   BOOST_CHECK_NO_THROW(tmmcif_p.ParseCitation(columns));
   BOOST_CHECK_EQUAL(tmmcif_p.GetInfo().GetCitations().back().GetPublishedIn(),
-                    String("Hitch"));
+                    String("The Guide"));
 
   BOOST_TEST_MESSAGE("  done.");
 }
@@ -1327,7 +1313,7 @@ BOOST_AUTO_TEST_CASE(mmcif_test_chain_mappings)
   BOOST_TEST_MESSAGE("  Running mmcif_test_chain_mappings tests...");
   
   // check compound lib
-  bool compound_lib_available = SetDefaultCompoundLib();
+  bool compound_lib_available = static_cast<bool>(SetDefaultCompoundLib());
 
   // load data
   mol::EntityHandle eh = mol::CreateEntity();
diff --git a/modules/mol/alg/doc/molalg.rst b/modules/mol/alg/doc/molalg.rst
index 81f3e77643c0eae354f789288741fd2d43de2b94..4636a4ce45ee911c944366545be4bc10947d82ba 100644
--- a/modules/mol/alg/doc/molalg.rst
+++ b/modules/mol/alg/doc/molalg.rst
@@ -559,8 +559,7 @@ Local Distance Test scores (lDDT, DRMSD)
     """Run lDDT from within script."""
     from ost.io import LoadPDB
     from ost.mol.alg import (CleanlDDTReferences,
-    			 lDDTSettings, lDDTScorer)
-    from ost.io import ReadStereoChemicalPropsFile
+                             lDDTSettings, lDDTScorer)
 
     ent_full = LoadPDB('3ia3', remote=True)
     model_view = ent_full.Select('cname=A')
diff --git a/modules/mol/alg/src/filter_clashes.cc b/modules/mol/alg/src/filter_clashes.cc
index 455d1e7dbd8114aeb7ef972cc50691de2e9a0e8c..e0d2bee05a80d0959b3528776d501151f2b388c0 100644
--- a/modules/mol/alg/src/filter_clashes.cc
+++ b/modules/mol/alg/src/filter_clashes.cc
@@ -118,7 +118,7 @@ std::pair<Real,Real> ClashingDistances::GetClashingDistance(const String& ele1,c
   std::map <String,std::pair<Real,Real> >::const_iterator find_ci= min_distance_.find(key);
   if (find_ci == min_distance_.end()) {
       std::stringstream serr;
-      serr << "Entry for distance " << stkey <<  " not found in the parameter table";   
+      serr << "Entry for distance " << key <<  " not found in the parameter table";
       throw Error(serr.str());
   }    
   return find_ci->second;
@@ -220,7 +220,7 @@ StereoChemicalParams FillStereoChemicalParams(const String& header, std::vector<
             if (second_line_str_vec.size()!=4) {
               std::cout << "The number of elements in one of the lines is wrong" << std::endl;
               return StereoChemicalParams();
-            } 
+            }
             StringRef item = second_line_str_vec[0];
             String res = second_line_str_vec[1].str();          
             std::pair<bool,float> parse_value = second_line_str_vec[2].to_float();
@@ -231,13 +231,13 @@ StereoChemicalParams FillStereoChemicalParams(const String& header, std::vector<
             } else {
               std::cout << "One of the values in the third column is not a number" << std::endl;
               return StereoChemicalParams();
-            };
+            }
             if (parse_stddev.first==true) {
               stddev=static_cast<Real>(parse_stddev.second);
             } else {
               std::cout << "One of the values in the fourth column is not a number" << std::endl;
               return StereoChemicalParams();
-            };
+            }
             std::vector<StringRef> split_item = item.split('-');
             String rearranged_item;
             if (split_item.size() == 2) {
@@ -264,7 +264,7 @@ StereoChemicalParams FillStereoChemicalParams(const String& header, std::vector<
             } else {
               std::cout << "One of the strings describing the parameter has the wrong format" << std::endl;
               return StereoChemicalParams();
-            }            
+            }
             table.SetParam(rearranged_item,res,value,stddev);
           }
           line_iter++;
@@ -306,7 +306,7 @@ ClashingDistances FillClashingDistances(std::vector<String>& stereo_chemical_pro
             if (second_line_str_vec.size()!=3) {
               std::cout << "The number of elements in one of the lines is wrong" << std::endl;
               return ClashingDistances();
-            } 
+            }
             String item = second_line_str_vec[0].str();
 
             std::pair<bool,float> parse_value = second_line_str_vec[1].to_float();
@@ -317,7 +317,7 @@ ClashingDistances FillClashingDistances(std::vector<String>& stereo_chemical_pro
             } else {
               std::cout << "One of the distance values is not a number" << std::endl;
               return ClashingDistances();
-            };
+            }
             if (parse_stddev.first==true) {
               stddev=static_cast<Real>(parse_stddev.second);
             } else {
@@ -329,7 +329,7 @@ ClashingDistances FillClashingDistances(std::vector<String>& stereo_chemical_pro
             if (itemsr.size() != 3) {
               std::cout << "One of the strings describing the interacting atoms has the wrong format" << std::endl;
               return ClashingDistances();
-            }  
+            }
             String ele1=eles[0].str();
             String ele2=eles[1].str();
             if (ele2 < ele1) {
@@ -340,7 +340,7 @@ ClashingDistances FillClashingDistances(std::vector<String>& stereo_chemical_pro
           }
           line_iter++;
         }
-      }
+      }  
     }
     line_iter++;    
   }
@@ -421,10 +421,15 @@ std::pair<EntityView,StereoChemistryInfo> CheckStereoChemistry(const EntityView&
               remove_sc=true;
               if (always_remove_bb==true) {
                 remove_bb=true;
-              }
-              String name=atom.GetName();
-              if (name=="CA" || name=="N" || name=="O" || name=="C") {
-                remove_bb=true;
+              } else {
+                // we need to check both atom names since the order is random!
+                // -> for angles and clashes this is not needed
+                String name1 = atom.GetName();
+                String name2 = other_atom.GetName();
+                if (name1=="CA" || name1=="N" || name1=="O" || name1=="C" ||
+                    name2=="CA" || name2=="N" || name2=="O" || name2=="C") {
+                  remove_bb=true;
+                }
               }
             } else {
               LOG_VERBOSE("BOND:" << " " << res.GetChain() << " " << res.GetName() << " " << res.GetNumber() << " " << bond_str << " " << min_length << " " << max_length << " " << blength << " " << zscore << " " << "PASS")
@@ -629,7 +634,7 @@ std::pair<EntityView,ClashingInfo> FilterClashes(const EntityView& ent, const Cl
     
     if (remove_bb) {
       LOG_VERBOSE("ACTION: removing whole residue " << res);
-      res.SetBoolProp("steric_clash",true);
+      res.SetBoolProp("steric_clash_backbone", true);
       continue;
     }
     if (remove_sc) {
@@ -642,7 +647,7 @@ std::pair<EntityView,ClashingInfo> FilterClashes(const EntityView& ent, const Cl
          filtered.AddAtom(atom);
        }
       }
-      res.SetBoolProp("steric_clash",true);
+      res.SetBoolProp("steric_clash_sidechain", true);
       continue;
     }
     filtered.AddResidue(res, ViewAddFlag::INCLUDE_ATOMS);
diff --git a/modules/mol/alg/src/lddt.cc b/modules/mol/alg/src/lddt.cc
index 4a9c095f53eff72b5d5ed9dedebd8c7e28761fb4..a64f5bb192d5abab6777b750786858d1fd477d16 100644
--- a/modules/mol/alg/src/lddt.cc
+++ b/modules/mol/alg/src/lddt.cc
@@ -361,17 +361,17 @@ int main (int argc, char **argv)
     }
 
     // Check consistency
-  for (std::vector<EntityView>::const_iterator ref_list_it = ref_list.begin();
-       ref_list_it != ref_list.end(); ++ref_list_it) {
-    bool cons_check = ResidueNamesMatch(model_view,*ref_list_it, ignore_consistency_checks);
-    if (cons_check==false) {
-      if (ignore_consistency_checks==false) {
-        throw std::runtime_error("Residue names in model and in reference structure(s) are inconsistent.");            
-      } else {
-        LOG_WARNING("Residue names in model and in reference structure(s) are inconsistent.");
-      }   
-    } 
-  }
+    for (std::vector<EntityView>::const_iterator ref_list_it = ref_list.begin();
+         ref_list_it != ref_list.end(); ++ref_list_it) {
+      bool cons_check = ResidueNamesMatch(model_view,*ref_list_it, ignore_consistency_checks);
+      if (cons_check==false) {
+        if (ignore_consistency_checks==false) {
+          throw std::runtime_error("Residue names in model and in reference structure(s) are inconsistent.");
+        } else {
+          LOG_WARNING("Residue names in model and in reference structure(s) are inconsistent.");
+        }
+      }
+    }
 
     // computes the lddt score   
     LocalDistDiffTest(model_view, ref_list, glob_dist_list, settings);
diff --git a/modules/mol/base/src/bond_handle.cc b/modules/mol/base/src/bond_handle.cc
index 3be1b88bd7017c7005e2912a71d689bb398576bc..9a68a860dc5db33eea4a28fdc807407acb889ec1 100644
--- a/modules/mol/base/src/bond_handle.cc
+++ b/modules/mol/base/src/bond_handle.cc
@@ -47,7 +47,7 @@ BondHandle::operator bool() const
 }
 
 bool BondHandle::IsValid() const {
-  return impl_;
+  return static_cast<bool>(impl_);
 }
 
 AtomHandle BondHandle::GetFirst() const
diff --git a/modules/mol/base/src/impl/atom_impl.cc b/modules/mol/base/src/impl/atom_impl.cc
index 7bc585b14662360f400d241ca77e4a48ee8b1184..f0852b73845a7ae3181109154203f40a3435d64c 100644
--- a/modules/mol/base/src/impl/atom_impl.cc
+++ b/modules/mol/base/src/impl/atom_impl.cc
@@ -227,7 +227,7 @@ std::ostream& operator<<(std::ostream& o, const AtomImplPtr ap)
 }
 
 bool ConnectorExists(const AtomImplPtr& a, const AtomImplPtr& b) {
-  return GetConnector(a, b);
+  return static_cast<bool>(GetConnector(a, b));
 }
 
 ConnectorImplP GetConnector(const AtomImplPtr& a, const AtomImplPtr& b) {
diff --git a/modules/mol/base/src/surface_handle.hh b/modules/mol/base/src/surface_handle.hh
index 3524375d36ab919f072d3a8d19fe78d4e74d387b..a11b3a91e9768535716c30e25f5026e8fdc04c44 100644
--- a/modules/mol/base/src/surface_handle.hh
+++ b/modules/mol/base/src/surface_handle.hh
@@ -70,7 +70,7 @@ public:
   // flip normals
   void Invert();
 
-  bool IsValid() const {return impl_;}
+  bool IsValid() const {return static_cast<bool>(impl_);}
 
   bool operator==(const SurfaceHandle& ref) const { return impl_==ref.impl_; }
 
diff --git a/singularity/README.rst b/singularity/README.rst
index f3e57b7760263c762341e99cf562e21593bf132f..f90cf47553a96a3bf9eaab68970fcf91f2b109ff 100644
--- a/singularity/README.rst
+++ b/singularity/README.rst
@@ -9,14 +9,19 @@ In order to build OST Singularity image:
 .. code-block:: bash
 
   cd <OST ROOT>/singularity
-  sudo singularity build ost.img Singularity.1.7.1
+  sudo singularity build ost.img Singularity.1.8.0
 
 .. note::
 
   Running singularity build command requires root permissions (sudo).
 
 One can choose any name for an image. For the purpose of this file we will assume
-that the image name is `ost.img`.
+that the image name is ``ost.img``.
+
+Here we only keep the recipe for the most recent version of OpenStructure. To
+build an image for a different version, you can either adapt the
+``OPENSTRUCTURE_VERSION`` variable in the recipe or look in the git history for
+an older recipe.
 
 Available apps
 --------------
@@ -24,7 +29,7 @@ Available apps
 This container includes the following apps:
  * **OST** - OpenStructure binary
  * **IPython** - OST-powered iPython shell
- * **Notebook** - A Jupyter notebook palyground with OST and nglview
+ * **Notebook** - A Jupyter notebook playground with OST and nglview
  * **lDDT** - The Local Distance Difference Test
  * **Molck** - Molecular checker
 * **ChemdictTool** - Create or update a compound library
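+
+For example, to start one of these apps (assuming the image is called
+``ost.img`` as above and using the standard ``singularity run --app`` syntax):
+
+.. code-block:: bash
+
+  singularity run --app IPython ost.img
+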
@@ -58,5 +63,5 @@ Then (in the same terminal window) to invoke IPython app one can just type:
 
   ost_ipython
 
-To make the alias permanent put it into your `.bashrc` file or whatever file you
-use to store the aliases.
\ No newline at end of file
+To make the alias permanent put it into your ``.bashrc`` file or whatever file
+you use to store the aliases.
diff --git a/singularity/Singularity.1.7.1 b/singularity/Singularity.1.8.0
similarity index 97%
rename from singularity/Singularity.1.7.1
rename to singularity/Singularity.1.8.0
index 8a96d0a8f0ed575e5b2a5c0ad007076cddd03a4b..5a1e2600f40eecf259152800311d4686f124bed0 100644
--- a/singularity/Singularity.1.7.1
+++ b/singularity/Singularity.1.8.0
@@ -15,7 +15,9 @@ ln -sf /bin/bash /bin/sh
 export SRC_FOLDER="/usr/local/src"
 export CPUS_FOR_MAKE=8
 export PYTHONPATH="/usr/local/lib64/python2.7/site-packages:${PYTHONPATH}"
-export OPENSTRUCTURE_VERSION="1.7.1"
+# When changing OPENSTRUCTURE_VERSION, make sure to also change it in the
+# environment section of this recipe (this file).
+export OPENSTRUCTURE_VERSION="1.8.0"
 export OPENSTRUCTURE_SHARE="/usr/local/share/ost"
 export MSMS_VERSION="2.6.1"
 export OPENMM_VERSION="7.1.1"
@@ -65,7 +67,7 @@ locale-gen en_US.UTF-8
 
 # INSTALL SOME PYTHON PACKAGES GLOBALY
 ######################################
-pip install --upgrade pip && pip install --no-cache-dir numpy==1.10.4 \
+pip install --no-cache-dir numpy==1.10.4 \
                                                         scipy==1.0.0 \
                                                         pandas==0.22.0
 
@@ -79,7 +81,7 @@ virtualenv --system-site-packages $VIRTUALENV_DIR
 # INSTALL REQUIRED PYTHON PACKAGES
 ##################################
 pip install jupyter==1.0.0 \
-            nglview==1.0
+            nglview==1.1.6
 
 # DOWNLOAD AND INSTALL MSMS
 ##############
@@ -110,7 +112,7 @@ fi
 ##############
 cd ${SRC_FOLDER}
 if [ ! -f dssp-${DSSP_VERSION}.tgz ]; then
-    wget ftp://ftp.cmbi.ru.nl/pub/software/dssp/dssp-${DSSP_VERSION}.tgz
+    wget ftp://ftp.cmbi.umcn.nl/pub/molbio/software/dssp-2/dssp-${DSSP_VERSION}.tgz
     tar -xvzf dssp-${DSSP_VERSION}.tgz
     cd dssp-${DSSP_VERSION}
     make -j ${CPUS_FOR_MAKE}
@@ -221,7 +223,7 @@ cd /home
 # ENVIRONMENT
 ##############################################################################
 export OST_ROOT="/usr/local"
-export OPENSTRUCTURE_VERSION="1.7.1"
+export OPENSTRUCTURE_VERSION="1.8.0"
 export PYTHONPATH="/usr/local/lib64/python2.7/site-packages:${PYTHONPATH}"
 export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/local/lib64"
 export QT_X11_NO_MITSHM=1