# submit_structure_profile_calculations.py
# Generates and submits one qsub job per chunk of a StructureDB to compute
# structural sequence profiles (3.75 KiB snippet, originally hosted on GitLab).
import os, math
from promod3 import loop

# Path to the StructureDB for which you want to calculate the profiles
# (relative to the current working directory!)
structure_db_path = "structure_db.dat"

# Path to the StructureDB from which the structural information is extracted.
# If the StructureDB above is very large, you might want to give a smaller
# database here to limit CPU time. A database with around 5000 chains should
# be sufficient.
# (relative to the current working directory!)
structure_db_source_path = "structure_db.dat"

# The profiles that get generated end up here:
# (relative to the current working directory!)
out_dir = "profile_dbs"

# Number of cluster jobs the work is split into; each job processes one
# contiguous chunk of coord entries from the StructureDB.
num_jobs = 100

# All stdout, stderr and submission_script files will go here
# (relative to the current working directory!)
submission_workspace = "submission_workspace"


# Path to the pm bash script.
# Once we have the latest and greatest ProMod3 module, this should not
# be necessary anymore and we can simply call pm
pm_path = "/scicore/home/schwede/studga00/prog/promod3_dev/build/stage/bin/pm"


# Scheduler resource limits and notification address used in the generated
# submission scripts (SGE "#$" directives).
max_runtime = "13:00:00"
membycore = "3G"
my_email = "gabriel.studer@unibas.ch"

# Environment modules loaded by every job; we only need ost and promod3
# as soon as they're released...
required_modules = ["Boost/1.53.0-goolf-1.4.10-Python-2.7.5",
                    "Python/2.7.5-goolf-1.4.10",
                    "OpenMM/7.1.1-goolf-1.4.10-Python-2.7.5"]

# Manually-built ost / promod3 Python packages, prepended to PYTHONPATH in
# each job. They won't be necessary anymore as soon as there are ost
# and promod3 modules.
ost_python_path = "/scicore/home/schwede/studga00/prog/ost_dev/build/stage/lib64/python2.7/site-packages"
promod_python_path = "/scicore/home/schwede/studga00/prog/promod3_dev/build/stage/lib64/python2.7/site-packages"


#################################################################
# NO EDITING BELOW THIS POINT (EXCEPT ON DIFFERENT SYSTEMS ;) ) #
#################################################################

# Create the workspace and output directories if they don't exist yet.
# (os.makedirs(..., exist_ok=True) is avoided on purpose: the target
# environment runs Python 2.7 modules.)
if not os.path.exists(submission_workspace):
  os.makedirs(submission_workspace)

if not os.path.exists(out_dir):
  os.makedirs(out_dir)

# Split the coord entries of the StructureDB into num_jobs contiguous chunks.
structure_db = loop.StructureDB.Load(structure_db_path)
num_coords = structure_db.GetNumCoords()
# Exact integer ceiling division — avoids the float chunk_size that
# math.ceil(float(...)) produced and the int() conversions it required.
chunk_size = (num_coords + num_jobs - 1) // num_jobs
current_start = 0
current_end = chunk_size

# Build and submit one SGE job per chunk [start, end) of coord indices.
for job_idx in range(num_jobs):

  # Clamp the last chunk to the actual number of coords.
  start = int(current_start)
  end = int(min(current_end, num_coords))

  # The chunk size is estimated with a ceil => we might get away with even
  # fewer jobs, so stop as soon as a chunk would be empty.
  if start >= end:
    break

  current_start += chunk_size
  current_end += chunk_size

  # Command executed by the job: pm runs create_structure_profiles.py on
  # the half-open index range [start, end) and writes one .dat per job.
  cmd = [pm_path,
         os.path.join(os.getcwd(), "create_structure_profiles.py"),
         os.path.join(os.getcwd(), structure_db_path),
         os.path.join(os.getcwd(), structure_db_source_path),
         str(start), str(end),
         os.path.join(os.getcwd(), out_dir, str(job_idx) + ".dat")]

  stdout_path = os.path.join(os.getcwd(), submission_workspace, str(job_idx) + ".stdout")
  stderr_path = os.path.join(os.getcwd(), submission_workspace, str(job_idx) + ".stderr")

  # Assemble the submission script: SGE directives, module loads, env setup,
  # then the actual command.
  content = list()
  content.append("#!/bin/bash")
  content.append("#$ -o " + stdout_path)
  content.append("#$ -e " + stderr_path)
  content.append("#$ -l runtime=" + max_runtime)
  content.append("#$ -l membycore=" + membycore)
  content.append("#$ -m as -M " + my_email)

  for module in required_modules:
    content.append("ml " + module)

  # BUGFIX: the original appended the literal string "PYTHONPATH" instead of
  # expanding the existing value — "$PYTHONPATH" keeps prior entries intact.
  content.append("export PYTHONPATH=" + ost_python_path + ":$PYTHONPATH")
  content.append("export PYTHONPATH=" + promod_python_path + ":$PYTHONPATH")
  content.append(' '.join(cmd))

  s_script_path = os.path.join(submission_workspace, "sub_" + str(job_idx) + ".sh")
  # Context manager guarantees the script is flushed and closed before qsub
  # reads it; the trailing newline keeps the shell script POSIX-clean.
  with open(s_script_path, 'w') as outfile:
    outfile.write('\n'.join(content) + '\n')

  # FIRE!!!
  # NOTE(review): subprocess.call(["qsub", s_script_path]) would be the safer
  # idiom, but os.system is kept to avoid adding a dependency here.
  os.system("qsub " + s_script_path)