+++ /dev/null
-#! /usr/bin/env python
-# -*- coding: utf-8 -*-
-
-import os
-import os.path
-import pdb
-import sys
-from optparse import OptionParser
-jp = os.path.join
-
-def parseSettingsFile(filename):
- """
- """
- global_settings = {}
- for line in open(filename):
- if (not line.strip()) or line.startswith('#'):
- pass
- else:
- key, val = line.strip().replace(' ', '').split('=')
- global_settings[key] = val
-
- return global_settings
-
-
-
-def makeSettings(global_settings):
- """
-
- """
- assert os.path.exists(global_settings['result_dir']),'Error: You have to specify a existing result directory!'
- result_dir = global_settings['result_dir']
- global_settings['approximation_dir'] = jp(result_dir, 'approximation')
- global_settings['preproc_dir'] = jp(result_dir, 'preprocessing')
- global_settings['postproc_dir'] = jp(result_dir, 'postprocessing')
- global_settings['prediction_dir'] = jp(result_dir, 'prediction')
- global_settings['training_dir'] = jp(result_dir, 'training')
-
- for dir_name in ['approximation_dir', 'preproc_dir', 'postproc_dir',\
- 'prediction_dir', 'training_dir']:
- try:
- os.mkdir(global_settings[dir_name])
- continue
- except:
- print ('Error: There was a problem generating the subdirectory: %s' % dir_name)
-
- try:
- os.mkdir(global_settings['global_log_fn'])
- except:
- print ('Error: There was a problem generating the logfile %s' % global_settings['global_log_fn'])
-
- return global_settings
-
-
-
-def checkSettings(global_settings):
- for (key, val,) in global_settings.items():
- if key.endswith('_fn'):
- assert os.path.exists(val), 'Error: Path/File %s with value %s does not seem to exist!' % (key,val)
- if key.endswith('_dir'):
- assert os.path.exists(val), 'Error: Path/File %s with value %s does not seem to exist!' % (key,val)
-
- return True
-
-
-
-def parseSettings(filename):
- global_settings = parseSettingsFile(filename)
- global_settings = makeSettings(global_settings)
- assert checkSettings(global_settings),'Check your settings some entries were invalid!'
-
- return global_settings
import gridtools
-from utils import get_slices
+from utils import get_slices,split_file
jp = os.path.join
identify true spliced reads.
"""
- def __init__(self):
- ClusterTask.__init__(self)
+ def __init__(self,settings):
+ ClusterTask.__init__(self,settings)
def CreateJobs(self):
data_fname = jp(result_dir,'map.part_%d'%idx)
result_fname = jp(result_dir,'map.vm.part_%d.heuristic'%idx)
- current_job = KybJob(grid_heuristic.TaskStarter,[run_fname,data_fname,param_fname,result_fname])
+ current_job = KybJob(gridtools.TaskStarter,[run_fname,data_fname,param_fname,result_fname])
current_job.h_vmem = '25.0G'
#current_job.express = 'True'
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Written (W) 2008 Fabio De Bona
+# Copyright (C) 2008 Max-Planck-Society
+#
+# This file contains the main interface to the QPalma pipeline.
+#
#
# This module holds all functions for queries on the dna flat files and the
# splice score files.
--- /dev/null
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Written (W) 2008 Fabio De Bona
+# Copyright (C) 2008 Max-Planck-Society
+
+#
+# This file contains the main interface to the QPalma pipeline.
+#
+
+import os
+import os.path
+import sys
+
+jp = os.path.join
+
+
+def parseSettingsFile(filename):
+ """
+    This function parses all key/value pairs from the given filename
+ """
+ global_settings = {}
+ for line in open(filename):
+ if (not line.strip()) or line.startswith('#'):
+ pass
+ else:
+ key, val = line.strip().replace(' ', '').split('=')
+ global_settings[key] = val
+
+ return global_settings
+
+
+def makeSettings(global_settings):
+ """
+
+ """
+ assert os.path.exists(global_settings['result_dir']),'Error: You have to specify a existing result directory!'
+ result_dir = global_settings['result_dir']
+ global_settings['approximation_dir'] = jp(result_dir, 'approximation')
+ global_settings['preproc_dir'] = jp(result_dir, 'preprocessing')
+ global_settings['postproc_dir'] = jp(result_dir, 'postprocessing')
+ global_settings['prediction_dir'] = jp(result_dir, 'prediction')
+ global_settings['training_dir'] = jp(result_dir, 'training')
+
+ for dir_name in ['approximation_dir', 'preproc_dir', 'postproc_dir',\
+ 'prediction_dir', 'training_dir']:
+ try:
+ os.mkdir(global_settings[dir_name])
+ continue
+ except:
+ print ('Error: There was a problem generating the subdirectory: %s' % dir_name)
+
+ try:
+ os.mkdir(global_settings['global_log_fn'])
+ except:
+ print ('Error: There was a problem generating the logfile %s' % global_settings['global_log_fn'])
+
+ return global_settings
+
+
+def checkSettings(global_settings):
+ for (key, val,) in global_settings.items():
+ if key.endswith('_fn'):
+ assert os.path.exists(val), 'Error: Path/File %s with value %s does not seem to exist!' % (key,val)
+ if key.endswith('_dir'):
+ assert os.path.exists(val), 'Error: Path/File %s with value %s does not seem to exist!' % (key,val)
+
+ return True
+
+
+def parseSettings(filename):
+ global_settings = parseSettingsFile(filename)
+ global_settings = makeSettings(global_settings)
+ assert checkSettings(global_settings),'Check your settings some entries were invalid!'
+
+ return global_settings
import pdb
import sys
-from Utils import pprint_alignment
+from qpalma.utils import pprint_alignment
import palma.palma_utils as pu
from palma.output_formating import print_results
from qpalma.gridtools import ApproximationTask,PreprocessingTask
from qpalma.gridtools import AlignmentTask,PostprocessingTask
-from qpalma.SettingsParser import parseSettings
+from SettingsParser import parseSettings
Errormsg = """Usage is: python qpalma_pipeline.py <config filename>"""
"""
self.global_settings = parseSettings(filename)
- pdb.set_trace()
def training(self):
# the matches from the first seed finding run.
approx_task = ApproximationTask(self.global_settings)
- approx_task.createJobs()
- approx_task.submit()
- approx_task.checkIfTaskFinished()
+ approx_task.CreateJobs()
+ approx_task.Submit()
+ approx_task.CheckIfTaskFinished()
# After filtering combine the filtered matches from the first run and the
# found matches from the second run to a full dataset