--- /dev/null
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import cPickle
+import os.path
+
+from pythongrid import KybJob, processJobs
+
+from qpalma_main import *
+
+
+def get_slices(dataset_size, num_nodes):
+    """
+    Split the index range [0, dataset_size) into num_nodes contiguous
+    (begin, end) pairs, one per node.
+    """
+    all_instances = []
+
+    part = dataset_size / num_nodes
+    begin = 0
+    end = 0
+
+    for idx in range(1, num_nodes + 1):
+        begin = end
+        # the last slice absorbs the remainder of the integer division
+        if idx == num_nodes:
+            end = dataset_size
+        else:
+            end = begin + part
+
+        all_instances.append((begin, end))
+
+    return all_instances
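+
+# Example: get_slices(10, 3) yields [(0, 3), (3, 6), (6, 10)]; the final
+# slice is enlarged so that every index is covered exactly once.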
+
+
+def makeJobs(run, dataset_fn, chunks, param):
+    """
+    Create one KybJob per chunk; each job will call predict() on its
+    chunk of the prediction set.
+    """
+
+    jobs = []
+
+    for current_chunk in chunks:
+        current_job = KybJob(predict, [run, current_chunk, param])
+        current_job.h_vmem = '5.0G'
+        current_job.express = 'True'
+
+        print "job: ", current_job.nativeSpecification
+
+        jobs.append(current_job)
+
+    return jobs
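+
+# Note: h_vmem and express are resource hints that pythongrid renders into the
+# job's nativeSpecification (presumably Sun Grid Engine options such as
+# -l h_vmem=5.0G).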
+
+
+def create_and_submit():
+    """
+    Load the run and parameter objects, split the prediction keys into
+    chunks and submit one prediction job per chunk to the cluster.
+    """
+
+    jp = os.path.join
+
+    run_dir = '/fml/ag-raetsch/home/fabio/tmp/newest_run/alignment/run_enable_quality_scores_+_enable_splice_signals_+_enable_intron_length_+'
+
+    run = cPickle.load(open(jp(run_dir, 'run_obj.pickle'), 'rb'))
+    param = cPickle.load(open(jp(run_dir, 'param_526.pickle'), 'rb'))
+
+    # placeholders -- these still have to be filled in
+    dataset_fn = ''
+    prediction_keys = ''
+
+    num_splits = 10
+    slices = get_slices(len(prediction_keys), num_splits)
+    chunks = []
+    for (begin, end) in slices:
+        chunks.append(prediction_keys[begin:end])
+
+    functionJobs = makeJobs(run, dataset_fn, chunks, param)
+
+    print "output ret field in each job before sending it onto the cluster"
+    for (i, job) in enumerate(functionJobs):
+        print "Job with id: ", i, "- ret: ", job.ret
+
+    print ""
+    print "sending function jobs to cluster"
+    print ""
+
+    processedFunctionJobs = processJobs(functionJobs)
+
+    print "ret fields AFTER execution on cluster"
+    for (i, job) in enumerate(processedFunctionJobs):
+        print "Job with id: ", i, "- ret: ", job.ret
+
+
+def predict(run, prediction_set, param):
+    """
+    Run the QPalma prediction on one chunk of the prediction set; this is
+    the function executed by each cluster job.
+    """
+
+    qp = QPalma()
+    qp.predict(run, prediction_set, param)
+
+
+if __name__ == '__main__':
+    create_and_submit()