@@ -106,7 +106,7 @@ def s3_psub(path):
106106 return db_opts , env , tax_filter_opts , tax_report_opts
107107
108108 def pipeline (self , db , inBams , outReports = None , outReads = None ,
109- lockMemory = None , filterThreshold = None , numThreads = None ):
109+ lockMemory = None , filterThreshold = None , num_threads = None ):
110110 assert outReads is not None or outReports is not None
111111
112112 n_bams = len (inBams )
@@ -116,7 +116,7 @@ def pipeline(self, db, inBams, outReports=None, outReads=None,
116116 raise Exception ("--outReports specified with {} output files, which does not match the number of input bams ({})" .format (len (outReports ), n_bams ))
117117 if outReads and len (outReads ) != n_bams :
118118 raise Exception ("--outReads specified with {} output files, which does not match the number of input bams ({})" .format (len (outReads ), n_bams ))
119- threads = util .misc .sanitize_thread_count (numThreads )
119+ threads = util .misc .sanitize_thread_count (num_threads )
120120
121121 with util .file .fifo (n_pipes ) as pipes :
122122 fastq_pipes = pipes [:n_bams * 2 ]
@@ -183,7 +183,7 @@ def pipeline(self, db, inBams, outReports=None, outReads=None,
183183 raise subprocess .CalledProcessError (bam2fq_ps .returncode , "SamToFastqTool().execute({})" .format (in_bam ))
184184
185185
186- def classify (self , inBam , db , outReads , numThreads = None ):
186+ def classify (self , inBam , db , outReads , num_threads = None ):
187187 """Classify input reads (bam)
188188
189189 Args:
@@ -210,7 +210,7 @@ def classify(self, inBam, db, outReads, numThreads=None):
210210 JVMmemory = picard .jvmMemDefault )
211211
212212 opts = {
213- '--threads' : util .misc .sanitize_thread_count (numThreads ),
213+ '--threads' : util .misc .sanitize_thread_count (num_threads ),
214214 '--fastq-input' : None ,
215215 '--gzip-compressed' : None ,
216216 }