From f6c68607fda7d22b85ed38a7b7453f4c1b4e5799 Mon Sep 17 00:00:00 2001 From: Joep de Ligt Date: Thu, 4 Jul 2019 15:25:27 +1200 Subject: [PATCH] Adding HotFix for 'not found' error #11 This refers to the issue reported in #11. I encountered this error myself and made this quick fix to get the program to finish. @rwick Might be good to incorporate a fix since it currently dies rather suddenly. I haven't figured out yet why the reads (I've got 587 out of 2.4 mil) are not being classified in the first place (the couple that I blasted seem like real data). If I figure it out, I'll report it in a separate "non classified reads" issue about this. --- deepbinner/bin.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/deepbinner/bin.py b/deepbinner/bin.py index ba298b3..3ed0c69 100644 --- a/deepbinner/bin.py +++ b/deepbinner/bin.py @@ -113,6 +113,8 @@ def write_read_files(reads_filename, classifications, out_filenames, input_type) out_files = {} for class_name, out_file in out_filenames.items(): out_files[class_name] = open(out_file, 'wt') + # HotFix: add a outfile/class_name for reads that are "not found" in classifications (see KeyError a bit down) and issue #11 + out_files['not found'] = open("unfindables.txt", 'wt') count, interval = 0, random.randint(90, 110) with open_func(reads_filename, 'rt') as reads: @@ -190,7 +192,7 @@ def print_summary_and_zip(bin_counts, out_filenames): gzip = 'gzip' print('Gzipping reads:') print(' Barcode Reads File') - class_names = out_filenames.keys() + class_names = list(out_filenames.keys()) # Relates to HotFix, this wasn't working because of "'odict_keys' object has no attribute 'append'" if 'not found' in bin_counts: class_names.append('not found') for class_name in class_names: