Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,9 @@ class Generator # rubocop:disable Metrics/ClassLength
# Index_Barcode_Num column, i.e. Z0001 or Z097.
ULTIMA_TAG_GROUPS = {
'Ultima P1' => 1,
'Ultima P2' => 2
'Ultima P2' => 2,
'Ultima P3' => 3,
'Ultima P4' => 4
}.freeze

# Initializes the generator with the given batch.
Expand Down
201 changes: 201 additions & 0 deletions config/default_records/tag_groups/006_ultima_p3_and_p4.wip.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,201 @@
# Tag group "Ultima P3": 96 index-tag oligo sequences keyed by 1-based tag
# position. adapter_type_name links the group to the "Ultima" adapter type;
# visible controls whether the group is selectable in the UI.
# NOTE(review): indentation restored — the scraped copy had flattened the
# YAML block mapping, which is invalid; sequence values are unchanged.
Ultima P3:
  visible: true
  adapter_type_name: Ultima
  tags:
    1: CTGCACATTGTAGAT
    2: CTCATGGTATATGAT
    3: CTGCATGCTGGCGTGAT
    4: CGGCACGATGCTGAT
    5: CTCTTCATGAGCATGAT
    6: CTCATGGAGCATATGAT
    7: CATACTGCCTATGAT
    8: CAATGCATCTATATGAT
    9: CGAGCACAATGCATGAT
    10: CGGCAGCAGACTGAT
    11: CAACATGCACATCTGAT
    12: CGTGCATGGCATCTGAT
    13: CTACAGCAATGTGAT
    14: CATCAGTCCTGCGAT
    15: CAAGCGACATGCGAT
    16: CATGCTCAATATCTGAT
    17: CGACTCATGCCTGAT
    18: CATGGCATACTGCTGAT
    19: CAATCGCATCGTGAT
    20: CGGCACTGCAGTGAT
    21: CGGATCGCATGCGAT
    22: CTATTGCTCTGCATGAT
    23: CATCTGCGGCACATGAT
    24: CATCGATGCAGAATGAT
    25: CACATGACCAGCGAT
    26: CAATGCTGTAGTGAT
    27: CGACATGCACCTGAT
    28: CGCAAGCATGTCGAT
    29: CATGCTCGGCTGCTGAT
    30: CAAGTCTGCTCAGAT
    31: CATCATTGATCATCGAT
    32: CACTCACAATGCATGAT
    33: CATGTGGCTATCATGAT
    34: CATGTATGCGCAATGAT
    35: CTTGCGCTCTCAGAT
    36: CGCGCACAATATGAT
    37: CAGCGCACCAGCATGAT
    38: CTGCTATCTGGTGAT
    39: CGATATCATGGTGAT
    40: CATGCATGTGGATAGAT
    41: CTCTGCTCCTGCGAT
    42: CATCGCACCAGAGAT
    43: CAATCTGATATGATGAT
    44: CATACTGCAGGAGAT
    45: CTTCATGACAGAGAT
    46: CTACATCATGGCATGAT
    47: CGCTGTTGCATGATGAT
    48: CTTCACTCATGAGAT
    49: CATGCGCTTACAGAT
    50: CAGCATCTCTGCCTGAT
    51: CACTGCCATATCATGAT
    52: CTCAGCTGCGGCGAT
    53: CATGCAGAGCGCCTGAT
    54: CAATGCTATCGAGAT
    55: CAGATAAGAGCAGAT
    56: CAGAGCCACTCAGAT
    57: CATGTAATATCAGAT
    58: CAGCGCATGATCCTGAT
    59: CATAGATAGCCAGAT
    60: CACTAGCATGGCGAT
    61: CAATAGCATCGAGAT
    62: CTTCTGTGAGATGAT
    63: CTCTGCTCATTCATGAT
    64: CACAATAGCGATGAT
    65: CTGTAGGCATCTGAT
    66: CAACAGCTCTCTGAT
    67: CAGACACAATATGAT
    68: CTAGCAAGATCAGAT
    69: CTTGAGACATGAGAT
    70: CTCAGATGTCCAGAT
    71: CAGCCGCATGCACAGAT
    72: CTCGCATGTGGAGAT
    73: CACATGCATGTCCTGAT
    74: CGCGGCTGATATGAT
    75: CTGTGACAAGCTGAT
    76: CATGCGAGCATGGTGAT
    77: CAATACATGATCGAT
    78: CATGGACATATGCAGAT
    79: CAGCGCGCTGGCATGAT
    80: CATGCGTCACCTGAT
    81: CTGCATGTTGCTGAGAT
    82: CATCCTGCGATAGAT
    83: CAATATCTGAGTGAT
    84: CTGTGCAGGAGTGAT
    85: CGATGTGCCATAGAT
    86: CTCACATGGAGAGAT
    87: CAGAACATGTCAGAT
    88: CGCACAATGCGAGAT
    89: CATGCACAGAATGTGAT
    90: CGTATGGCAGCTGAT
    91: CTGATGGTGCTCATGAT
    92: CTCAGCCATGCGATGAT
    93: CATGCACAAGATCAGAT
    94: CTCTGTTGCTCAGAT
    95: CACAGCGCTCCAGAT
    96: CACAGAAGATGCGAT

# Tag group "Ultima P4": 96 index-tag oligo sequences keyed by 1-based tag
# position. adapter_type_name links the group to the "Ultima" adapter type;
# visible controls whether the group is selectable in the UI.
# NOTE(review): indentation restored — the scraped copy had flattened the
# YAML block mapping, which is invalid; sequence values are unchanged.
Ultima P4:
  visible: true
  adapter_type_name: Ultima
  tags:
    1: CATCATGCTCCGCTGAT
    2: CTTGCTATGAGCGAT
    3: CATGTATCAGGTGAT
    4: CATGCGCAATGTATGAT
    5: CATGTGAGCGGTGAT
    6: CGCACAAGTCATGAT
    7: CATGCAATACATGAGAT
    8: CATCTATCAGGCGAT
    9: CGAGCATCAGGTGAT
    10: CAAGACTGATATGAT
    11: CATGCGGAGTGAGAT
    12: CAGATATAGCCTGAT
    13: CAAGAGAGAGCTGAT
    14: CTATCAAGCTCAGAT
    15: CAGAATACATGCGAT
    16: CTCAAGCGCACAGAT
    17: CACACACAACATGAT
    18: CTTACATGTCATGAT
    19: CAGAAGAGATATGAT
    20: CAGAGCGCCGCAGAT
    21: CGCTCAACATGCGAT
    22: CATGACAGTAATGAT
    23: CATGCAATCACATCGAT
    24: CAGTTGATACATGAT
    25: CTGTGACATAATGAT
    26: CATAGTGCTGCAATGAT
    27: CACGAGCGCAATGAT
    28: CACAATGCAGATGTGAT
    29: CTGCGCAGCATCCTGAT
    30: CAAGACATCAGAGAT
    31: CATGCTTGTGTCATGAT
    32: CAGTGCGACAATGAT
    33: CATATGGTGACAGAT
    34: CAATGATAGTGTGAT
    35: CATAGCAGCAACATGAT
    36: CATGACACCAGTGAT
    37: CAATCTCAGCAGATGAT
    38: CTCGATGAGCCTGAT
    39: CAACACTCTGCAGAT
    40: CTTAGCATCACTGAT
    41: CTGTTGTGATCAGAT
    42: CAAGTGTGCGCAGAT
    43: CATGAGCATGGTCTGAT
    44: CACTGATGCATGGAGAT
    45: CTTATATCTCATGAT
    46: CATCAGCTGAAGATGAT
    47: CACACATCCAGCGAT
    48: CAACATGTGATCGAT
    49: CGGCTATGCATCGAT
    50: CAAGTGAGCTGCGAT
    51: CAATAGACAGCTGAT
    52: CAGTCTGATCCTGAT
    53: CTTGCATAGCGTGAT
    54: CTCGTGGCATGCATGAT
    55: CGCTGCCTGCTGCTGAT
    56: CATAGTTCATCTGAT
    57: CAATCAGCAGACGAT
    58: CATGAGCATGAGGCGAT
    59: CACATCAGTGGCGAT
    60: CTGCATGCACACCTGAT
    61: CTACTGGCAGCTGAT
    62: CTATTGATATGAGAT
    63: CTCATAGCCTGCATGAT
    64: CACATGCTCAATGCGAT
    65: CAACTCATATGCGAT
    66: CATGGCTGTCTCGAT
    67: CATCTAGCCATAGAT
    68: CAGTCAACAGCAGAT
    69: CTAGCGCAATGAGAT
    70: CATCATTAGCGCGAT
    71: CGCTTCATGCTGCAGAT
    72: CTGATGGCGCGCATGAT
    73: CTGATCTATGCAATGAT
    74: CACTGAATGTGCGAT
    75: CGCAATCTATCTGAT
    76: CGGCATGCTGTAGAT
    77: CTCATGTGTGGTGAT
    78: CTGCATATTGTGATGAT
    79: CGCATGTATGGCGAT
    80: CATATAACATGCATGAT
    81: CATATCTCCTGTGAT
    82: CTGATACAGAATGAT
    83: CTGTGCGCCATAGAT
    84: CAGAGCATGCTAATGAT
    85: CATAAGATGCGTGAT
    86: CATACAAGAGCTGAT
    87: CATGCTGAATGTGTGAT
    88: CTCATGATTACTGAT
    89: CATGACGCATTAGAT
    90: CGTGTGCAACATGAT
    91: CGGTGCATATCAGAT
    92: CTTCGATGCTGTGAT
    93: CAATAGATGTGAGAT
    94: CGATGCCATCTCATGAT
    95: CGATCACAAGCTGAT
    96: CTGTGATGTAATGAT
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Tag layout templates for the Ultima P3 and P4 tag groups. Keys are Ruby
# symbols (leading colon) because this file is loaded into a Ruby record
# loader; both templates walk wells of the plate in column order.
# NOTE(review): indentation restored — the scraped copy had flattened the
# YAML block mapping, which is invalid; values are unchanged.
Ultima P3:
  :tag_group_name: Ultima P3
  :direction_algorithm: TagLayout::InColumns
  :walking_algorithm: TagLayout::WalkWellsOfPlate
Ultima P4:
  :tag_group_name: Ultima P4
  :direction_algorithm: TagLayout::InColumns
  :walking_algorithm: TagLayout::WalkWellsOfPlate
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,8 @@
# First oligo sequences for the two tag groups.
let(:plate1_first_oligo) { 'CAGCTCGAATGCGAT' }
let(:plate2_first_oligo) { 'CAGTCAGTTGCAGAT' }
let(:plate3_first_oligo) { 'CTGCACATTGTAGAT' }
let(:plate4_first_oligo) { 'CATCATGCTCCGCTGAT' }

# Eagerly create tag groups and tags to get consistent IDs.
let!(:tag_group1) do
Expand All @@ -67,14 +69,28 @@
tg.tags.first.update!(oligo: plate2_first_oligo)
end
end
let(:tag_groups) { [tag_group1, tag_group2] }
let!(:tag_group3) do
create(:tag_group_with_tags, tag_count: 96, name: 'Ultima P3').tap do |tg|
# To test Z0193 matching with the oligo sequence.
tg.tags.first.update!(oligo: plate3_first_oligo)
end
end
let!(:tag_group4) do
create(:tag_group_with_tags, tag_count: 96, name: 'Ultima P4').tap do |tg|
# To test Z0289 matching with the oligo sequence.
tg.tags.first.update!(oligo: plate4_first_oligo)
end
end
let(:tag_groups) { [tag_group1, tag_group2, tag_group3, tag_group4] }

let(:request_type) { create(:ultima_sequencing) }
let(:pipeline) { create(:ultima_sequencing_pipeline, request_types: [request_type]) }
let(:batch) { create(:ultima_sequencing_batch, pipeline:, requests:) }
let(:requests) { [request1, request2] }
let(:requests) { [request1, request2, request3, request4] }
let(:request1) { create(:ultima_sequencing_request, asset: tube1.receptacle, request_type: request_type) }
let(:request2) { create(:ultima_sequencing_request, asset: tube2.receptacle, request_type: request_type) }
let(:request3) { create(:ultima_sequencing_request, asset: tube3.receptacle, request_type: request_type) }
let(:request4) { create(:ultima_sequencing_request, asset: tube4.receptacle, request_type: request_type) }

# Eagerly create tubes with aliquots to get consistent IDs.
let!(:tube1) do
Expand All @@ -91,9 +107,23 @@
create(:event, content: Time.zone.today.to_s, message: 'scanned in', family: 'scanned_into_lab', eventful: tube)
tube
end
let!(:tube3) do
receptacle = create(:receptacle)
tag_group3.tags.first(3).map { |tag| create(:aliquot, tag:, receptacle:) }
tube = create(:multiplexed_library_tube, receptacle:)
create(:event, content: Time.zone.today.to_s, message: 'scanned in', family: 'scanned_into_lab', eventful: tube)
tube
end
let!(:tube4) do
receptacle = create(:receptacle)
tag_group4.tags.first(3).map { |tag| create(:aliquot, tag:, receptacle:) }
tube = create(:multiplexed_library_tube, receptacle:)
create(:event, content: Time.zone.today.to_s, message: 'scanned in', family: 'scanned_into_lab', eventful: tube)
tube
end

# Expected mapping of tag groups to their respective 1-based plate numbers.
let(:tag_group_index_map) { { tag_group1 => 1, tag_group2 => 2 } }
let(:tag_group_index_map) { { tag_group1 => 1, tag_group2 => 2, tag_group3 => 3, tag_group4 => 4 } }

# Expected mapping of tags to their respective 1-based index numbers.
let(:tag_index_map) do
Expand Down Expand Up @@ -169,27 +199,47 @@ def map_description(map_id)
csv = "#{request2.id_wafer_lims}.csv"
"#{folder}/#{csv}"
end
let(:zip_entry3_name) do
folder = "batch_#{batch.id}_sample_sheets"
csv = "#{request3.id_wafer_lims}.csv"
"#{folder}/#{csv}"
end
let(:zip_entry4_name) do
folder = "batch_#{batch.id}_sample_sheets"
csv = "#{request4.id_wafer_lims}.csv"
"#{folder}/#{csv}"
end
# Expected CSV section headers from Zip; to peek at the content.
let(:zip_content1_header) do
"[Header],,,,,,,\r\nBatch #{batch.id} #{tube1.human_barcode},,,,,,,\r\n"
end
let(:zip_content2_header) do
"[Header],,,,,,,\r\nBatch #{batch.id} #{tube2.human_barcode},,,,,,,\r\n"
end
let(:zip_content3_header) do
"[Header],,,,,,,\r\nBatch #{batch.id} #{tube3.human_barcode},,,,,,,\r\n"
end
let(:zip_content4_header) do
"[Header],,,,,,,\r\nBatch #{batch.id} #{tube4.human_barcode},,,,,,,\r\n"
end

it 'generates valid zip entries' do
# Test: The sample manifest (csv file) is generated on user request per pool.
# Test: The name should be uniquely identifiable (file name : batchId_NT_number)
zip_hash = extract_zip(described_class.generate(batch))
expect(zip_hash.keys).to contain_exactly(zip_entry1_name, zip_entry2_name)
expect(zip_hash.keys).to contain_exactly(
zip_entry1_name, zip_entry2_name, zip_entry3_name, zip_entry4_name
)
end

it 'generates valid zip contents' do
# In the header section (free text), add the batchId and the pool number.
zip_hash = extract_zip(described_class.generate(batch))
expect(zip_hash.values).to contain_exactly(
a_string_including(zip_content1_header),
a_string_including(zip_content2_header)
a_string_including(zip_content2_header),
a_string_including(zip_content3_header),
a_string_including(zip_content4_header)
)
end

Expand All @@ -210,19 +260,26 @@ def map_description(map_id)
# Parse the generated CSV for the tubes into rows and columns.
let(:csv1) { CSV.parse(generator.csv_string(request1), row_sep: "\r\n", nil_value: '') }
let(:csv2) { CSV.parse(generator.csv_string(request2), row_sep: "\r\n", nil_value: '') }
let(:csv3) { CSV.parse(generator.csv_string(request3), row_sep: "\r\n", nil_value: '') }
let(:csv4) { CSV.parse(generator.csv_string(request4), row_sep: "\r\n", nil_value: '') }

# Test: Adding study_id column to the existing column (study_id per sample)

# Expected sample rows
let(:csv1_samples) { csv_samples_for(request1) }

let(:csv2_samples) { csv_samples_for(request2) }
let(:csv3_samples) { csv_samples_for(request3) }
let(:csv4_samples) { csv_samples_for(request4) }

it 'generates header sections' do # rubocop:disable RSpec/MultipleExpectations
expect(csv1[0].compact_blank).to eq(generator.class::HEADER_TITLE)
expect(csv1[1].compact_blank).to eq(["Batch #{batch.id} #{tube1.human_barcode}"]) # First CSV
expect(csv1[2].compact_blank).to eq([])
expect(csv2[1].compact_blank).to eq(["Batch #{batch.id} #{tube2.human_barcode}"]) # Second CSV
expect(csv2[2].compact_blank).to eq([])
expect(csv3[1].compact_blank).to eq(["Batch #{batch.id} #{tube3.human_barcode}"]) # Third CSV
expect(csv3[2].compact_blank).to eq([])
expect(csv4[1].compact_blank).to eq(["Batch #{batch.id} #{tube4.human_barcode}"])
end

it 'generates global sections' do
Expand All @@ -239,6 +296,8 @@ def map_description(map_id)
expect(csv1[8].compact_blank).to eq(generator.class::SAMPLES_HEADERS)
expect(csv1[9..]).to eq(csv1_samples) # First CSV
expect(csv2[9..]).to eq(csv2_samples) # Second CSV
expect(csv3[9..]).to eq(csv3_samples) # Third CSV
expect(csv4[9..]).to eq(csv4_samples) # Fourth CSV
end

it 'matches the z-indexes, oligo sequences, and plate numbers' do
Expand All @@ -251,6 +310,16 @@ def map_description(map_id)
expect(csv2[9][2]).to eq('Z0097') # Index_Barcode_Num
expect(csv2[9][3]).to eq(plate2_first_oligo) # Index_Barcode_Sequence
expect(csv2[9][4]).to eq('2') # Barcode_Plate_Num

# Third CSV
expect(csv3[9][2]).to eq('Z0193') # Index_Barcode_Num
expect(csv3[9][3]).to eq(plate3_first_oligo) # Index_Barcode_Sequence
expect(csv3[9][4]).to eq('3') # Barcode_Plate_Num

# Fourth CSV
expect(csv4[9][2]).to eq('Z0289') # Index_Barcode_Num
expect(csv4[9][3]).to eq(plate4_first_oligo) # Index_Barcode_Sequence
expect(csv4[9][4]).to eq('4') # Barcode_Plate_Num
end
end
end
Expand Down
Loading