#!/usr/bin/env python
from collections import defaultdict
from collections import OrderedDict
import copy
import os

from intervaltree import IntervalTree

from ragoo_utilities.PAFReader import PAFReader
from ragoo_utilities.SeqReader import SeqReader
from ragoo_utilities.ReadCoverage import ReadCoverage
from ragoo_utilities.ContigAlignment import ContigAlignment
from ragoo_utilities.ContigAlignment import UniqueContigAlignment
from ragoo_utilities.ContigAlignment import LongestContigAlignment
from ragoo_utilities.GFFReader import GFFReader
from ragoo_utilities.utilities import run, log, reverse_complement, read_contigs, read_gz_contigs
from ragoo_utilities.break_chimera import get_ref_parts, cluster_contig_alns, avoid_gff_intervals, update_gff, break_contig, get_intra_contigs


def update_misasm_features(features, breaks, contig, ctg_len):
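    """
    Lift gff features from a contig onto the pieces produced by misassembly breaking. Each piece is named
    '<contig>_misasm_break:<start>-<end>', and feature coordinates are shifted to be relative to the piece
    containing the feature's start position.
    :param features: dictionary associating each sequence header with its list of GFFLines
    :param breaks: validated break points for this contig
    :param contig: header of the contig being broken
    :param ctg_len: contig length (taken from the ReadCoverage object)
    :return: the updated feature dictionary
    """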
    # Get ctg len from ReadCoverage object
    break_list = [0] + sorted(breaks) + [ctg_len]
    borders = []
    for i in range(len(break_list) - 1):
        borders.append((break_list[i], break_list[i+1]))

    # Pop the features to be updated
    contig_feats = features.pop(contig)

    # Initialize lists for new broken contig headers
    for i in range(len(borders)):
        features[contig + '_misasm_break:' + str(borders[i][0]) + '-' + str(borders[i][1])] = []

    t = IntervalTree()
    for i in borders:
        t[i[0]:i[1]] = i

    for i in contig_feats:
        query = t[i.start]
        assert len(query) == 1
        break_start = list(query)[0].begin
        break_end = list(query)[0].end
        query_border = (break_start, break_end)
        break_number = borders.index(query_border)
        i.seqname = contig + '_misasm_break:' + str(borders[break_number][0]) + '-' + str(borders[break_number][1])
        i.start = i.start - break_start
        i.end = i.end - break_start
        features[
            contig + '_misasm_break:' + str(borders[break_number][0]) + '-' + str(borders[break_number][1])].append(i)

    return features


def remove_gff_breaks(gff_ins, breaks):
    """
    Given a list of candidate breakpoints proposed by misassembly correction, remove any such break points that
    fall within the interval of a gff feature. This should be called once per contig.
    :param gff_ins: List of GFFLines
    :param breaks: candidate break points
    :return:
    """
    # Make an interval tree from the intervals of the gff lines
    t = IntervalTree()
    for line in gff_ins:
        # If the interval is one bp long, skip
        if line.start == line.end:
            continue
        t[line.start:line.end] = (line.start, line.end)

    return [i for i in breaks if not t[i]]


def write_misasm_broken_ctgs(contigs_file, breaks, out_prefix, in_gff=None, in_gff_name=None):
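    """
    Write the contigs FASTA with each contig listed in 'breaks' split at its validated break points; pieces
    are named '<header>_misasm_break:<start>-<end>'. If a gff dictionary and file name are given, also write
    the updated gff. All output is written into the 'ctg_alignments' directory.
    :param contigs_file: contigs FASTA, as passed on the command line
    :param breaks: dictionary associating a contig header with its break points
    :param out_prefix: prefix for the broken contigs FASTA
    :param in_gff: dictionary of updated gff features (optional)
    :param in_gff_name: file name for the updated gff (optional)
    """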
    current_path = os.getcwd()
    os.chdir('ctg_alignments')

    if in_gff and in_gff_name:
        with open(in_gff_name, 'w') as f:
            for i in in_gff.keys():
                for j in in_gff[i]:
                    f.write(str(j) + '\n')

    x = SeqReader("../../" + contigs_file)
    f = open(out_prefix + ".misasm.break.fa", 'w')
    for header, seq in x.parse_fasta():
        header = header[1:]
        if header not in breaks:
            f.write(">" + header + "\n")
            f.write(seq + "\n")
        else:
            # Break the contig
            ctg_len = len(seq)
            break_list = [0] + sorted(breaks[header]) + [ctg_len]
            for i in range(len(break_list) - 1):
                f.write(">" + header + "_misasm_break:" + str(break_list[i]) + "-" + str(break_list[i+1]) + "\n")
                f.write(seq[break_list[i]:break_list[i+1]] + "\n")
    f.close()
    os.chdir(current_path)


def align_misasm_broken(out_prefix):
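    """
    Align the misassembly-broken contigs back to the reference with minimap2.
    Note that 'minimap_path', 't' and 'reference_file' are globals set in the __main__ block.
    :param out_prefix: prefix of the broken contigs FASTA written by write_misasm_broken_ctgs
    """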
    current_path = os.getcwd()
    os.chdir('ctg_alignments')

    ctgs_file = out_prefix + ".misasm.break.fa"
    cmd = '{} -k19 -w19 -t{} ../../{} {} ' \
          '> contigs_brk_against_ref.paf 2> contigs_brk_against_ref.paf.log'.format(minimap_path, t, reference_file,
                                                                                    ctgs_file)
    if not os.path.isfile('contigs_brk_against_ref.paf'):
        run(cmd)
    os.chdir(current_path)


def write_contig_clusters(unique_dict, thresh, skip_list):
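    """
    Write one 'groupings/<chromosome>_contigs.txt' file per reference chromosome, listing the contigs
    assigned to that chromosome along with their grouping confidence. Contigs whose confidence is at or
    below 'thresh', or which appear in 'skip_list', are not written.
    :param unique_dict: dictionary associating each contig header with a UniqueContigAlignment
    :param thresh: minimum grouping confidence needed for a contig to be localized
    :param skip_list: contig headers to exclude from all groupings
    """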
    # Get a list of all chromosomes
    all_chroms = set([unique_dict[i].ref_chrom for i in unique_dict.keys()])
    current_path = os.getcwd()
    output_path = current_path + '/groupings'
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    os.chdir('groupings')
    for i in all_chroms:
        open(i + '_contigs.txt', 'w').close()

    for i in unique_dict.keys():
        this_chr = unique_dict[i].ref_chrom
        this_confidence = unique_dict[i].confidence
        if this_confidence > thresh:
            if i not in skip_list:
                file_name = str(this_chr) + '_contigs.txt'
                with open(file_name, 'a') as f:
                    f.write(i + '\t' + str(this_confidence) + '\n')
    os.chdir(current_path)


def clean_alignments(in_alns, l=10000, in_exclude_file='', uniq_anchor_filter=False, merge=False):
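    """
    Remove alignments to excluded reference headers and alignments shorter than 'l'. Optionally apply the
    unique anchor filter and merge alignments. Contigs left with no alignments are dropped from the dictionary.
    :param in_alns: dictionary associating each contig header with a ContigAlignment
    :param l: minimum alignment length to retain
    :param in_exclude_file: single-column text file of reference headers to exclude (opened as '../' + in_exclude_file)
    :param uniq_anchor_filter: if True, apply ContigAlignment.unique_anchor_filter()
    :param merge: if True, apply ContigAlignment.merge_alns()
    :return: the filtered alignment dictionary
    """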
    # Exclude alignments to undesired reference headers and filter alignment lengths.
    exclude_list = []
    if in_exclude_file:
        with open('../' + in_exclude_file) as f:
            for line in f:
                exclude_list.append(line.rstrip().replace('>', '').split()[0])

    empty_headers = []
    for header in in_alns.keys():
        in_alns[header].exclude_ref_chroms(exclude_list)
        in_alns[header].filter_lengths(l)
        if uniq_anchor_filter:
            in_alns[header].unique_anchor_filter()

        if merge:
            in_alns[header].merge_alns()

        # Check if our filtering has removed all alignments for a contig
        if len(in_alns[header].ref_headers) == 0:
            empty_headers.append(header)

    for header in empty_headers:
        in_alns.pop(header)
    return in_alns


def read_paf_alignments(in_paf):
    # Read in PAF alignments
    # Initialize a dictionary where key is contig header, and value is ContigAlignment.
    alns = dict()
    x = PAFReader(in_paf)
    for paf_line in x.parse_paf():
        if paf_line.contig in alns:
            alns[paf_line.contig].add_alignment(paf_line)
        else:
            alns[paf_line.contig] = ContigAlignment(paf_line.contig)
            alns[paf_line.contig].add_alignment(paf_line)
    return alns


def get_contigs_from_groupings(in_file):
    contigs = []
    with open(in_file) as f:
        for line in f:
            contigs.append(line.split('\t')[0])
    return contigs


def get_location_confidence(in_ctg_alns):
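    """
    Location confidence for a contig: the fraction of base pairs covered by this contig's alignments within
    the reference span [min(ref_starts), max(ref_ends)] on its assigned chromosome. A score of 1 means the
    alignments tile the entire span; values near 0 mean the alignments are sparse within it.
    :param in_ctg_alns: ContigAlignment restricted to the assigned reference chromosome
    :return: covered bp divided by span length
    """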
    # Use interval tree to get all alignments within the reference span
    # Go through each of them and if any start is less than the min_pos or any end is greater than
    # the max_pos, change the borders to those values. Then use the algorithm that Mike gave me.
    min_pos = min(in_ctg_alns.ref_starts)
    max_pos = max(in_ctg_alns.ref_ends)
    t = IntervalTree()

    # Put the reference start and end position for every alignment into the tree
    for i in range(len(in_ctg_alns.ref_headers)):
        t[in_ctg_alns.ref_starts[i]:in_ctg_alns.ref_ends[i]] = (in_ctg_alns.ref_starts[i], in_ctg_alns.ref_ends[i])

    overlaps = t[min_pos:max_pos]
    if not overlaps:
        return 0

    # If any intervals fall beyond the boundaries, clamp the start/end to the boundary they exceed.
    # Build new tuples rather than mutating the (immutable) stored ones.
    ovlp_list = [i.data for i in overlaps]
    bounded_list = []
    for i in ovlp_list:
        bounded_list.append((max(i[0], min_pos), min(i[1], max_pos)))

    # Now can just calculate the total range covered by the intervals
    ovlp_range = 0
    sorted_intervals = sorted(bounded_list, key=lambda tup: tup[0])
    max_end = -1
    for j in sorted_intervals:
        start_new_terr = max(j[0], max_end)
        ovlp_range += max(0, j[1] - start_new_terr)
        max_end = max(max_end, j[1])

    return ovlp_range / (max_pos - min_pos)


def order_orient_contigs(in_unique_contigs, in_alns):
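    """
    For each reference chromosome, order the contigs assigned to it by the reference start position of their
    longest alignment, and orient each contig according to the strand of that longest alignment. Location and
    orientation confidence scores are computed per contig, and the results are written to
    'orderings/<chromosome>_orderings.txt' (contig, orientation, location confidence, orientation confidence).
    :param in_unique_contigs: dictionary associating each contig header with a UniqueContigAlignment
    :param in_alns: dictionary associating each contig header with a ContigAlignment
    """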
    current_path = os.getcwd()
    output_path = current_path + '/orderings'
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    # Get longest alignments
    longest_contigs = dict()
    for i in in_alns.keys():
        # Only consider alignments to the assigned chromosome
        uniq_aln = UniqueContigAlignment(in_alns[i])
        best_header = uniq_aln.ref_chrom
        ctg_alns = copy.deepcopy(in_alns[i])
        ctg_alns.filter_ref_chroms([best_header])
        longest_contigs[i] = LongestContigAlignment(ctg_alns)

    # Save the orientations
    final_orientations = dict()
    for i in longest_contigs.keys():
        final_orientations[i] = longest_contigs[i].strand

    # Get the location and orientation confidence scores
    orientation_confidence = dict()
    location_confidence = dict()
    forward_bp = 0
    reverse_bp = 0
    for i in in_alns.keys():
        uniq_aln = UniqueContigAlignment(in_alns[i])
        best_header = uniq_aln.ref_chrom
        ctg_alns = copy.deepcopy(in_alns[i])
        ctg_alns.filter_ref_chroms([best_header])

        # Orientation confidence scores
        # Every base pair votes for the orientation of the alignment in which it belongs
        # Score is # votes for the assigned orientation over all votes
        for j in range(len(ctg_alns.ref_headers)):
            if ctg_alns.strands[j] == '+':
                forward_bp += ctg_alns.aln_lens[j]
            else:
                reverse_bp += ctg_alns.aln_lens[j]

        if final_orientations[i] == '+':
            orientation_confidence[i] = forward_bp / (forward_bp + reverse_bp)
        else:
            orientation_confidence[i] = reverse_bp / (forward_bp + reverse_bp)

        forward_bp = 0
        reverse_bp = 0

        # Location confidence
        location_confidence[i] = get_location_confidence(ctg_alns)

    all_chroms = set([in_unique_contigs[i].ref_chrom for i in in_unique_contigs.keys()])

    for this_chrom in all_chroms:

        # Initialize the list of alignment start and end positions w.r.t. the reference
        ref_pos = []

        groupings_file = 'groupings/' + this_chrom + '_contigs.txt'
        contigs_list = get_contigs_from_groupings(groupings_file)

        for i in range(len(contigs_list)):
            # There is a scope issue here. Pass this (longest_contigs) to the method explicitly.
            ref_pos.append((longest_contigs[contigs_list[i]].ref_start, longest_contigs[contigs_list[i]].ref_end, i))

        final_order = [contigs_list[i[2]] for i in sorted(ref_pos)]

        # Get ordering confidence
        # To do this, get the max and min alignments to this reference chromosome
        # Then within that region, what percent of bp are covered

        with open('orderings/' + this_chrom + '_orderings.txt', 'w') as out_file:
            for i in final_order:
                # Also have a scope issue here.
                out_file.write(i + '\t' + final_orientations[i] + '\t' + str(location_confidence[i]) + '\t' + str(orientation_confidence[i]) + '\n')


def get_orderings(in_orderings_file):
    all_orderings = []
    with open(in_orderings_file) as f:
        for line in f:
            L1 = line.split('\t')
            all_orderings.append((L1[0], L1[1].rstrip()))
    return all_orderings


def create_pseudomolecules(in_contigs_file, in_unique_contigs, gap_size, chr0=True):
    """
    Need to make a translation table for easy lift-over.
    :param in_contigs_file:
    :param in_unique_contigs:
    :param gap_size:
    :param chr0: if True, concatenate unlocalized contigs into a single 'Chr0' pseudomolecule
    :return:
    """
    # First, read all of the contigs into memory
    remaining_contig_headers = []
    all_seqs = OrderedDict()
    x = SeqReader('../' + in_contigs_file)
    if in_contigs_file.endswith(".gz"):
        for header, seq in x.parse_gzip_fasta():
            remaining_contig_headers.append(header.split(' ')[0])
            all_seqs[header.split(' ')[0]] = seq
    else:
        for header, seq in x.parse_fasta():
            remaining_contig_headers.append(header.split(' ')[0])
            all_seqs[header.split(' ')[0]] = seq

    # Get all reference chromosomes
    all_chroms = sorted(list(set([in_unique_contigs[i].ref_chrom for i in in_unique_contigs.keys()])))

    # Iterate through each orderings file and store sequence in a dictionary
    all_pms = dict()
    pad = ''.join('N' for i in range(gap_size))
    for this_chrom in all_chroms:
        orderings_file = 'orderings/' + this_chrom + '_orderings.txt'
        orderings = get_orderings(orderings_file)
        if orderings:
            seq_list = []
            for line in orderings:
                # Mark that we have seen this contig
                remaining_contig_headers.pop(remaining_contig_headers.index('>' + line[0]))
                if line[1] == '+':
                    seq_list.append(all_seqs['>' + line[0]])
                else:
                    assert line[1] == '-'
                    seq_list.append(reverse_complement(all_seqs['>' + line[0]]))
            all_pms[this_chrom] = pad.join(seq_list)
            all_pms[this_chrom] += '\n'

    # Get unincorporated sequences and place them in Chr0
    if remaining_contig_headers:
        if chr0:
            chr0_headers = []
            chr0_seq_list = []
            for header in remaining_contig_headers:
                chr0_headers.append(header)
                chr0_seq_list.append(all_seqs[header])
            all_pms['Chr0'] = pad.join(chr0_seq_list)
            all_pms['Chr0'] += '\n'

            # Write out the list of chr0 headers
            f_chr0_g = open('groupings/Chr0_contigs.txt', 'w')
            f_chr0_o = open('orderings/Chr0_orderings.txt', 'w')
            for i in chr0_headers:
                f_chr0_g.write(i[1:] + "\t" + "0" + '\n')
                f_chr0_o.write(i[1:] + '\t' + "+" + '\t' + "0" + '\t' + "0" + '\n')
            f_chr0_g.close()
            f_chr0_o.close()
        else:
            # Instead of making a chromosome 0, add the unplaced sequences as is.
            for header in remaining_contig_headers:
                all_pms[header[1:]] = all_seqs[header] + "\n"
                f_chr0_g = open('groupings/' + header[1:] + '_contigs.txt', 'w')
                f_chr0_o = open('orderings/' + header[1:] + '_orderings.txt', 'w')
                f_chr0_g.write(header[1:] + "\t" + "0" + '\n')
                f_chr0_o.write(header[1:] + '\t' + "+" + '\t' + "0" + '\t' + "0" + '\n')
                f_chr0_g.close()
                f_chr0_o.close()

    # Write the final sequences out to a file
    with open('ragoo.fasta', 'w') as f:
        for out_header in all_pms:
            f.write(">" + out_header + "_RaGOO\n")
            f.write(all_pms[out_header])


def write_broken_files(in_contigs, in_contigs_name, in_gff=None, in_gff_name=None):
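    """
    Write the chimera-broken contigs (and, if provided, the updated gff) into the 'chimera_break' directory.
    :param in_contigs: dictionary associating each contig header with its sequence
    :param in_contigs_name: file name for the broken contigs FASTA
    :param in_gff: dictionary of updated gff features (optional)
    :param in_gff_name: file name for the updated gff (optional)
    """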
    current_path = os.getcwd()
    output_path = current_path + '/chimera_break'
    if not os.path.exists(output_path):
        os.makedirs(output_path)

    os.chdir('chimera_break')
    if in_gff and in_gff_name:
        with open(in_gff_name, 'w') as f:
            for i in in_gff.keys():
                for j in in_gff[i]:
                    f.write(str(j) + '\n')

    with open(in_contigs_name, 'w') as f:
        for i in in_contigs.keys():
            f.write('>' + i + '\n')
            f.write(in_contigs[i] + '\n')

    os.chdir(current_path)


def align_breaks(break_type, m_path, in_reference_file, in_contigs_file, in_num_threads):
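    """
    Align chimera-broken contigs to the reference with minimap2, writing the PAF into 'chimera_break'.
    :param break_type: 'inter' for interchromosomal chimera breaking, anything else for intrachromosomal
    :param m_path: path to the minimap2 executable
    :param in_reference_file: reference FASTA, as passed on the command line
    :param in_contigs_file: broken contigs FASTA (relative to the 'chimera_break' directory)
    :param in_num_threads: number of minimap2 threads
    """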
    current_path = os.getcwd()
    os.chdir('chimera_break')
    if break_type == 'inter':
        cmd = '{} -k19 -w19 -t{} ../../{} {} ' \
              '> inter_contigs_against_ref.paf 2> inter_contigs_against_ref.paf.log'.format(m_path, in_num_threads, in_reference_file, in_contigs_file)
        if not os.path.isfile('inter_contigs_against_ref.paf'):
            run(cmd)
    else:
        cmd = '{} -k19 -w19 -t{} ../../{} {} ' \
              '> intra_contigs_against_ref.paf 2> intra_contigs_against_ref.paf.log'.format(m_path, in_num_threads, in_reference_file, in_contigs_file)
        if not os.path.isfile('intra_contigs_against_ref.paf'):
            run(cmd)

    os.chdir(current_path)


def align_pms(m_path, num_threads, in_reference_file):
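    """
    Align the final pseudomolecules (ragoo.fasta) to the reference with 'minimap2 -ax asm5 --cs', writing the
    SAM into 'pm_alignments' for downstream structural variant calling.
    :param m_path: path to the minimap2 executable
    :param num_threads: number of minimap2 threads
    :param in_reference_file: reference FASTA, as passed on the command line
    """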
    current_path = os.getcwd()
    output_path = current_path + '/pm_alignments'
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    os.chdir('pm_alignments')

    cmd = '{} -ax asm5 --cs -t{} ../../{} {} ' \
          '> pm_against_ref.sam 2> pm_contigs_against_ref.sam.log'.format(m_path, num_threads,
                                                                          in_reference_file, '../ragoo.fasta')
    if not os.path.isfile('pm_against_ref.sam'):
        run(cmd)

    os.chdir(current_path)


def get_SVs(sv_min, sv_max, in_ref_file):
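    """
    Call structural variants from the pseudomolecule-to-reference SAM using the sam2delta.py and Assemblytics
    helper scripts (run as bare commands, so they must be on the PATH), then filter out SVs caused by gaps
    with filter_gap_SVs.py. All output is written into the 'pm_alignments' directory.
    :param sv_min: minimum structural variant size
    :param sv_max: maximum structural variant size
    :param in_ref_file: reference FASTA, as passed on the command line
    """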
    current_path = os.getcwd()
    os.chdir('pm_alignments')
    # Change this when setup.py is ready. Just call script directly
    cmd = 'sam2delta.py pm_against_ref.sam'
    if not os.path.isfile('pm_against_ref.sam.delta'):
        run(cmd)

    cmd_2 = 'Assemblytics_uniq_anchor.py --delta pm_against_ref.sam.delta --unique-length 10000 --out assemblytics_out --keep-small-uniques'
    if not os.path.isfile('assemblytics_out.Assemblytics.unique_length_filtered_l10000.delta'):
        run(cmd_2)

    cmd_3 = 'Assemblytics_between_alignments.pl assemblytics_out.coords.tab %r %r all-chromosomes exclude-longrange bed > assemblytics_out.variants_between_alignments.bed' % (sv_min, sv_max)
    if not os.path.isfile('assemblytics_out.variants_between_alignments.bed'):
        run(cmd_3)

    cmd_4 = 'Assemblytics_within_alignment.py --delta assemblytics_out.Assemblytics.unique_length_filtered_l10000.delta --min %r > assemblytics_out.variants_within_alignments.bed' % (sv_min)
    if not os.path.isfile('assemblytics_out.variants_within_alignments.bed'):
        run(cmd_4)

    header = "reference\tref_start\tref_stop\tID\tsize\tstrand\ttype\tref_gap_size\tquery_gap_size\tquery_coordinates\tmethod\n"

    with open('assemblytics_out.variants_between_alignments.bed', 'r') as f1:
        b1 = f1.read()

    with open('assemblytics_out.variants_within_alignments.bed', 'r') as f2:
        b2 = f2.read()

    with open('assemblytics_out.Assemblytics_structural_variants.bed', 'w') as f:
        f.write(header)
        # Might need to add newlines here
        f.write(b1)
        f.write(b2)

    # Filter out SVs caused by gaps
    cmd_5 = 'filter_gap_SVs.py ../../%s' % (in_ref_file)
    run(cmd_5)

    os.chdir(current_path)


def align_reads(m_path, num_threads, in_ctg_file, reads, tech='ont'):
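    """
    Align the provided reads to the contigs with minimap2 for misassembly-correction coverage analysis.
    Only 'sr' (short reads, minimap2 preset 'sr') and 'corr' (error-corrected long reads, preset 'asm10')
    are accepted for 'tech'; anything else raises a ValueError.
    :param m_path: path to the minimap2 executable
    :param num_threads: number of minimap2 threads
    :param in_ctg_file: contigs FASTA, as passed on the command line
    :param reads: reads FASTA/FASTQ, as passed on the command line
    :param tech: 'sr' or 'corr'
    """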
    current_path = os.getcwd()
    output_path = current_path + '/ctg_alignments'
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    os.chdir('ctg_alignments')

    if tech == 'sr':
        cmd = '{} -x sr -t{} ../../{} ../../{} ' \
              '> reads_against_ctg.paf 2> reads_against_ctg.paf.log'.format(m_path, num_threads, in_ctg_file, reads)
    elif tech == 'corr':
        cmd = '{} -x asm10 -t{} ../../{} ../../{} ' \
              '> reads_against_ctg.paf 2> reads_against_ctg.paf.log'.format(m_path, num_threads, in_ctg_file, reads)
    else:
        raise ValueError("Only 'sr' or 'corr' are accepted for read type.")

    if not os.path.isfile('reads_against_ctg.paf'):
        run(cmd)

    os.chdir(current_path)


if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description='order and orient contigs according to minimap2 alignments to a reference (v1.1)')
    parser.add_argument("contigs", metavar="<contigs.fasta>", type=str, help="fasta file with contigs to be ordered and oriented (gzipped allowed)")
    parser.add_argument("reference", metavar="<reference.fasta>", type=str, help="reference fasta file (gzipped allowed)")
    #parser.add_argument("-o", metavar="PATH", type=str, default="ragoo_output", help="output directory name")
    parser.add_argument("-e", metavar="<exclude.txt>", type=str, default="", help="single column text file of reference headers to ignore")
    parser.add_argument("-gff", metavar="<annotations.gff>", type=str, default='', help="lift-over gff features to chimera-broken contigs")
    parser.add_argument("-m", metavar="PATH", type=str, default="minimap2", help='path to minimap2 executable')
    parser.add_argument("-b", action='store_true', default=False, help="Break chimeric contigs")
    parser.add_argument("-R", metavar="<reads.fasta>", type=str, default="", help="Turns on misassembly correction. Align provided reads to the contigs to aid misassembly correction. fastq or fasta allowed. Gzipped files allowed. Turns off '-b'.")
    parser.add_argument("-T", metavar="sr", type=str, default="", help="Type of reads provided by '-R'. 'sr' and 'corr' accepted for short reads and error-corrected long reads respectively.")
    parser.add_argument("-p", metavar="5", type=int, default=5, help=argparse.SUPPRESS)
    parser.add_argument("-l", metavar="10000", type=int, default=10000, help=argparse.SUPPRESS)
    parser.add_argument("-r", metavar="100000", type=int, default=100000, help="(with -b) this many bp of >1 reference sequence must be covered for a contig to be considered an interchromosomal chimera.")
    parser.add_argument("-c", metavar="1000000", type=int, default=1000000, help="(with -b) distance threshold between consecutive alignments with respect to the contig.")
    parser.add_argument("-d", metavar="2000000", type=int, default=2000000, help="(with -b) distance threshold between consecutive alignments with respect to the reference.")
    parser.add_argument("-t", metavar="3", type=int, default=3, help="Number of threads when running minimap.")
    parser.add_argument("-g", metavar="100", type=int, default=100, help="Gap size for padding in pseudomolecules.")
    parser.add_argument("-s", action='store_true', default=False, help="Call structural variants")
    parser.add_argument("-a", metavar="50", type=int, default=50, help=argparse.SUPPRESS)
    parser.add_argument("-f", metavar="10000", type=int, default=10000, help=argparse.SUPPRESS)
    parser.add_argument("-i", metavar="0.2", type=float, default=0.2, help="Minimum grouping confidence score needed to be localized.")
    parser.add_argument("-j", metavar="<skip.txt>", type=str, default="", help="List of contigs to automatically put in chr0.")
    parser.add_argument("-C", action='store_true', default=False, help="Write unplaced contigs individually instead of making a chr0")
    # Get the command line arguments
    args = parser.parse_args()
    contigs_file = args.contigs
    reference_file = args.reference
    #output_path = args.o
    exclude_file = args.e
    minimap_path = args.m
    break_chimeras = args.b
    gff_file = args.gff
    min_break_pct = args.p
    min_len = args.l
    min_range = args.r
    intra_wrt_ref_min = args.d
    intra_wrt_ctg_min = args.c
    t = args.t
    g = args.g
    call_svs = args.s
    min_assemblytics = args.a
    max_assemblytics = args.f
    group_score_thresh = args.i
    skip_file = args.j
    corr_reads = args.R
    corr_reads_tech = args.T
    make_chr0 = not args.C

    if corr_reads:
        log("Misassembly correction has been turned on. This automatically inactivates chimeric contig correction.")
        break_chimeras = False

    # Make sure that -T is also specified if -R is used
    if corr_reads and not corr_reads_tech:
        raise ValueError("'-T' must be provided when using -R.")

    skip_ctg = []
    if skip_file:
        with open(skip_file) as f:
            for line in f:
                skip_ctg.append(line.rstrip())

    current_path = os.getcwd()
    output_path = current_path + '/ragoo_output'
    if not os.path.exists(output_path):
        os.makedirs(output_path)
    os.chdir(output_path)
    # Run minimap2
    cmd = '{} -k19 -w19 -t{} ../{} ../{} ' \
          '> contigs_against_ref.paf 2> contigs_against_ref.paf.log'.format(minimap_path, t, reference_file, contigs_file)

    if not os.path.isfile('contigs_against_ref.paf'):
        run(cmd)

    # Read in the minimap2 alignments just generated
    log('Reading alignments')
    alns = read_paf_alignments('contigs_against_ref.paf')
    alns = clean_alignments(alns, l=1000, in_exclude_file=exclude_file)

    # Process the gff file
    if gff_file:
        log('Getting gff features')
        features = defaultdict(list)
        z = GFFReader('../' + gff_file)
        for i in z.parse_gff():
            features[i.seqname].append(i)

    # Break chimeras if desired
    if break_chimeras:
        # Record how many contigs are broken
        total_inter_broken = 0
        total_intra_broken = 0

        alns = clean_alignments(alns, l=10000, in_exclude_file=exclude_file, uniq_anchor_filter=True)
        # Process contigs
        log('Getting contigs')
        if contigs_file.endswith(".gz"):
            contigs_dict = read_gz_contigs('../' + contigs_file)
        else:
            contigs_dict = read_contigs('../' + contigs_file)

        log('Finding interchromosomally chimeric contigs')
        all_chimeras = dict()
        for i in alns.keys():
            ref_parts = get_ref_parts(alns[i], min_len, min_break_pct, min_range)
            if len(ref_parts) > 1:
                all_chimeras[i] = ref_parts

        log('Finding break points and breaking interchromosomally chimeric contigs')
        break_intervals = dict()
        for i in all_chimeras.keys():
            break_intervals[i] = cluster_contig_alns(i, alns, all_chimeras[i], min_len)

            # If it's just going to break it into the same thing, skip it.
            if len(break_intervals[i]) <= 1:
                continue

            if gff_file:
                # If desired, ensure that breakpoints don't disrupt any gff intervals
                break_intervals[i] = avoid_gff_intervals(break_intervals[i], features[i])
                features = update_gff(features, break_intervals[i], i)

            # Break contigs according to the final break points
            contigs_dict = break_contig(contigs_dict, i, break_intervals[i])
            total_inter_broken += 1

        # Next, need to re-align before finding intrachromosomal chimeras
        # First, write out the interchromosomal chimera broken fasta
        out_inter_fasta = contigs_file[:contigs_file.rfind('.')] + '.inter.chimera.broken.fa'
        if gff_file:
            out_gff = gff_file[:gff_file.rfind('.')] + '.inter.chimera_broken.gff'
            write_broken_files(contigs_dict, out_inter_fasta, features, out_gff)
        else:
            write_broken_files(contigs_dict, out_inter_fasta)

        # Next, realign the chimera broken contigs
        align_breaks('inter', minimap_path, reference_file, out_inter_fasta, t)

        # Now, use those new alignments for intrachromosomal chimeras
        log('Reading interchromosomal chimera broken alignments')
        inter_alns = read_paf_alignments('chimera_break/inter_contigs_against_ref.paf')
        inter_alns = clean_alignments(inter_alns, l=1000, in_exclude_file=exclude_file)

        log('Finding intrachromosomally chimeric contigs')
        # Find intrachromosomally chimeric contigs
        for i in inter_alns.keys():
            intra = get_intra_contigs(inter_alns[i], 15000, intra_wrt_ref_min, intra_wrt_ctg_min)
            if intra:
                if gff_file:
                    intra_break_intervals = avoid_gff_intervals(intra[1], features[intra[0]])
                else:
                    intra_break_intervals = intra[1]
                # Check if the avoidance of gff intervals pushed the break point to the end of the contig.
                if intra_break_intervals[-1][0] == intra_break_intervals[-1][1]:
                    continue

                # break the contigs and update features if desired
                contigs_dict = break_contig(contigs_dict, intra[0], intra_break_intervals)
                total_intra_broken += 1

                if gff_file:
                    features = update_gff(features, intra_break_intervals, intra[0])

        # Write out the intrachromosomal information
        out_intra_fasta = contigs_file[:contigs_file.rfind('.')] + '.intra.chimera.broken.fa'
        if gff_file:
            out_intra_gff = gff_file[:gff_file.rfind('.')] + '.intra.chimera_broken.gff'
            write_broken_files(contigs_dict, out_intra_fasta, features, out_intra_gff)
        else:
            write_broken_files(contigs_dict, out_intra_fasta)

        # Re-align the contigs
        # Next, realign the chimera broken contigs
        align_breaks('intra', minimap_path, reference_file, out_intra_fasta, t)

        # Read in alignments of intrachromosomal chimeras and proceed with ordering and orientation
        log('Reading intrachromosomal chimera broken alignments')
        alns = read_paf_alignments('chimera_break/intra_contigs_against_ref.paf')
        alns = clean_alignments(alns, l=1000, in_exclude_file=exclude_file)
        contigs_file = '/ragoo_output/chimera_break/' + out_intra_fasta
        log('The total number of interchromosomally chimeric contigs broken is %r' % total_inter_broken)
        log('The total number of intrachromosomally chimeric contigs broken is %r' % total_intra_broken)
    # Check if misassembly correction is turned on. This is mutually exclusive with chimeric contig correction
    if corr_reads:
        # Align the raw reads to the assembly.
        log('Aligning raw reads to contigs')
        align_reads(minimap_path, t, contigs_file, corr_reads, corr_reads_tech)
        log('Computing contig coverage')
        cov_map = ReadCoverage('ctg_alignments/reads_against_ctg.paf')
        alns = clean_alignments(alns, l=10000, in_exclude_file=exclude_file, uniq_anchor_filter=True, merge=True)

        # Get the initial candidate break points.
        candidate_breaks = dict()
        for i in alns:
            candidates = alns[i].get_break_candidates()
            if candidates:
                candidate_breaks[i] = candidates

        # Validate each breakpoint by checking for excessively high or low coverage
        # Also, if a gff is provided, check to ensure that we don't break within a gff feature interval
        val_candidate_breaks = dict()
        for i in candidate_breaks:
            candidates = cov_map.check_break_cov(i, candidate_breaks[i])
            if gff_file:
                candidates = remove_gff_breaks(features[i], candidates)
            if candidates:
                val_candidate_breaks[i] = list(set(candidates))
                if gff_file:
                    features = update_misasm_features(features, val_candidate_breaks[i], i, cov_map.ctg_lens[i])

        # Break the contigs
        if gff_file:
            out_misasm_gff = gff_file[:gff_file.rfind('.')] + '.misasm.broken.gff'
            write_misasm_broken_ctgs(contigs_file, val_candidate_breaks, contigs_file[:contigs_file.rfind('.')], in_gff=features, in_gff_name=out_misasm_gff)
        else:
            write_misasm_broken_ctgs(contigs_file, val_candidate_breaks, contigs_file[:contigs_file.rfind('.')])

        # Align the broken contigs back to the reference
        align_misasm_broken(contigs_file[:contigs_file.rfind('.')])
        alns = read_paf_alignments('ctg_alignments/contigs_brk_against_ref.paf')
        alns = clean_alignments(alns, l=1000, in_exclude_file=exclude_file)
        contigs_file = '/ragoo_output/ctg_alignments/' + contigs_file[:contigs_file.rfind('.')] + ".misasm.break.fa"
    # Assign each contig to a corresponding reference chromosome.
    log('Assigning contigs')
    all_unique_contigs = dict()
    for i in alns.keys():
        all_unique_contigs[i] = UniqueContigAlignment(alns[i])

    # Add to this the list of headers that did not make it
    write_contig_clusters(all_unique_contigs, group_score_thresh, skip_ctg)

    log('Ordering and orienting contigs')
    order_orient_contigs(all_unique_contigs, alns)

    log('Creating pseudomolecules')
    create_pseudomolecules(contigs_file, all_unique_contigs, g, make_chr0)

    if call_svs:
        log('Aligning pseudomolecules to reference')
        align_pms(minimap_path, t, reference_file)

        log('Getting structural variants')
        get_SVs(min_assemblytics, max_assemblytics, reference_file)

    log('goodbye')