Fix nick, ligate, and crossover tests
UnHumbleBen committed May 16, 2022
1 parent 860be06 commit 06fe222
Showing 1 changed file with 32 additions and 17 deletions.
49 changes: 32 additions & 17 deletions tests/scadnano_tests.py
@@ -3269,6 +3269,15 @@ def test_add_nick_then_add_crossovers__6_helix_rectangle(self) -> None:
self.assertIn(scaf, self.origami.strands)

def test_ligate_on_extension_side_should_error(self) -> None:
"""
/
/
[-------[----->
^
|
error to ligate here
"""
design: sc.Design = sc.Design(helices=[sc.Helix(max_offset=100)])
design.draw_strand(0, 0).to(10).extension_3p(5)
design.draw_strand(0, 10).to(20)
@@ -3281,24 +3290,28 @@ def test_ligate_on_non_extension_side_ok(self) -> None:
\
\
0 --------->[-------->
--------->[-------->
After:
Before:
\
\
0 ------------------->
------------------->
"""
# Setup
design: sc.Design = sc.Design(helices=[sc.Helix(max_offset=100)])
design.draw_strand(0, 0).extension_3p(5).to(10)
design.draw_strand(0, 0).extension_5p(5).to(10)
design.draw_strand(0, 10).to(20)

# Action
design.ligate(0, 10, True)
expected_strand: sc.Strand = sc.Strand([
sc.Extension(5, (-1, -1)),
sc.Domain(0, True, 0, 20)
])

# Verify
self.assertEqual(1, len(design.strands))
self.assertEqual(expected_strand, design.strands[0])
actual_substrands = design.strands[0].domains
self.assertEqual(2, len(actual_substrands))
self.assertEqual(sc.Extension(5), actual_substrands[0])
self.assertEqual(sc.Domain(0, True, 0, 20), actual_substrands[1])

def test_add_full_crossover_extension_ok(self) -> None:
"""
@@ -3339,7 +3352,7 @@ def test_add_full_crossover_extension_ok(self) -> None:
expected_strand_1: sc.Strand = sc.Strand([
sc.Domain(1, False, 8, 16),
sc.Domain(0, True, 8, 16),
sc.Extension(5, (1, -1))
sc.Extension(5)
])
self.assertEqual(2, len(design.strands))
self.assertIn(expected_strand_0, design.strands)
@@ -3398,15 +3411,15 @@ def test_add_half_crossover_on_extension_ok(self) -> None:
design: sc.Design = sc.Design(
helices=[sc.Helix(max_offset=100), sc.Helix(max_offset=100)]
)
design.draw_strand(0, 0).extension_3p(5).to(8)
design.draw_strand(0, 0).extension_5p(5).to(8)
design.draw_strand(1, 8).to(0)

# Action
design.add_half_crossover(0, 1, 8, True)
design.add_half_crossover(0, 1, 7, True)

# Validation
expected_strand: sc.Strand = sc.Strand([
sc.Extension(5, (-1, -1)),
sc.Extension(5),
sc.Domain(0, True, 0, 8),
sc.Domain(1, False, 0, 8)
])
@@ -3435,7 +3448,7 @@ def test_add_half_crossover_on_extension_error(self) -> None:
design: sc.Design = sc.Design(
helices=[sc.Helix(max_offset=100), sc.Helix(max_offset=100)]
)
design.draw_strand(0, 0).extension_3p(5).to(8)
design.draw_strand(0, 0).extension_5p(5).to(8)
design.draw_strand(1, 8).to(0)

with self.assertRaises(sc.IllegalDesignError):
@@ -3459,7 +3472,7 @@ def test_nick_on_extension(self) -> None:
"""
# Setup
design: sc.Design = sc.Design(helices=[sc.Helix(max_offset=100), sc.Helix(max_offset=100)])
design.draw_strand(0, 0).extension_3p(5).to(8)
design.draw_strand(0, 0).to(8).extension_3p(5)

# Nick
design.add_nick(0, 4, True)
@@ -3470,7 +3483,7 @@ ])
])
expected_strand2: sc.Strand = sc.Strand([
sc.Domain(0, True, 4, 8),
sc.Extension(5, (1, -1))
sc.Extension(5)
])
self.assertEquals(2, len(design.strands))
self.assertIn(expected_strand1, design.strands)
@@ -4865,7 +4878,9 @@ def test_from_json_extension_design(self) -> None:
}
"""
design = sc.Design.from_scadnano_json_str(json_str)
self.assertEqual(sc.Extension(5, display_length=1.4, display_angle=50.0), design.strands[0].domains[1])
self.assertEqual(
sc.Extension(5, display_length=1.4, display_angle=50.0),
design.strands[0].domains[1])

def test_to_json_extension_design__extension(self) -> None:
# Setup

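For context, a minimal sketch of the pattern these fixed tests exercise, written outside of unittest. It assumes the scadnano API exactly as it appears in the diff above (sc.Design, sc.Helix, draw_strand, extension_3p, add_nick, Strand.domains) and is an illustration only, not part of the commit.

import scadnano as sc

# One helix; draw a strand from offset 0 to 8 and give it a 5-base 3' extension,
# mirroring the setup in test_nick_on_extension above.
design = sc.Design(helices=[sc.Helix(max_offset=100)])
design.draw_strand(0, 0).to(8).extension_3p(5)

# Nick the domain at offset 4; the extension stays attached to the 3' half.
design.add_nick(0, 4, True)

# After the nick there are two strands; the second ends in an extension, which
# (per this commit) is constructed as sc.Extension(5) with no direction tuple.
for strand in design.strands:
    print(strand.domains)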