('\n---', '}')
{(Id.Lit_RBrace '}')}

('\n---', ',')
{(Id.Lit_Comma ',')}

('\n---', 'B-{a,b}-E')
{(B-) (Id.Lit_LBrace '{') (a) (Id.Lit_Comma ',') (b) (Id.Lit_RBrace '}') (-E)}
(word.BracedTree parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:4))] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:6)) ] )
--

('\n---', 'B-{a"a",b"b",c"c"}-E')
{(B-) (Id.Lit_LBrace '{') (a) (DQ (a)) (Id.Lit_Comma ',') (b) (DQ (b)) (Id.Lit_Comma ',') (c) (DQ (c)) (Id.Lit_RBrace '}') (-E) }
(word.BracedTree parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.BracedTuple words: [ (compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2)) (double_quoted left: (Token id:Id.Left_DoubleQuote val:'"' span_id:3) parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:4))] spids: [3 5] ) ] ) (compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:7)) (double_quoted left: (Token id:Id.Left_DoubleQuote val:'"' span_id:8) parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:9))] spids: [8 10] ) ] ) (compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:c span_id:12)) (double_quoted left: (Token id:Id.Left_DoubleQuote val:'"' span_id:13) parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:c span_id:14))] spids: [13 15] ) ] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:17)) ] )
--

('\n---', 'B-{a,b}--{c,d}-E')
{(B-) (Id.Lit_LBrace '{') (a) (Id.Lit_Comma ',') (b) (Id.Lit_RBrace '}') (--) (Id.Lit_LBrace '{') (c) (Id.Lit_Comma ',') (d) (Id.Lit_RBrace '}') (-E) }
(word.BracedTree parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:4))] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Chars val:-- span_id:6)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:c span_id:8))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:d span_id:10))] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:12)) ] )
--

('\n---', 'B-{a,b,c,={d,e}}-E')
{(B-) (Id.Lit_LBrace '{') (a) (Id.Lit_Comma ',') (b) (Id.Lit_Comma ',') (c) (Id.Lit_Comma ',') (Id.Lit_Equals '=') (Id.Lit_LBrace '{') (d) (Id.Lit_Comma ',') (e) (Id.Lit_RBrace '}') (Id.Lit_RBrace '}') (-E) }
(word.BracedTree parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:4))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:c span_id:6))] ) (compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Equals val:'=' span_id:8)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:d span_id:10))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:e span_id:12))] ) ] ) ] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:15)) ] )

('\n---', 'B-{a,={b,c}=,d}-E')
{(B-) (Id.Lit_LBrace '{') (a) (Id.Lit_Comma ',') (Id.Lit_Equals '=') (Id.Lit_LBrace '{') (b) (Id.Lit_Comma ',') (c) (Id.Lit_RBrace '}') (Id.Lit_Equals '=') (Id.Lit_Comma ',') (d) (Id.Lit_RBrace '}') (-E) }
(word.BracedTree parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2))] ) (compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Equals val:'=' span_id:4)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:6))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:c span_id:8))] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Equals val:'=' span_id:10)) ] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:d span_id:12))] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:14)) ] )
(compound_word parts:[(word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2))])

('\n---', '{a,b,}')
{(Id.Lit_LBrace '{') (a) (Id.Lit_Comma ',') (b) (Id.Lit_Comma ',') (Id.Lit_RBrace '}')}
(word.BracedTree parts: [ (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:1))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:3))] ) (compound_word) ] ) ] )

('\n---', 'hi')
{(hi)}
(compound_word parts:[(word_part.Literal token:(Token id:Id.Lit_Chars val:hi span_id:0))])

('\n---', 'B-{a,b}-E')
{(B-) (Id.Lit_LBrace '{') (a) (Id.Lit_Comma ',') (b) (Id.Lit_RBrace '}') (-E)}
(word.BracedTree parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:4))] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:6)) ] )
(compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2)) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:6)) ] )
(compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:4)) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:6)) ] )
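The cases above are output from the brace detection pass: the lexer leaves '{', ',' and '}' inside a word as ordinary Id.Lit_LBrace, Id.Lit_Comma and Id.Lit_RBrace tokens, and detection then rewrites matched runs of them into a word.BracedTree whose BracedTuple nodes hold the alternatives, while a lone '}' or ',' stays a plain literal word and a trailing comma ('{a,b,}') produces an empty alternative. The sketch below is a minimal stand-alone illustration of that idea, assuming a toy representation of plain strings and a hypothetical ('alt', [...]) node in place of BracedTuple; it is not the project's actual detection code.

#!/usr/bin/env python3
"""Minimal sketch of the brace-detection pass, NOT the project's braces.py.

Input is a flat list of word parts, here plain strings; '{', ',' and '}'
stand in for the Id.Lit_LBrace / Id.Lit_Comma / Id.Lit_RBrace tokens above.
Detection rewrites '{' alt ',' alt ... '}' runs into a nested ('alt', [...])
node (a toy stand-in for word_part.BracedTuple) and leaves stray braces as
ordinary literals."""


def detect(parts, i=0, top=True):
    """Return (tree, next_index).

    A tree is a list of literal strings and ('alt', [alternative, ...]) nodes,
    where each alternative is itself such a list.  When top is False we are
    inside a '{' and return (None, i) if it never closes or has no comma."""
    out = []      # parts of the current word or current alternative
    alts = []     # completed alternatives (only used when top is False)
    while i < len(parts):
        p = parts[i]
        if p == '{':
            sub, j = detect(parts, i + 1, top=False)
            if sub is None:
                out.append(p)         # unmatched or comma-less '{': keep literal
                i += 1
            else:
                out.append(sub)
                i = j
        elif p == ',' and not top:
            alts.append(out)          # end of one alternative
            out = []
            i += 1
        elif p == '}' and not top:
            alts.append(out)
            if len(alts) < 2:         # '{x}' without a comma is not expanded
                return None, i
            return ('alt', alts), i + 1
        else:
            out.append(p)             # everything else stays a literal
            i += 1
    if top:
        return out, i
    return None, i                    # ran out of parts before a closing '}'


if __name__ == '__main__':
    tree, _ = detect(['B-', '{', 'a', ',', 'b', '}', '-E'])
    print(tree)   # ['B-', ('alt', [['a'], ['b']]), '-E']
    tree, _ = detect(['}'])
    print(tree)   # ['}'] -- a lone brace stays a plain literal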
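The two compound_word lines after the last tree are from the expansion pass: each BracedTuple is replaced by one of its alternatives in turn, so 'B-{a,b}-E' yields B-a-E and B-b-E, and a word with two tuples (the 'B-{a,b}-{c,d}-E' case later in this log) yields a 2 x 2 cross product. The following sketch shows that cross product over the same toy tree shape as the detection sketch; expand() and the ('alt', ...) node are illustrative names only, not the real API.

#!/usr/bin/env python3
"""Minimal sketch of the brace-expansion cross product, NOT the real braces.py.

A detected word is a list whose items are literal strings or ('alt', [...])
alternation nodes, the toy stand-in for word_part.BracedTuple used above.
Each alternation multiplies the number of output words."""


def expand(tree):
    """Return every expanded word as a list of literal parts."""
    words = [[]]                      # start with a single empty word
    for part in tree:
        if isinstance(part, tuple) and part[0] == 'alt':
            # Alternatives can nest, so expand each one recursively first.
            alt_words = []
            for alt in part[1]:
                alt_words.extend(expand(alt))
            # Cross product: every existing prefix pairs with every alternative.
            words = [w + aw for w in words for aw in alt_words]
        else:
            words = [w + [part] for w in words]
    return words


if __name__ == '__main__':
    tree = ['B-', ('alt', [['a'], ['b']]), '-', ('alt', [['c'], ['d']]), '-E']
    for w in expand(tree):
        print(''.join(w))             # B-a-c-E  B-a-d-E  B-b-c-E  B-b-d-E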
('\n---', 'B-{a,={b,c,d}=,e}-E')
{(B-) (Id.Lit_LBrace '{') (a) (Id.Lit_Comma ',') (Id.Lit_Equals '=') (Id.Lit_LBrace '{') (b) (Id.Lit_Comma ',') (c) (Id.Lit_Comma ',') (d) (Id.Lit_RBrace '}') (Id.Lit_Equals '=') (Id.Lit_Comma ',') (e) (Id.Lit_RBrace '}') (-E) }
(word.BracedTree parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2))] ) (compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Equals val:'=' span_id:4)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:6))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:c span_id:8))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:d span_id:10))] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Equals val:'=' span_id:12)) ] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:e span_id:14))] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:16)) ] )
(compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2)) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:16)) ] )
(compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.Literal token:(Token id:Id.Lit_Equals val:'=' span_id:4)) (word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:6)) (word_part.Literal token:(Token id:Id.Lit_Equals val:'=' span_id:12)) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:16)) ] )
(compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.Literal token:(Token id:Id.Lit_Equals val:'=' span_id:4)) (word_part.Literal token:(Token id:Id.Lit_Chars val:c span_id:8)) (word_part.Literal token:(Token id:Id.Lit_Equals val:'=' span_id:12)) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:16)) ] )
(compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.Literal token:(Token id:Id.Lit_Equals val:'=' span_id:4)) (word_part.Literal token:(Token id:Id.Lit_Chars val:d span_id:10)) (word_part.Literal token:(Token id:Id.Lit_Equals val:'=' span_id:12)) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:16)) ] )
(compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.Literal token:(Token id:Id.Lit_Chars val:e span_id:14)) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:16)) ] )

('\n---', 'B-{a,b}-{c,d}-E')
{(B-) (Id.Lit_LBrace '{') (a) (Id.Lit_Comma ',') (b) (Id.Lit_RBrace '}') (-) (Id.Lit_LBrace '{') (c) (Id.Lit_Comma ',') (d) (Id.Lit_RBrace '}') (-E) }
(word.BracedTree parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:4))] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Chars val:- span_id:6)) (word_part.BracedTuple words: [ (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:c span_id:8))] ) (compound_word parts: [(word_part.Literal token:(Token id:Id.Lit_Chars val:d span_id:10))] ) ] ) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:12)) ] )
(compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2)) (word_part.Literal token:(Token id:Id.Lit_Chars val:- span_id:6)) (word_part.Literal token:(Token id:Id.Lit_Chars val:c span_id:8)) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:12)) ] )
(compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.Literal token:(Token id:Id.Lit_Chars val:a span_id:2)) (word_part.Literal token:(Token id:Id.Lit_Chars val:- span_id:6)) (word_part.Literal token:(Token id:Id.Lit_Chars val:d span_id:10)) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:12)) ] )
(compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:4)) (word_part.Literal token:(Token id:Id.Lit_Chars val:- span_id:6)) (word_part.Literal token:(Token id:Id.Lit_Chars val:c span_id:8)) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:12)) ] )
(compound_word parts: [ (word_part.Literal token:(Token id:Id.Lit_Chars val:B- span_id:0)) (word_part.Literal token:(Token id:Id.Lit_Chars val:b span_id:4)) (word_part.Literal token:(Token id:Id.Lit_Chars val:- span_id:6)) (word_part.Literal token:(Token id:Id.Lit_Chars val:d span_id:10)) (word_part.Literal token:(Token id:Id.Lit_Chars val:-E span_id:12)) ] )

'' None
'1' None
'1..' None
'1' (word_part.BracedRange kind:Id.Range_Int start:1 end:3 step:1 spids:[None])
'3' (word_part.BracedRange kind:Id.Range_Int start:3 end:-10 step:-2 spids:[None])
'3..-10..-2..' None
'a' None
'a..' None
'a' (word_part.BracedRange kind:Id.Range_Char start:a end:z step:1 spids:[None])
'a..z..' None
'z' (word_part.BracedRange kind:Id.Range_Char start:z end:a step:-1 spids:[None])

...
----------------------------------------------------------------------
Ran 3 tests in 0.015s

OK
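The quoted string / result pairs just before the test summary are from range detection: parts shaped like start..end or start..end..step become word_part.BracedRange nodes with an integer or character kind, and malformed inputs such as '', '1..' and 'a..z..' come back as None. The sketch below shows one plausible way to detect and expand such ranges; the regexes, the defaulting of the step to 1 (or -1 for a descending range), and the function names are assumptions for illustration, not the behavior of the real _RangePartDetect.

#!/usr/bin/env python3
"""Hypothetical sketch of BracedRange detection and expansion, NOT the real
_RangePartDetect.  Assumption: a range part looks like 'start..end' or
'start..end..step' with integer or single-letter endpoints; anything else
(empty string, '1..', 'a..z..') is rejected, matching the None results above."""
import re

INT_RANGE = re.compile(r'^(-?\d+)\.\.(-?\d+)(?:\.\.(-?\d+))?$')
CHAR_RANGE = re.compile(r'^([a-zA-Z])\.\.([a-zA-Z])(?:\.\.(-?\d+))?$')


def range_detect(s):
    """Return ('int' | 'char', start, end, step) or None."""
    m = INT_RANGE.match(s)
    if m:
        start, end = int(m.group(1)), int(m.group(2))
        # Assumed default: count up by 1, or down by 1 for a descending range.
        step = int(m.group(3)) if m.group(3) else (1 if start <= end else -1)
        return ('int', start, end, step)
    m = CHAR_RANGE.match(s)
    if m:
        start, end = m.group(1), m.group(2)
        step = int(m.group(3)) if m.group(3) else (1 if start <= end else -1)
        return ('char', start, end, step)
    return None


def range_expand(kind, start, end, step):
    """Expand a detected range into its items, inclusive of the endpoint."""
    if kind == 'char':
        start, end = ord(start), ord(end)
    items = []
    i = start
    while (i <= end) if step > 0 else (i >= end):
        items.append(chr(i) if kind == 'char' else str(i))
        i += step
    return items


if __name__ == '__main__':
    for s in ['', '1', '1..', '1..3', '3..-10..-2', 'a..z..', 'z..a']:
        print(repr(s), range_detect(s))
    print(range_expand('int', 3, -10, -2))   # ['3', '1', '-1', ..., '-9']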