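# Lexer tests for Hy's reader (hy.lex.tokenize): expression and symbol
# handling, digit separators, dict literals, the #_ discard syntax,
# NaN/Inf parsing, and fraction literals.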

from math import isnan

# Imports are restored here so the snippet is self-contained; the names match
# the pre-1.0 Hy model classes and lexer that the assertions below rely on.
from hy.lex import tokenize
from hy.models import (HyComplex, HyDict, HyExpression, HyFloat, HyInteger,
                       HyKeyword, HyList, HySet, HySymbol)


def test_fn_split():
    """Test that two top-level statements are split and read correctly."""
    # The input is missing from the original snippet; this value is inferred
    # from the assertions below.
    code = "(+ 1 1) (fn foo bar)"
    one, two = tokenize(code)
    assert one.get_invocation() == {
        "function": "+",
        "args": [
            1, 1
        ]
    }
    assert two.get_invocation() == {
        "function": "fn",
        "args": [
            "foo", "bar"
        ]
    }

def test_lex_expression_symbols():
    """ Make sure that expressions produce symbols """
    objs = tokenize("(foo bar)")
    assert objs == [HyExpression([HySymbol("foo"), HySymbol("bar")])]

def test_lex_digit_separators():
    assert tokenize("1_000_000") == [HyInteger(1000000)]
    assert tokenize("1,000,000") == [HyInteger(1000000)]
    assert tokenize("1,000_000") == [HyInteger(1000000)]
    assert tokenize("1_000,000") == [HyInteger(1000000)]

    assert tokenize("0x_af") == [HyInteger(0xaf)]
    assert tokenize("0x,af") == [HyInteger(0xaf)]
    assert tokenize("0b_010") == [HyInteger(0b010)]
    assert tokenize("0b,010") == [HyInteger(0b010)]
    assert tokenize("0o_373") == [HyInteger(0o373)]
    assert tokenize("0o,373") == [HyInteger(0o373)]

    assert tokenize('1_2.3,4') == [HyFloat(12.34)]
    assert tokenize('1_2e3,4') == [HyFloat(12e34)]
    assert (tokenize("1,2/3_4") ==
            [HyExpression([HySymbol("fraction"),
                           HyInteger(12), HyInteger(34)])])
    assert tokenize("1,0_00j") == [HyComplex(1000j)]

    assert tokenize("1,,,,___,____,,__,,2__,,,__") == [HyInteger(12)]
    assert (tokenize("_1,,,,___,____,,__,,2__,,,__") ==
            [HySymbol("_1,,,,___,____,,__,,2__,,,__")])
    assert (tokenize("1,,,,___,____,,__,,2__,q,__") ==
            [HySymbol("1,,,,___,____,,__,,2__,q,__")])

def test_dicts():
    """ Ensure that we can tokenize a dict. """
    objs = tokenize("{foo bar bar baz}")
    assert objs == [HyDict(["foo", "bar", "bar", "baz"])]

    objs = tokenize("(bar {foo bar bar baz})")
    assert objs == [HyExpression([HySymbol("bar"),
                                  HyDict(["foo", "bar",
                                          "bar", "baz"])])]

    objs = tokenize("{(foo bar) (baz quux)}")
    assert objs == [HyDict([
        HyExpression([HySymbol("foo"), HySymbol("bar")]),
        HyExpression([HySymbol("baz"), HySymbol("quux")])
    ])]
assert tokenize("[#_ #_0 1 2]") == [HyList([HyInteger(2)])]
# in HySet
assert tokenize("#{}") == [HySet()]
assert tokenize("#{#_1}") == [HySet()]
assert tokenize("#{0 #_1}") == [HySet([HyInteger(0)])]
assert tokenize("#{#_1 0}") == [HySet([HyInteger(0)])]
# in HyDict
assert tokenize("{}") == [HyDict()]
assert tokenize("{#_1}") == [HyDict()]
assert tokenize("{#_0 1 2}") == [HyDict([HyInteger(1), HyInteger(2)])]
assert tokenize("{1 #_0 2}") == [HyDict([HyInteger(1), HyInteger(2)])]
assert tokenize("{1 2 #_0}") == [HyDict([HyInteger(1), HyInteger(2)])]
# in HyExpression
assert tokenize("()") == [HyExpression()]
assert tokenize("(#_foo)") == [HyExpression()]
assert tokenize("(#_foo bar)") == [HyExpression([HySymbol("bar")])]
assert tokenize("(foo #_bar)") == [HyExpression([HySymbol("foo")])]
assert tokenize("(foo :bar 1)") == [HyExpression([HySymbol("foo"), HyKeyword("bar"), HyInteger(1)])]
assert tokenize("(foo #_:bar 1)") == [HyExpression([HySymbol("foo"), HyInteger(1)])]
assert tokenize("(foo :bar #_1)") == [HyExpression([HySymbol("foo"), HyKeyword("bar")])]
# discard term with nesting
assert tokenize("[1 2 #_[a b c [d e [f g] h]] 3 4]") == [
HyList([HyInteger(1), HyInteger(2), HyInteger(3), HyInteger(4)])
]
# discard with other prefix syntax
assert tokenize("a #_'b c") == [HySymbol("a"), HySymbol("c")]
assert tokenize("a '#_b c") == [HySymbol("a"), HyExpression([HySymbol("quote"), HySymbol("c")])]
assert tokenize("a '#_b #_c d") == [HySymbol("a"), HyExpression([HySymbol("quote"), HySymbol("d")])]
assert tokenize("a '#_ #_b c d") == [HySymbol("a"), HyExpression([HySymbol("quote"), HySymbol("d")])]

def test_nospace():
    """ Ensure we can tokenize without spaces if we have to """
    entry = tokenize("(foo(one two))")[0]

    assert entry.start_line == 1
    assert entry.start_column == 1
    assert entry.end_line == 1
    assert entry.end_column == 14

    entry = entry[1]
    assert entry.start_line == 1
    assert entry.start_column == 5
    assert entry.end_line == 1
    assert entry.end_column == 13

def test_lex_nan_and_inf():
    assert isnan(tokenize("NaN")[0])
    assert tokenize("Nan") == [HySymbol("Nan")]
    assert tokenize("nan") == [HySymbol("nan")]
    assert tokenize("NAN") == [HySymbol("NAN")]

    assert tokenize("Inf") == [HyFloat(float("inf"))]
    assert tokenize("inf") == [HySymbol("inf")]
    assert tokenize("INF") == [HySymbol("INF")]

    assert tokenize("-Inf") == [HyFloat(float("-inf"))]
    assert tokenize("-inf") == [HySymbol("-inf")]
    assert tokenize("-INF") == [HySymbol("-INF")]

def test_lex_fractions():
    """ Make sure that fractions are valid expressions """
    objs = tokenize("1/2")
    assert objs == [HyExpression([HySymbol("fraction"), HyInteger(1),
                                  HyInteger(2)])]