import py
import error
from lexer import tokenize

TEXT_TAB_LOGIC1 = """\
foo
    bar
"""
def test_tab_logic1():
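    """An indented block is bracketed by 'indent'/'dedent' tokens."""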
    tokens = tokenize('test', TEXT_TAB_LOGIC1)
    str_tokens = [str(t) for t in tokens]
    assert str_tokens == ["'foo'", "'nl'",
                          "'indent'", "'bar'", "'nl'",
                          "'dedent'"]

def test_invalid_dedent():
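    """A dedent must land on a previously opened level; 'b' at column 6
    sits between the open levels 4 and 8, so tokenize must reject it."""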
    test1 = """
    a
        a
      b
    """
    py.test.raises(
        error.InvalidIndentationError,
        lambda: tokenize('test', test1))

def test_no_tabs():
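    """Literal tab characters are rejected outright with NoTabsError."""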
    py.test.raises(
        error.NoTabsError,
        lambda: tokenize('test', "\t"))
