#!/usr/bin/python3
# =============================================================
# The tokenize module provides a lexical scanner for
# Python source code, implemented in Python.
# -------------------------------------------------------------
# documentation:
# a. docs.python.org/3/library/tokenize.html
# b. www.geeksforgeeks.org/stringio-module-in-python/
# =============================================================
from tokenize import tokenize, generate_tokens, \
NUMBER, STRING, NAME, OP
from io import StringIO
import user_interface as ui
# -------------------------------------------------------------
# ---- create tokens
# -------------------------------------------------------------
def tok_it(text):
    """Tokenize *text* and print the list of token strings.

    Parameters:
        text (str): source text to scan, e.g. an infix expression
            such as 'a = (23/45) + 0.0 -2.2E-2'.

    Returns:
        list[str]: the token strings, in order, including the empty
        strings tokenize emits for the implicit NEWLINE/ENDMARKER
        tokens.  Returns [] when the input cannot be tokenized.
    """
    # Local import: the top-of-file 'from tokenize import tokenize, ...'
    # shadows the module name, so TokenError must be fetched here.
    from tokenize import TokenError
    print()
    try:
        tokens = [tok.string
                  for tok in generate_tokens(StringIO(text).readline)]
    except TokenError as err:
        # Malformed input (e.g. unbalanced parentheses) -- report it
        # instead of crashing the caller's input loop.
        print('tokenize error:', err)
        return []
    print(tokens)
    return tokens
# -------------------------------------------------------------
# ---- main
# ---- example expression: 'a = (23/45) + 0.0 -2.2E-2'
# -------------------------------------------------------------
# ---- read expressions until the user enters an empty line
while True:
    print()
    expression = ui.get_user_input('Enter infix expression: ')
    if expression:
        tok_it(expression)
    else:
        # empty input ends the session
        break