@@ -35,6 +35,8 @@
 from token import *
 from token import EXACT_TOKEN_TYPES
 import _tokenize
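+# Deferred import (PEP 810 lazy import): _colorize is only loaded on first use.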
+lazy import _colorize
 
 cookie_re = re.compile(br'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII)
 blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII)
@@ -505,6 +507,34 @@ def generate_tokens(readline):
     """
     return _generate_tokens_from_c_tokenizer(readline, extra_tokens=True)
 
+
+def _get_token_colors(syntax, tokenize):
+    """Map token type numbers to theme colors."""
+    return {
+        COMMENT: syntax.comment,
+        DEDENT: tokenize.whitespace,
+        ENCODING: tokenize.whitespace,
+        ENDMARKER: tokenize.whitespace,
+        ERRORTOKEN: tokenize.error,
+        FSTRING_START: syntax.string,
+        FSTRING_MIDDLE: syntax.string,
+        FSTRING_END: syntax.string,
+        INDENT: tokenize.whitespace,
+        NAME: syntax.reset,
+        NEWLINE: tokenize.whitespace,
+        NL: tokenize.whitespace,
+        NUMBER: syntax.number,
+        OP: syntax.op,
+        SOFT_KEYWORD: syntax.soft_keyword,
+        STRING: syntax.string,
+        TSTRING_START: syntax.string,
+        TSTRING_MIDDLE: syntax.string,
+        TSTRING_END: syntax.string,
+    }
+
+
 def _main(args=None):
     import argparse
 
@@ -524,7 +554,7 @@ def error(message, filename=None, location=None):
         sys.exit(1)
 
     # Parse the arguments and options
-    parser = argparse.ArgumentParser(color=True)
+    parser = argparse.ArgumentParser()
     parser.add_argument(dest='filename', nargs='?',
                         metavar='filename.py',
                         help='the file to tokenize; defaults to stdin')
@@ -545,13 +575,35 @@ def error(message, filename=None, location=None):
 
 
         # Output the tokenization
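+        # The active theme supplies two sections used here: "syntax" colors
+        # the token text, while "tokenize" colors positions, delimiters,
+        # errors and whitespace-only tokens.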
+        _theme = _colorize.get_theme()
+        s = _theme.syntax
+        t = _theme.tokenize
+        _token_colors = _get_token_colors(s, t)
         for token in tokens:
             token_type = token.type
             if args.exact:
                 token_type = token.exact_type
-            token_range = "%d,%d-%d,%d:" % (token.start + token.end)
-            print("%-20s%-15s%-15r" %
-                  (token_range, tok_name[token_type], token.string))
+            token_range = (
+                f"{t.position}{token.start[0]}"
+                f"{t.delimiter},{t.position}{token.start[1]}"
+                f"{t.delimiter}-"
+                f"{t.position}{token.end[0]}"
+                f"{t.delimiter},{t.position}{token.end[1]}"
+                f"{t.delimiter}:"
+            )
+            color = _token_colors.get(token_type, s.reset)
+            token_name = tok_name[token_type]
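+            # token_range embeds ANSI escape sequences, so compute the
+            # column padding from the plain, uncolored text of the range.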
+            visible_range = f"{token.start[0]},{token.start[1]}-{token.end[0]},{token.end[1]}:"
+            print(
+                f"{token_range}{' ' * (20 - len(visible_range))}"
+                f"{color}{token_name:<15}"
+                f"{s.reset}{token.string!r:<15}"
+            )
     except IndentationError as err:
         line, column = err.args[1][1:3]
         error(err.args[0], filename, (line, column))