
"""
    pygments.lexers.special
    ~~~~~~~~~~~~~~~~~~~~~~~

    Special lexers.

    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import ast

from pygments.lexer import Lexer, line_re
from pygments.token import Token, Error, Text, Generic
from pygments.util import get_choice_opt

__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']


class TextLexer(Lexer):
    """
    "Null" lexer, doesn't highlight anything.
    """
    name = 'Text only'
    aliases = ['text']
    filenames = ['*.txt']
    mimetypes = ['text/plain']
    priority = 0.01

    def get_tokens_unprocessed(self, text):
        yield 0, Text, text

    def analyse_text(text):
        return TextLexer.priority


class OutputLexer(Lexer):
    """
    Simple lexer that highlights everything as ``Token.Generic.Output``.

    .. versionadded:: 2.10
    """
    name = 'Text output'
    aliases = ['output']

    def get_tokens_unprocessed(self, text):
        yield 0, Generic.Output, text


_ttype_cache = {}


class RawTokenLexer(Lexer):
    """
    Recreate a token stream formatted with the `RawTokenFormatter`.

    Additional options accepted:

    `compress`
        If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
        the given compression algorithm before lexing (default: ``""``).
    """
    name = 'Raw token data'
    aliases = []
    filenames = []
    mimetypes = ['application/x-pygments-tokens']

    def __init__(self, **options):
        self.compress = get_choice_opt(options, 'compress',
                                       ['', 'none', 'gz', 'bz2'], '')
        Lexer.__init__(self, **options)

    def get_tokens(self, text):
        if self.compress:
            if isinstance(text, str):
                text = text.encode('latin1')
            try:
                if self.compress == 'gz':
                    import gzip
                    text = gzip.decompress(text)
                elif self.compress == 'bz2':
                    import bz2
                    text = bz2.decompress(text)
            except OSError:
                yield Error, text.decode()
        if isinstance(text, bytes):
            text = text.decode('latin1')

        # do not call Lexer.get_tokens() here because stripping is not optional
        text = text.strip('\n') + '\n'
        for i, t, v in self.get_tokens_unprocessed(text):
            yield t, v

    def get_tokens_unprocessed(self, text):
        length = 0
        for match in line_re.finditer(text):
            try:
                ttypestr, val = match.group().rstrip().split('\t', 1)
                ttype = _ttype_cache.get(ttypestr)
                if not ttype:
                    ttype = Token
                    ttypes = ttypestr.split('.')[1:]
                    for ttype_ in ttypes:
                        if not ttype_ or not ttype_[0].isupper():
                            raise ValueError('malformed token name')
                        ttype = getattr(ttype, ttype_)
                    _ttype_cache[ttypestr] = ttype
                val = ast.literal_eval(val)
                if not isinstance(val, str):
                    raise ValueError('expected str')
            except (SyntaxError, ValueError):
                val = match.group()
                ttype = Error
            yield length, ttype, val
            length += len(val)
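

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module): a minimal
# round trip, assuming the standard public Pygments API (`highlight`,
# `RawTokenFormatter`, `TerminalFormatter`, `PythonLexer`).  RawTokenFormatter
# emits "<token type>\t<repr(value)>" lines, which RawTokenLexer above parses
# back into a token stream.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import RawTokenFormatter, TerminalFormatter
    from pygments.lexers import PythonLexer

    source = 'print("hello world")\n'
    # Serialize Python tokens into the raw token format (bytes).
    raw = highlight(source, PythonLexer(), RawTokenFormatter())
    # RawTokenLexer.get_tokens() accepts the bytes and decodes them itself;
    # re-render the recovered token stream for the terminal.
    print(highlight(raw, RawTokenLexer(), TerminalFormatter()))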