"""
    pygments.lexers.special
    ~~~~~~~~~~~~~~~~~~~~~~~

    Special lexers.

    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import ast

from pygments.lexer import Lexer, line_re
from pygments.token import Token, Error, Text, Generic
from pygments.util import get_choice_opt

__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']


class TextLexer(Lexer):
    """
    "Null" lexer, doesn't highlight anything.
    """
    name = 'Text only'
    aliases = ['text']
    filenames = ['*.txt']
    mimetypes = ['text/plain']
    priority = 0.01

    def get_tokens_unprocessed(self, text):
        yield 0, Text, text

    def analyse_text(text):
        return TextLexer.priority


class OutputLexer(Lexer):
    """
    Simple lexer that highlights everything as ``Token.Generic.Output``.

    .. versionadded:: 2.10
    """
    name = 'Text output'
    aliases = ['output']

    def get_tokens_unprocessed(self, text):
        yield 0, Generic.Output, text


_ttype_cache = {}


class RawTokenLexer(Lexer):
    """
    Recreate a token stream formatted with the `RawTokenFormatter`.

    Additional options accepted:

    `compress`
        If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
        the given compression algorithm before lexing (default: ``""``).
    """
    name = 'Raw token data'
    aliases = []
    filenames = []
    mimetypes = ['application/x-pygments-tokens']

    def __init__(self, **options):
        self.compress = get_choice_opt(options, 'compress',
                                       ['', 'none', 'gz', 'bz2'], '')
        Lexer.__init__(self, **options)

    def get_tokens(self, text):
        if self.compress:
            if isinstance(text, str):
                text = text.encode('latin1')
            try:
                if self.compress == 'gz':
                    import gzip
                    text = gzip.decompress(text)
                elif self.compress == 'bz2':
                    import bz2
                    text = bz2.decompress(text)
            except OSError:
                yield Error, text.decode('latin1')
        if isinstance(text, bytes):
            text = text.decode('latin1')

        # Normalize trailing newlines before tokenizing the raw dump.
        text = text.strip('\n') + '\n'
        for i, t, v in self.get_tokens_unprocessed(text):
            yield t, v

    def get_tokens_unprocessed(self, text):
        length = 0
        for match in line_re.finditer(text):
            try:
                # Each line of the dump is "<token type>\t<repr of value>".
                ttypestr, val = match.group().rstrip().split('\t', 1)
                ttype = _ttype_cache.get(ttypestr)
                if not ttype:
                    # Resolve "Token.X.Y" into the actual token type object.
                    ttype = Token
                    ttypes = ttypestr.split('.')[1:]
                    for ttype_ in ttypes:
                        if not ttype_ or not ttype_[0].isupper():
                            raise ValueError('malformed token name')
                        ttype = getattr(ttype, ttype_)
                    _ttype_cache[ttypestr] = ttype
                val = ast.literal_eval(val)
                if not isinstance(val, str):
                    raise ValueError('expected str')
            except (SyntaxError, ValueError):
                # Malformed line: emit it verbatim as an Error token.
                val = match.group()
                ttype = Error
            yield length, ttype, val
            length += len(val)
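

# ---------------------------------------------------------------------------
# Usage sketch (illustrative addition, not part of the upstream module):
# a minimal round trip that serializes a token stream with
# `RawTokenFormatter` and feeds the raw dump back through `RawTokenLexer`.
# It assumes a standard Pygments installation providing `highlight`,
# `PythonLexer`, and `RawTokenFormatter`.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import RawTokenFormatter
    from pygments.lexers import PythonLexer

    source = 'print("hello, world")\n'

    # RawTokenFormatter emits one "<token type>\t<repr of value>" pair per
    # line; because it sets an encoding, highlight() returns bytes here,
    # which RawTokenLexer.get_tokens() accepts and decodes as latin1.
    raw = highlight(source, PythonLexer(), RawTokenFormatter())

    # RawTokenLexer reconstructs the (token type, value) stream from the dump.
    for ttype, value in RawTokenLexer().get_tokens(raw):
        print(ttype, repr(value))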
