"""
This tokenizer has been copied from the ``tokenize.py`` standard library
tokenizer. The reason was simple: The standard library tokenizer fails
if the indentation is not right. To make it possible to do error recovery the
    tokenizer needed to be rewritten.

Basically this is a stripped down version of the standard library module, so
you can read the documentation there. Additionally we included some speed and
memory optimizations here.
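
A minimal usage sketch, mirroring the ``__main__`` block at the bottom of this
module (the exact token stream depends on the grammar version that is passed
in)::

    from parso.python.tokenize import tokenize
    from parso.utils import parse_version_string

    for token in tokenize("a + 1", version_info=parse_version_string('3.8')):
        print(token)
    # roughly: NAME 'a', OP '+', NUMBER '1', ENDMARKER ''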
"""
from __future__ import absolute_import

import sys
import re
import itertools as _itertools
from codecs import BOM_UTF8
from typing import NamedTuple, Tuple, Iterator, Iterable, List, Dict, \
    Pattern, Set

from parso.python.token import PythonTokenTypes
from parso.utils import split_lines, PythonVersionInfo, parse_version_string


# The highest Unicode code point; used as the upper bound for identifier
# characters.
MAX_UNICODE = '\U0010ffff'

STRING = PythonTokenTypes.STRING
NAME = PythonTokenTypes.NAME
NUMBER = PythonTokenTypes.NUMBER
OP = PythonTokenTypes.OP
NEWLINE = PythonTokenTypes.NEWLINE
INDENT = PythonTokenTypes.INDENT
DEDENT = PythonTokenTypes.DEDENT
ENDMARKER = PythonTokenTypes.ENDMARKER
ERRORTOKEN = PythonTokenTypes.ERRORTOKEN
ERROR_DEDENT = PythonTokenTypes.ERROR_DEDENT
FSTRING_START = PythonTokenTypes.FSTRING_START
FSTRING_STRING = PythonTokenTypes.FSTRING_STRING
FSTRING_END = PythonTokenTypes.FSTRING_END


class TokenCollection(NamedTuple):
    pseudo_token: Pattern
    single_quoted: Set[str]
    triple_quoted: Set[str]
    endpats: Dict[str, Pattern]
    whitespace: Pattern
    fstring_pattern_map: Dict[str, str]
    always_break_tokens: Tuple[str]


BOM_UTF8_STRING = BOM_UTF8.decode('utf-8')

_token_collection_cache: Dict[PythonVersionInfo, TokenCollection] = {}


def group(*choices, capture=False, **kwargs):
    assert not kwargs

    start = '('
    if not capture:
        start += '?:'
    return start + '|'.join(choices) + ')'


def maybe(*choices):
    return group(*choices) + '?'


def _all_string_prefixes(*, include_fstring=False, only_fstring=False):
    def different_case_versions(prefix):
        for s in _itertools.product(*[(c, c.upper()) for c in prefix]):
            yield ''.join(s)

    # The valid string prefixes. Only contains the lowercase versions and no
    # permutations (e.g. 'br', but not 'rb'); the permutations and the
    # upper/lower case combinations are generated below.
    valid_string_prefixes = ['b', 'r', 'u', 'br']

    result = {''}
    if include_fstring:
        f = ['f', 'fr']
        if only_fstring:
            valid_string_prefixes = f
            result = set()
        else:
            valid_string_prefixes += f
    elif only_fstring:
        return set()

    for prefix in valid_string_prefixes:
        for t in _itertools.permutations(prefix):
            # Create all upper/lower case variants of each permutation.
            result.update(different_case_versions(t))
    return result


def _compile(expr):
    return re.compile(expr, re.UNICODE)


def _get_token_collection(version_info):
    try:
        return _token_collection_cache[tuple(version_info)]
    except KeyError:
        _token_collection_cache[tuple(version_info)] = result = \
            _create_token_collection(version_info)
        return result


unicode_character_name = r'[A-Za-z0-9\-]+(?: [A-Za-z0-9\-]+)*'
fstring_string_single_line = _compile(
    r'(?:\{\{|\}\}|\\N\{' + unicode_character_name
    + r'\}|\\(?:\r\n?|\n)|\\[^\r\nN]|[^{}\r\n\\])+'
)
fstring_string_multi_line = _compile(
    r'(?:\{\{|\}\}|\\N\{' + unicode_character_name + r'\}|\\[^N]|[^{}\\])+'
)
fstring_format_spec_single_line = _compile(r'(?:\\(?:\r\n?|\n)|[^{}\r\n])+')
fstring_format_spec_multi_line = _compile(r'[^{}]+')


def _create_token_collection(version_info):
    # Note: we use unicode matching for names ("\w") but ascii matching for
    # number literals.
    Whitespace = r'[ \f\t]*'
    whitespace = _compile(Whitespace)
    Comment = r'#[^\r\n]*'
    Name = '([A-Za-z_0-9\u0080-' + MAX_UNICODE + ']+)'

    Hexnumber = r'0[xX](?:_?[0-9a-fA-F])+'
    Binnumber = r'0[bB](?:_?[01])+'
    Octnumber = r'0[oO](?:_?[0-7])+'
    Decnumber = r'(?:0(?:_?0)*|[1-9](?:_?[0-9])*)'
    Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
    Exponent = r'[eE][-+]?[0-9](?:_?[0-9])*'
    Pointfloat = group(r'[0-9](?:_?[0-9])*\.(?:[0-9](?:_?[0-9])*)?',
                       r'\.[0-9](?:_?[0-9])*') + maybe(Exponent)
    Expfloat = r'[0-9](?:_?[0-9])*' + Exponent
    Floatnumber = group(Pointfloat, Expfloat)
    Imagnumber = group(r'[0-9](?:_?[0-9])*[jJ]', Floatnumber + r'[jJ]')
    Number = group(Imagnumber, Floatnumber, Intnumber)

    # Since _all_string_prefixes includes the empty string, StringPrefix can
    # be the empty string (making the prefix optional).
    possible_prefixes = _all_string_prefixes()
    StringPrefix = group(*possible_prefixes)
    StringPrefixWithF = group(*_all_string_prefixes(include_fstring=True))
    fstring_prefixes = _all_string_prefixes(include_fstring=True, only_fstring=True)
    FStringStart = group(*fstring_prefixes)

    # Tail end of ' string.
    Single = r"(?:\\.|[^'\\])*'"
    # Tail end of " string.
    Double = r'(?:\\.|[^"\\])*"'
    # Tail end of ''' string.
    Single3 = r"(?:\\.|'(?!'')|[^'\\])*'''"
    # Tail end of """ string.
    Double3 = r'(?:\\.|"(?!"")|[^"\\])*"""'
    Triple = group(StringPrefixWithF + "'''", StringPrefixWithF + '"""')

    # Because of leftmost-then-longest match semantics, be sure to put the
    # longest operators first (e.g., if = came before ==, == would get
    # recognized as two instances of =).
    Operator = group(r"\*\*=?", r">>=?", r"<<=?",
                     r"//=?", r"->",
                     r"[+\-*/%&@`|^!=<>]=?",
                     r"~")

    Bracket = '[][(){}]'

    special_args = [r'\.\.\.', r'\r\n?', r'\n', r'[;.,@]']
    if version_info >= (3, 8):
        special_args.insert(0, ":=?")
    else:
        special_args.insert(0, ":")
    Special = group(*special_args)

    Funny = group(Operator, Bracket, Special)

    # First (or only) line of ' or " string.
    ContStr = group(StringPrefix + r"'[^\r\n'\\]*(?:\\.[^\r\n'\\]*)*"
                    + group("'", r'\\(?:\r\n?|\n)'),
                    StringPrefix + r'"[^\r\n"\\]*(?:\\.[^\r\n"\\]*)*'
                    + group('"', r'\\(?:\r\n?|\n)'))
    pseudo_extra_pool = [Comment, Triple]
    all_quotes = '"', "'", '"""', "'''"
    if fstring_prefixes:
        pseudo_extra_pool.append(FStringStart + group(*all_quotes))

    PseudoExtras = group(r'\\(?:\r\n?|\n)|\Z', *pseudo_extra_pool)
    PseudoToken = group(Whitespace, capture=True) + \
        group(PseudoExtras, Number, Funny, ContStr, Name, capture=True)

    # For a given string prefix plus quotes, endpats maps it to a regex to
    # match the remainder of that string. The prefix can be empty, for a
    # normal single or triple quoted string (with no prefix).
    endpats = {}
    for _prefix in possible_prefixes:
        endpats[_prefix + "'"] = _compile(Single)
        endpats[_prefix + '"'] = _compile(Double)
        endpats[_prefix + "'''"] = _compile(Single3)
        endpats[_prefix + '"""'] = _compile(Double3)

    # Sets of all single and triple quoted string prefixes, including the
    # opening quotes.
    single_quoted = set()
    triple_quoted = set()
    fstring_pattern_map = {}
    for t in possible_prefixes:
        for quote in '"', "'":
            single_quoted.add(t + quote)

        for quote in '"""', "'''":
            triple_quoted.add(t + quote)

    for t in fstring_prefixes:
        for quote in all_quotes:
            fstring_pattern_map[t + quote] = quote

    ALWAYS_BREAK_TOKENS = (';', 'import', 'class', 'def', 'try', 'except',
                           'finally', 'while', 'with', 'return', 'continue',
                           'break', 'del', 'pass', 'global', 'assert',
                           'nonlocal')
    pseudo_token_compiled = _compile(PseudoToken)
    return TokenCollection(
        pseudo_token_compiled, single_quoted, triple_quoted, endpats,
        whitespace, fstring_pattern_map, set(ALWAYS_BREAK_TOKENS)
    )


class Token(NamedTuple):
    type: PythonTokenTypes
    string: str
    start_pos: Tuple[int, int]
    prefix: str

    @property
    def end_pos(self) -> Tuple[int, int]:
        lines = split_lines(self.string)
        if len(lines) > 1:
            return self.start_pos[0] + len(lines) - 1, 0
        else:
            return self.start_pos[0], self.start_pos[1] + len(self.string)


class PythonToken(Token):
    def __repr__(self):
        return ('TokenInfo(type=%s, string=%r, start_pos=%r, prefix=%r)' %
                self._replace(type=self.type.name))


class FStringNode:
    def __init__(self, quote):
        self.quote = quote
        self.parentheses_count = 0
        self.previous_lines = ''
        self.last_string_start_pos = None
        # In the syntax there can be multiple format_spec's nested:
        # {x:{y:3}}
        self.format_spec_count = 0

    def open_parentheses(self, character):
        self.parentheses_count += 1

    def close_parentheses(self, character):
        self.parentheses_count -= 1
        if self.parentheses_count == 0:
            # No open parentheses means that the format spec is finished, too.
            self.format_spec_count = 0

    def allow_multiline(self):
        return len(self.quote) == 3

    def is_in_expr(self):
        return self.parentheses_count > self.format_spec_count

    def is_in_format_spec(self):
        return not self.is_in_expr() and self.format_spec_count


def _close_fstring_if_necessary(fstring_stack, string, line_nr, column,
                                additional_prefix):
    for fstring_stack_index, node in enumerate(fstring_stack):
        lstripped_string = string.lstrip()
        len_lstrip = len(string) - len(lstripped_string)
        if lstripped_string.startswith(node.quote):
            token = PythonToken(
                FSTRING_END,
                node.quote,
                (line_nr, column + len_lstrip),
                prefix=additional_prefix + string[:len_lstrip],
            )
            additional_prefix = ''
            assert not node.previous_lines
            del fstring_stack[fstring_stack_index:]
            return token, '', len(node.quote) + len_lstrip
    return None, additional_prefix, 0


def _find_fstring_string(endpats, fstring_stack, line, lnum, pos):
    tos = fstring_stack[-1]
    allow_multiline = tos.allow_multiline()
    if tos.is_in_format_spec():
        if allow_multiline:
            regex = fstring_format_spec_multi_line
        else:
            regex = fstring_format_spec_single_line
    else:
        if allow_multiline:
            regex = fstring_string_multi_line
        else:
            regex = fstring_string_single_line

    match = regex.match(line, pos)
    if match is None:
        return tos.previous_lines, pos

    if not tos.previous_lines:
        tos.last_string_start_pos = (lnum, pos)

    string = match.group(0)
    for fstring_stack_node in fstring_stack:
        end_match = endpats[fstring_stack_node.quote].match(string)
        if end_match is not None:
            string = end_match.group(0)[:-len(fstring_stack_node.quote)]

    new_pos = pos
    new_pos += len(string)
    # Even if allow_multiline is False we still need to check for trailing
    # newlines, because a single-line f-string can contain line continuations.
    if string.endswith('\n') or string.endswith('\r'):
        tos.previous_lines += string
        string = ''
    else:
        string = tos.previous_lines + string

    return string, new_pos


def tokenize(code: str, *, version_info: PythonVersionInfo,
             start_pos: Tuple[int, int] = (1, 1)) -> Iterator[PythonToken]:
    """Generate tokens from the source code (string)."""
    lines = split_lines(code, keepends=True)
    return tokenize_lines(lines, version_info=version_info, start_pos=start_pos)


def _print_tokens(func):
    """
    A small helper function to help debug the tokenize_lines function.
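    Wrap ``tokenize_lines`` with it by hand while debugging (it is not applied
    by default); every token is printed right before it is yielded.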
    """
    def wrapper(*args, **kwargs):
        for token in func(*args, **kwargs):
            print(token)
            yield token

    return wrapper


def tokenize_lines(lines: Iterable[str], *, version_info: PythonVersionInfo,
                   indents: List[int] = None,
                   start_pos: Tuple[int, int] = (1, 1),
                   is_first_token=True) -> Iterator[PythonToken]:
    """
    A heavily modified Python standard library tokenizer.

    Additionally to the default information, yields also the prefix of each
    token. This idea comes from lib2to3. The prefix contains all information
    that is irrelevant for the parser like newlines in parentheses or comments.
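
    The extra keyword arguments exist so that callers (for example parso's
    diff parser) can resume tokenization in the middle of a file:
    ``start_pos`` is the position of the first yielded token, ``indents``
    carries the still-open indentation levels and ``is_first_token=False``
    skips the BOM/offset handling of a fresh run. A hedged sketch, assuming
    ``version`` and the ``indents`` list were computed by an earlier run::

        tokens = tokenize_lines(
            lines[10:], version_info=version, start_pos=(11, 1),
            indents=[0, 4], is_first_token=False,
        )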
    """
    def dedent_if_necessary(start):
        while start < indents[-1]:
            if start > indents[-2]:
                yield PythonToken(ERROR_DEDENT, '', (lnum, start), '')
                indents[-1] = start
                break
            indents.pop()
            yield PythonToken(DEDENT, '', spos, '')

    pseudo_token, single_quoted, triple_quoted, endpats, whitespace, \
        fstring_pattern_map, always_break_tokens, = \
        _get_token_collection(version_info)
    paren_level = 0  # count parentheses
    if indents is None:
        indents = [0]
    max_ = 0
    numchars = '0123456789'
    contstr = ''
    contline = None
    # We start with a newline. This makes indent at the first position
    # possible. It's not valid Python, but still better than an INDENT in the
    # second line (and not in the first). This makes quite a few things in
    # Jedi's fast parser possible.
    new_line = True
    prefix = ''  # Should never be required, but here for safety
    additional_prefix = ''
    lnum = start_pos[0] - 1
    fstring_stack = []
    for line in lines:  # loop over lines in stream
        lnum += 1
        pos = 0
        max_ = len(line)
        if is_first_token:
            if line.startswith(BOM_UTF8_STRING):
                additional_prefix = BOM_UTF8_STRING
                line = line[1:]
                max_ = len(line)

            # Fake that the part before was already parsed.
            line = '^' * start_pos[1] + line
            pos = start_pos[1]
            max_ += start_pos[1]

            is_first_token = False

        if contstr:                                      # continued string
            endmatch = endprog.match(line)
            if endmatch:
                pos = endmatch.end(0)
                yield PythonToken(
                    STRING, contstr + line[:pos], contstr_start, prefix)
                contstr = ''
                contline = None
            else:
                contstr = contstr + line
                contline = contline + line
                continue

        while pos < max_:
            if fstring_stack:
                tos = fstring_stack[-1]
                if not tos.is_in_expr():
                    string, pos = _find_fstring_string(endpats, fstring_stack,
                                                       line, lnum, pos)
                    if string:
                        yield PythonToken(
                            FSTRING_STRING, string,
                            tos.last_string_start_pos,
                            # Never has a prefix because it can start anywhere
                            # and include whitespace.
                            prefix=''
                        )
                        tos.previous_lines = ''
                        continue
                    if pos == max_:
                        break

                rest = line[pos:]
                fstring_end_token, additional_prefix, quote_length = \
                    _close_fstring_if_necessary(
                        fstring_stack, rest, lnum, pos, additional_prefix)
                pos += quote_length
                if fstring_end_token is not None:
                    yield fstring_end_token
                    continue

            # In an f-string, only match up to the end of that string.
            if fstring_stack:
                string_line = line
                for fstring_stack_node in fstring_stack:
                    quote = fstring_stack_node.quote
                    end_match = endpats[quote].match(line, pos)
                    if end_match is not None:
                        end_match_string = end_match.group(0)
                        if len(end_match_string) - len(quote) + pos < len(string_line):
                            string_line = line[:pos] + end_match_string[:-len(quote)]
                pseudomatch = pseudo_token.match(string_line, pos)
            else:
                pseudomatch = pseudo_token.match(line, pos)

            if pseudomatch:
                prefix = additional_prefix + pseudomatch.group(1)
                additional_prefix = ''
                start, pos = pseudomatch.span(2)
                spos = (lnum, start)
                token = pseudomatch.group(2)
                if token == '':
                    assert prefix
                    additional_prefix = prefix
                    # This means that we have a line with whitespace/comments
                    # at the end, which just results in an endmarker.
                    break
                initial = token[0]
            else:
                match = whitespace.match(line, pos)
                initial = line[match.end()]
                start = match.end()
                spos = (lnum, start)

            if new_line and initial not in '\r\n#' \
                    and (initial != '\\' or pseudomatch is None):
                new_line = False
                if paren_level == 0 and not fstring_stack:
                    indent_start = start
                    if indent_start > indents[-1]:
                        yield PythonToken(INDENT, '', spos, '')
                        indents.append(indent_start)
                    yield from dedent_if_necessary(indent_start)

            if not pseudomatch:                          # scan for tokens
                match = whitespace.match(line, pos)
                if new_line and paren_level == 0 and not fstring_stack:
                    yield from dedent_if_necessary(match.end())
                pos = match.end()
                new_line = False
                yield PythonToken(
                    ERRORTOKEN, line[pos], (lnum, pos),
                    additional_prefix + match.group(0)
                )
                additional_prefix = ''
                pos += 1
                continue

            if (initial in numchars                      # ordinary number
                    or (initial == '.' and token != '.' and token != '...')):
                yield PythonToken(NUMBER, token, spos, prefix)
            elif pseudomatch.group(3) is not None:       # ordinary name
                if token in always_break_tokens and (fstring_stack or paren_level):
                    fstring_stack[:] = []
                    paren_level = 0
                    # We only want to dedent if the token is on a new line.
                    m = re.match(r'[ \f\t]*$', line[:start])
                    if m is not None:
                        yield from dedent_if_necessary(m.end())
                if token.isidentifier():
                    yield PythonToken(NAME, token, spos, prefix)
                else:
                    yield from _split_illegal_unicode_name(token, spos, prefix)
            elif initial in '\r\n':
                if any(not f.allow_multiline() for f in fstring_stack):
                    fstring_stack.clear()

                if not new_line and paren_level == 0 and not fstring_stack:
                    yield PythonToken(NEWLINE, token, spos, prefix)
                else:
                    additional_prefix = prefix + token
                new_line = True
            elif initial == '#':                         # comments
                assert not token.endswith("\n") and not token.endswith("\r")
                if fstring_stack and fstring_stack[-1].is_in_expr():
                    # `#` is not allowed in f-string expressions
                    yield PythonToken(ERRORTOKEN, initial, spos, prefix)
                    pos = start + 1
                else:
                    additional_prefix = prefix + token
            elif token in triple_quoted:
                endprog = endpats[token]
                endmatch = endprog.match(line, pos)
                if endmatch:                             # all on one line
                    pos = endmatch.end(0)
                    token = line[start:pos]
                    yield PythonToken(STRING, token, spos, prefix)
                else:
                    contstr_start = spos                 # multiple lines
                    contstr = line[start:]
                    contline = line
                    break

            # Check up to the first 3 chars of the token to see if they're in
            # the single_quoted set. If so, they start a string. We're using
            # the first 3, because we're looking for "rb'" (for example) at
            # the start of the token. If we switch to longer prefixes, this
            # needs to be adjusted. Note that initial == token[:1] and that
            # single quote checking must come after triple quote checking.
            elif initial in single_quoted or \
                    token[:2] in single_quoted or \
                    token[:3] in single_quoted:
                if token[-1] in '\r\n':                  # continued string
                    # A single quoted string that ends with a backslash and is
                    # continued on the next line.
                    contstr_start = lnum, start
                    endprog = (endpats.get(initial) or endpats.get(token[1])
                               or endpats.get(token[2]))
                    contstr = line[start:]
                    contline = line
                    break
                else:                                    # ordinary string
                    yield PythonToken(STRING, token, spos, prefix)
            elif token in fstring_pattern_map:           # the start of an fstring
                fstring_stack.append(FStringNode(fstring_pattern_map[token]))
                yield PythonToken(FSTRING_START, token, spos, prefix)
            elif initial == '\\' and line[start:] in ('\\\n', '\\\r\n', '\\\r'):
                # continued statement
                additional_prefix += prefix + line[start:]
                break
            else:
                if token in '([{':
                    if fstring_stack:
                        fstring_stack[-1].open_parentheses(token)
                    else:
                        paren_level += 1
                elif token in ')]}':
                    if fstring_stack:
                        fstring_stack[-1].close_parentheses(token)
                    else:
                        if paren_level:
                            paren_level -= 1
                elif token.startswith(':') and fstring_stack \
                        and fstring_stack[-1].parentheses_count \
                        - fstring_stack[-1].format_spec_count == 1:
                    # `:` and `:=` both count
                    fstring_stack[-1].format_spec_count += 1
                    token = ':'
                    pos = start + 1

                yield PythonToken(OP, token, spos, prefix)

    if contstr:
        yield PythonToken(ERRORTOKEN, contstr, contstr_start, prefix)
        if contstr.endswith('\n') or contstr.endswith('\r'):
            new_line = True

    if fstring_stack:
        tos = fstring_stack[-1]
        if tos.previous_lines:
            yield PythonToken(
                FSTRING_STRING, tos.previous_lines,
                tos.last_string_start_pos,
                # Never has a prefix because it can start anywhere and
                # include whitespace.
                prefix=''
            )

    end_pos = lnum, max_
    # As the last position we just take the maximally possible position.
    for indent in indents[1:]:
        indents.pop()
        yield PythonToken(DEDENT, '', end_pos, '')
    yield PythonToken(ENDMARKER, '', end_pos, additional_prefix)


def _split_illegal_unicode_name(token, start_pos, prefix):
    def create_token():
        return PythonToken(ERRORTOKEN if is_illegal else NAME, found, pos, prefix)

    found = ''
    is_illegal = False
    pos = start_pos
    for i, char in enumerate(token):
        if is_illegal:
            if char.isidentifier():
                yield create_token()
                found = char
                is_illegal = False
                prefix = ''
                pos = start_pos[0], start_pos[1] + i
            else:
                found += char
        else:
            new_found = found + char
            if new_found.isidentifier():
                found = new_found
            else:
                if found:
                    yield create_token()
                    prefix = ''
                    pos = start_pos[0], start_pos[1] + i
                found = char
                is_illegal = True

    if found:
        yield create_token()


if __name__ == "__main__":
    path = sys.argv[1]
    with open(path) as f:
        code = f.read()

    for token in tokenize(code, version_info=parse_version_string('3.10')):
        print(token)