# Recovered skeleton of a compiled CPython bytecode artifact (``.pyc``) of
# /tmp/pip-build-gk9425m9/Jinja2/jinja2/lexer.py.  The binary payload is not
# editable as text; what follows keeps the information recoverable from the
# marshalled constants -- the module docstring, regular expressions, token
# names, and signatures with their docstrings.  Bodies too large to recover
# verbatim are elided with ``...`` and summarized in comments.
"""Implements a Jinja / Python combination lexer. The ``Lexer`` class
is used to do some preprocessing. It filters out invalid operators like
the bitshift operators we don't allow in templates. It separates
template code and python code in expressions.
"""
import re
import typing as t
from ast import literal_eval
from collections import deque
from sys import intern

from ._identifier import pattern as name_re
from .exceptions import TemplateSyntaxError
from .utils import LRUCache

if t.TYPE_CHECKING:
    import typing_extensions as te

    from .environment import Environment

# cache of compiled lexers, keyed by the lexer-relevant environment settings
_lexer_cache: t.MutableMapping[t.Tuple, "Lexer"] = LRUCache(50)  # type: ignore

# static regular expressions
whitespace_re = re.compile(r"\s+")
newline_re = re.compile(r"(\r\n|\r|\n)")
string_re = re.compile(
    r"('([^'\\]*(?:\\.[^'\\]*)*)'" r'|"([^"\\]*(?:\\.[^"\\]*)*)")', re.S
)
integer_re = re.compile(
    r"""
    (
        0b(_?[0-1])+ # binary
    |
        0o(_?[0-7])+ # octal
    |
        0x(_?[\da-f])+ # hex
    |
        [1-9](_?\d)* # decimal
    |
        0(_?0)* # decimal zero
    )
    """,
    re.IGNORECASE | re.VERBOSE,
)
float_re = re.compile(
    r"""
    (?<!\.)  # doesn't start with a .
    (\d+_)*\d+  # digits, possibly _ separated
    (
        (\.(\d+_)*\d+)?  # optional fractional part
        e[+\-]?(\d+_)*\d+  # exponent part
    |
        \.(\d+_)*\d+  # required fractional part
    )
    """,
    re.IGNORECASE | re.VERBOSE,
)

# internal token types (interned so comparisons can use identity)
TOKEN_ADD = intern("add")
TOKEN_ASSIGN = intern("assign")
TOKEN_COLON = intern("colon")
TOKEN_COMMA = intern("comma")
TOKEN_DIV = intern("div")
TOKEN_DOT = intern("dot")
TOKEN_EQ = intern("eq")
TOKEN_FLOORDIV = intern("floordiv")
TOKEN_GT = intern("gt")
TOKEN_GTEQ = intern("gteq")
TOKEN_LBRACE = intern("lbrace")
TOKEN_LBRACKET = intern("lbracket")
TOKEN_LPAREN = intern("lparen")
TOKEN_LT = intern("lt")
TOKEN_LTEQ = intern("lteq")
TOKEN_MOD = intern("mod")
TOKEN_MUL = intern("mul")
TOKEN_NE = intern("ne")
TOKEN_PIPE = intern("pipe")
TOKEN_POW = intern("pow")
TOKEN_RBRACE = intern("rbrace")
TOKEN_RBRACKET = intern("rbracket")
TOKEN_RPAREN = intern("rparen")
TOKEN_SEMICOLON = intern("semicolon")
TOKEN_SUB = intern("sub")
TOKEN_TILDE = intern("tilde")
TOKEN_WHITESPACE = intern("whitespace")
TOKEN_FLOAT = intern("float")
TOKEN_INTEGER = intern("integer")
TOKEN_NAME = intern("name")
TOKEN_STRING = intern("string")
TOKEN_OPERATOR = intern("operator")
TOKEN_BLOCK_BEGIN = intern("block_begin")
TOKEN_BLOCK_END = intern("block_end")
TOKEN_VARIABLE_BEGIN = intern("variable_begin")
TOKEN_VARIABLE_END = intern("variable_end")
TOKEN_RAW_BEGIN = intern("raw_begin")
TOKEN_RAW_END = intern("raw_end")
TOKEN_COMMENT_BEGIN = intern("comment_begin")
TOKEN_COMMENT_END = intern("comment_end")
TOKEN_COMMENT = intern("comment")
TOKEN_LINESTATEMENT_BEGIN = intern("linestatement_begin")
TOKEN_LINESTATEMENT_END = intern("linestatement_end")
TOKEN_LINECOMMENT_BEGIN = intern("linecomment_begin")
TOKEN_LINECOMMENT_END = intern("linecomment_end")
TOKEN_LINECOMMENT = intern("linecomment")
TOKEN_DATA = intern("data")
TOKEN_INITIAL = intern("initial")
TOKEN_EOF = intern("eof")

operators = {
    "+": TOKEN_ADD, "-": TOKEN_SUB, "/": TOKEN_DIV, "//": TOKEN_FLOORDIV,
    "*": TOKEN_MUL, "%": TOKEN_MOD, "**": TOKEN_POW, "~": TOKEN_TILDE,
    "[": TOKEN_LBRACKET, "]": TOKEN_RBRACKET, "(": TOKEN_LPAREN,
    ")": TOKEN_RPAREN, "{": TOKEN_LBRACE, "}": TOKEN_RBRACE,
    "==": TOKEN_EQ, "!=": TOKEN_NE, ">": TOKEN_GT, ">=": TOKEN_GTEQ,
    "<": TOKEN_LT, "<=": TOKEN_LTEQ, "=": TOKEN_ASSIGN, ".": TOKEN_DOT,
    ":": TOKEN_COLON, "|": TOKEN_PIPE, ",": TOKEN_COMMA, ";": TOKEN_SEMICOLON,
}

reverse_operators = {v: k for k, v in operators.items()}
assert len(operators) == len(reverse_operators), "operators dropped"
operator_re = re.compile(
    f"({'|'.join(re.escape(x) for x in sorted(operators, key=lambda x: -len(x)))})"
)

ignored_tokens = frozenset(
    [
        TOKEN_COMMENT_BEGIN, TOKEN_COMMENT, TOKEN_COMMENT_END,
        TOKEN_WHITESPACE,
        TOKEN_LINECOMMENT_BEGIN, TOKEN_LINECOMMENT_END, TOKEN_LINECOMMENT,
    ]
)
ignore_if_empty = frozenset(
    [TOKEN_WHITESPACE, TOKEN_DATA, TOKEN_COMMENT, TOKEN_LINECOMMENT]
)


def _describe_token_type(token_type: str) -> str:
    if token_type in reverse_operators:
        return reverse_operators[token_type]

    return {
        TOKEN_COMMENT_BEGIN: "begin of comment",
        TOKEN_COMMENT_END: "end of comment",
        TOKEN_COMMENT: "comment",
        TOKEN_LINECOMMENT: "comment",
        TOKEN_BLOCK_BEGIN: "begin of statement block",
        TOKEN_BLOCK_END: "end of statement block",
        TOKEN_VARIABLE_BEGIN: "begin of print statement",
        TOKEN_VARIABLE_END: "end of print statement",
        TOKEN_LINESTATEMENT_BEGIN: "begin of line statement",
        TOKEN_LINESTATEMENT_END: "end of line statement",
        TOKEN_DATA: "template data / text",
        TOKEN_EOF: "end of template",
    }.get(token_type, token_type)


def describe_token(token: "Token") -> str:
    """Returns a description of the token."""
    if token.type is TOKEN_NAME:
        return token.value

    return _describe_token_type(token.type)


def describe_token_expr(expr: str) -> str:
    """Like `describe_token` but for token expressions."""
    if ":" in expr:
        type_, value = expr.split(":", 1)

        if type_ == TOKEN_NAME:
            return value
    else:
        type_ = expr

    return _describe_token_type(type_)


def count_newlines(value: str) -> int:
    """Count the number of newline characters in the string.  This is
    useful for extensions that filter a stream.
    """
    return len(newline_re.findall(value))


def compile_rules(environment: "Environment") -> t.List[t.Tuple[str, str]]:
    """Compiles all the rules from the environment into a list of rules."""
    # Builds (length, token, escaped-prefix) triples for the comment, block,
    # and variable start strings, plus optional line-statement
    # (``r"^[ \t\v]*"`` prefix) and line-comment
    # (``r"(?:^|(?<=\S))[^\S\r\n]*"`` prefix) rules, then returns them
    # longest-first: ``[x[1:] for x in sorted(rules, reverse=True)]``.
    ...


class Failure:
    """Class that raises a `TemplateSyntaxError` if called.
    Used by the `Lexer` to specify known errors.
    """

    def __init__(
        self, message: str, cls: t.Type[TemplateSyntaxError] = TemplateSyntaxError
    ) -> None:
        self.message = message
        self.error_class = cls

    def __call__(self, lineno: int, filename: str) -> "te.NoReturn":
        raise self.error_class(self.message, lineno, filename)


class Token(t.NamedTuple):
    lineno: int
    type: str
    value: str

    def __str__(self) -> str:
        return describe_token(self)

    def test(self, expr: str) -> bool:
        """Test a token against a token expression.  This can either be a
        token type or ``'token_type:token_value'``.  This can only test
        against string values and types.
        """
        if self.type == expr:
            return True

        if ":" in expr:
            return expr.split(":", 1) == [self.type, self.value]

        return False

    def test_any(self, iterable: t.Iterable[str]) -> bool:
        """Test against multiple token expressions."""
        return any(self.test(expr) for expr in iterable)


class TokenStreamIterator:
    """The iterator for tokenstreams.  Iterate over the stream
    until the eof token is reached.
    """

    def __init__(self, stream: "TokenStream") -> None:
        self.stream = stream

    def __iter__(self) -> "TokenStreamIterator":
        return self

    def __next__(self) -> Token:
        token = self.stream.current

        if token.type is TOKEN_EOF:
            self.stream.close()
            raise StopIteration

        next(self.stream)
        return token


class TokenStream:
    """A token stream is an iterable that yields :class:`Token`\\s.  The
    parser however does not iterate over it but calls :meth:`next` to go
    one token ahead.  The current active token is stored as :attr:`current`.
    """

    def __init__(
        self,
        generator: t.Iterable[Token],
        name: t.Optional[str],
        filename: t.Optional[str],
    ):
        self._iter = iter(generator)
        self._pushed: "te.Deque[Token]" = deque()
        self.name = name
        self.filename = filename
        self.closed = False
        self.current = Token(1, TOKEN_INITIAL, "")
        next(self)

    def __iter__(self) -> TokenStreamIterator:
        return TokenStreamIterator(self)

    def __bool__(self) -> bool:
        return bool(self._pushed) or self.current.type is not TOKEN_EOF

    @property
    def eos(self) -> bool:
        """Are we at the end of the stream?"""
        return not self

    def push(self, token: Token) -> None:
        """Push a token back to the stream."""
        self._pushed.append(token)

    def look(self) -> Token:
        """Look at the next token."""
        old_token = next(self)
        result = self.current
        self.push(result)
        self.current = old_token
        return result

    def skip(self, n: int = 1) -> None:
        """Got n tokens ahead."""
        for _ in range(n):
            next(self)

    def next_if(self, expr: str) -> t.Optional[Token]:
        """Perform the token test and return the token if it matched.
        Otherwise the return value is `None`.
        """
        if self.current.test(expr):
            return next(self)

        return None

    def skip_if(self, expr: str) -> bool:
        """Like :meth:`next_if` but only returns `True` or `False`."""
        return self.next_if(expr) is not None

    def __next__(self) -> Token:
        """Go one token ahead and return the old one.

        Use the built-in :func:`next` instead of calling this directly.
        """
        rv = self.current

        if self._pushed:
            self.current = self._pushed.popleft()
        elif self.current.type is not TOKEN_EOF:
            try:
                self.current = next(self._iter)
            except StopIteration:
                self.close()

        return rv

    def close(self) -> None:
        """Close the stream."""
        self.current = Token(self.current.lineno, TOKEN_EOF, "")
        self._iter = iter(())
        self.closed = True

    def expect(self, expr: str) -> Token:
        """Expect a given token type and return it.  This accepts the same
        argument as :meth:`jinja2.lexer.Token.test`.
        """
        if not self.current.test(expr):
            expr = describe_token_expr(expr)

            if self.current.type is TOKEN_EOF:
                raise TemplateSyntaxError(
                    f"unexpected end of template, expected {expr!r}.",
                    self.current.lineno,
                    self.name,
                    self.filename,
                )

            raise TemplateSyntaxError(
                f"expected token {expr!r}, got {describe_token(self.current)!r}",
                self.current.lineno,
                self.name,
                self.filename,
            )

        return next(self)


def get_lexer(environment: "Environment") -> "Lexer":
    """Return a lexer which is probably cached."""
    key = (
        environment.block_start_string,
        environment.block_end_string,
        environment.variable_start_string,
        environment.variable_end_string,
        environment.comment_start_string,
        environment.comment_end_string,
        environment.line_statement_prefix,
        environment.line_comment_prefix,
        environment.trim_blocks,
        environment.lstrip_blocks,
        environment.newline_sequence,
        environment.keep_trailing_newline,
    )
    lexer = _lexer_cache.get(key)

    if lexer is None:
        _lexer_cache[key] = lexer = Lexer(environment)

    return lexer
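
# Editor's sketch (not recovered from the bytecode): token expressions, as
# consumed by TokenStream.expect()/next_if() above, are either a bare token
# type or a ``"type:value"`` pair; Token.test()/test_any() implement the
# matching.  The helper name below is hypothetical, added for illustration.
def _demo_token_expressions() -> None:
    tok = Token(1, TOKEN_NAME, "foo")
    assert tok.test("name")  # bare type: matches any value
    assert tok.test("name:foo")  # type and value must both match
    assert not tok.test("name:bar")
    assert tok.test_any(["integer", "name:foo"])  # any expression may match
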
class OptionalLStrip(tuple):
    """A special tuple for marking a point in the state that can have
    lstrip applied.
    """

    __slots__ = ()

    def __new__(cls, *members, **kwargs):  # type: ignore
        return super().__new__(cls, members)


class _Rule(t.NamedTuple):
    pattern: t.Pattern[str]
    tokens: t.Union[str, t.Tuple[str, ...], t.Tuple[Failure]]
    command: t.Optional[str]


class Lexer:
    """Class that implements a lexer for a given environment. Automatically
    created by the environment class, usually you don't have to do that.

    Note that the lexer is not automatically bound to an environment.
    Multiple environments can share the same lexer.
    """

    def __init__(self, environment: "Environment") -> None:
        # Compiles the per-state _Rule tables consumed by tokeniter(): a
        # "root" state built from compile_rules() plus states for block,
        # variable, comment, and raw sections derived from the environment's
        # delimiters, honoring trim_blocks/lstrip_blocks.  The embedded
        # Failure rules carry the recovered messages "Missing end of comment
        # tag" and "Missing end of raw directive".  Not recoverable verbatim.
        ...

    def _normalize_newlines(self, value: str) -> str:
        """Replace all newlines with the configured sequence in strings
        and template data.
        """
        return newline_re.sub(self.newline_sequence, value)

    def tokenize(
        self,
        source: str,
        name: t.Optional[str] = None,
        filename: t.Optional[str] = None,
        state: t.Optional[str] = None,
    ) -> TokenStream:
        """Calls tokeniter + tokenize and wraps it in a token stream."""
        stream = self.tokeniter(source, name, filename, state)
        return TokenStream(self.wrap(stream, name, filename), name, filename)

    def wrap(
        self,
        stream: t.Iterable[t.Tuple[int, str, str]],
        name: t.Optional[str] = None,
        filename: t.Optional[str] = None,
    ) -> t.Iterator[Token]:
        """This is called with the stream as returned by `tokenize` and wraps
        every token in a :class:`Token` and converts the value.
        """
        # Drops ignored_tokens, validates names ("Invalid character in
        # identifier"), decodes string literals (re-raising bad escapes as
        # TemplateSyntaxError), converts integer/float literals, and maps
        # operator text through the ``operators`` table.  Not recoverable
        # verbatim.
        ...

    def tokeniter(
        self,
        source: str,
        name: t.Optional[str],
        filename: t.Optional[str] = None,
        state: t.Optional[str] = None,
    ) -> t.Iterator[t.Tuple[int, str, str]]:
        """This method tokenizes the text and returns the tokens in a
        generator. Use this method if you just want to tokenize a template.

        .. versionchanged:: 3.0
            Only ``\\n``, ``\\r\\n`` and ``\\r`` are treated as line
            breaks.
        """
        # Runs the compiled rules over the source while maintaining a state
        # stack ('root', 'block', 'variable', ...) and a bracket-balancing
        # stack, yielding (lineno, token, value) tuples and raising
        # TemplateSyntaxError on unbalanced or unexpected input
        # ("unexpected 'x'", "unexpected char ... at ...").  Not recoverable
        # verbatim.
        ...
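
# Editor's sketch (not recovered from the bytecode): the usual entry point is
# the public, documented ``Environment.lex()`` API, which yields the raw
# (lineno, token_type, value) tuples produced by ``Lexer.tokeniter``.  Unlike
# ``Lexer.tokenize()``/``wrap()``, this raw stream still contains
# ``whitespace`` tokens, which makes it handy for debugging and extensions.
if __name__ == "__main__":
    from jinja2 import Environment

    env = Environment()

    for lineno, token_type, value in env.lex("Hello {{ name }}!"):
        print(lineno, token_type, repr(value))

    # Typical output (token names match the TOKEN_* constants above):
    #   1 data 'Hello '
    #   1 variable_begin '{{'
    #   1 whitespace ' '
    #   1 name 'name'
    #   1 whitespace ' '
    #   1 variable_end '}}'
    #   1 data '!'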