# Process *this* and _that_
#
from .state_inline import StateInline, Delimiter
def tokenize(state: StateInline, silent: bool):
    """Insert each marker as a separate text token, and add it to delimiter list"""
    start = state.pos
    marker = state.srcCharCode[start]

    if silent:
        return False

    # Only underscore (0x5F) and asterisk (0x2A) start emphasis runs.
    if marker not in (0x5F, 0x2A):
        return False

    scanned = state.scanDelims(state.pos, marker == 0x2A)
    marker_char = chr(marker)

    # One "text" token and one delimiter record per scanned marker character.
    for offset in range(scanned.length):
        text_token = state.push("text", "", 0)
        text_token.content = marker_char
        state.delimiters.append(
            Delimiter(
                marker=marker,
                length=scanned.length,
                jump=offset,
                token=len(state.tokens) - 1,
                end=-1,  # matching closer is filled in by post-processing
                open=scanned.can_open,
                close=scanned.can_close,
            )
        )

    state.pos += scanned.length
    return True
def_postProcess(state,delimiters):i=len(delimiters)-1whilei>=0:startDelim=delimiters[i]# /* _ */ /* * */ifstartDelim.marker!=0x5FandstartDelim.marker!=0x2A:i-=1continue# Process only opening markersifstartDelim.end==-1:i-=1continueendDelim=delimiters[startDelim.end]# If the previous delimiter has the same marker and is adjacent to this one,# merge those into one strong delimiter.## `<em><em>whatever</em></em>` -> `<strong>whatever</strong>`#isStrong=(i>0anddelimiters[i-1].end==startDelim.end+1anddelimiters[i-1].token==startDelim.token-1anddelimiters[startDelim.end+1].token==endDelim.token+1anddelimiters[i-1].marker==startDelim.marker)ch=chr(startDelim.marker)token=state.tokens[startDelim.token]token.type="strong_open"ifisStrongelse"em_open"token.tag="strong"ifisStrongelse"em"token.nesting=1token.markup=ch+chifisStrongelsechtoken.content=""token=state.tokens[endDelim.token]token.type="strong_close"ifisStrongelse"em_close"token.tag="strong"ifisStrongelse"em"token.nesting=-1token.markup=ch+chifisStrongelsechtoken.content=""ifisStrong:state.tokens[delimiters[i-1].token].content=""state.tokens[delimiters[startDelim.end+1].token].content=""i-=1i-=1
def postProcess(state: StateInline):
    """Walk through delimiter list and replace text tokens with tags."""
    _postProcess(state, state.delimiters)

    # Nested inline levels keep their own delimiter lists in token meta.
    for meta in state.tokens_meta:
        if not meta:
            continue
        if "delimiters" in meta:
            _postProcess(state, meta["delimiters"])