"""Block-level tokenizer."""importloggingfromtypingimportList,Optional,Tuplefrom.rulerimportRulerfrom.tokenimportTokenfrom.rules_block.state_blockimportStateBlockfrom.importrules_blockLOGGER=logging.getLogger(__name__)_rules:List[Tuple]=[# First 2 params - rule name & source. Secondary array - list of rules,# which can be terminated by this one.("table",rules_block.table,["paragraph","reference"]),("code",rules_block.code),("fence",rules_block.fence,["paragraph","reference","blockquote","list"]),("blockquote",rules_block.blockquote,["paragraph","reference","blockquote","list"],),("hr",rules_block.hr,["paragraph","reference","blockquote","list"]),("list",rules_block.list_block,["paragraph","reference","blockquote"]),("reference",rules_block.reference),("html_block",rules_block.html_block,["paragraph","reference","blockquote"]),("heading",rules_block.heading,["paragraph","reference","blockquote"]),("lheading",rules_block.lheading),("paragraph",rules_block.paragraph),]
def tokenize(
    self, state: StateBlock, startLine: int, endLine: int, silent: bool = False
) -> None:
    """Generate tokens for input range.

    Runs every block rule against each line of ``state.src`` in
    ``[startLine, endLine)``, pushing resulting tokens onto ``state.tokens``
    and advancing ``state.line`` as rules consume lines.

    :param state: block parser state (source, tokens, indentation bookkeeping)
    :param startLine: first line index to process (inclusive)
    :param endLine: last line index (exclusive)
    :param silent: accepted for API symmetry; not consulted here — rules are
        always invoked with ``silent=False``
    """
    rules = self.ruler.getRules("")
    line = startLine
    maxNesting = state.md.options.maxNesting
    hasEmptyLines = False

    while line < endLine:
        state.line = line = state.skipEmptyLines(line)
        if line >= endLine:
            break
        if state.sCount[line] < state.blkIndent:
            # Termination condition for nested calls.
            # Nested calls currently used for blockquotes & lists
            break
        if state.level >= maxNesting:
            # If nesting level exceeded - skip tail to the end.
            # That's not ordinary situation and we should not care about content.
            state.line = endLine
            break

        # Try all possible rules.
        # On success, rule should:
        # - update `state.line`
        # - update `state.tokens`
        # - return True
        for rule in rules:
            if rule(state, line, endLine, False):
                break

        # set state.tight if we had an empty line before current tag
        # i.e. latest empty line should not count
        state.tight = not hasEmptyLines

        line = state.line

        # paragraph might "eat" one newline after it in nested lists
        if (line - 1) < endLine and state.isEmpty(line - 1):
            hasEmptyLines = True

        if line < endLine and state.isEmpty(line):
            hasEmptyLines = True
            line += 1

    state.line = line
def parse(
    self,
    src: str,
    md,
    env,
    outTokens: List[Token],
    ords: Optional[Tuple[int, ...]] = None,
) -> Optional[List[Token]]:
    """Process input string and push block tokens into `outTokens`.

    Builds a fresh :class:`StateBlock` over *src*, tokenizes the whole
    line range, and returns the resulting token list.  Returns ``None``
    for empty input without touching ``outTokens``.
    """
    # Nothing to do for empty input — mirror the no-op contract.
    if not src:
        return None

    block_state = StateBlock(src, md, env, outTokens, ords)
    self.tokenize(block_state, block_state.line, block_state.lineMax)
    return block_state.tokens