# ~~strike through~~
from typing import List
from .state_inline import StateInline, Delimiter


def tokenize(state: StateInline, silent: bool):
"""Insert each marker as a separate text token, and add it to delimiter list"""
start = state.pos
marker = state.srcCharCode[start]
if silent:
return False
if marker != 0x7E: # /* ~ */
return False
scanned = state.scanDelims(state.pos, True)
length = scanned.length
ch = chr(marker)
if length < 2:
return False
if length % 2:
token = state.push("text", "", 0)
token.content = ch
length -= 1
i = 0
while i < length:
token = state.push("text", "", 0)
token.content = ch + ch
state.delimiters.append(
Delimiter(
**{
"marker": marker,
"length": 0, # disable "rule of 3" length checks meant for emphasis
"jump": i // 2, # for `~~` 1 marker = 2 characters
"token": len(state.tokens) - 1,
"end": -1,
"open": scanned.can_open,
"close": scanned.can_close,
}
)
)
i += 2
state.pos += scanned.length
return True
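
# Worked example (illustration only, not part of the original source): for an
# opening run of five tildes, `~~~~~`, scanDelims() reports length 5.  Because
# 5 is odd, tokenize() first pushes a lone "~" text token, then turns the
# remaining four markers into two "~~" text tokens, each backed by a Delimiter
# entry recording its token index; _postProcess() later pairs those delimiters
# with matching closers.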


def _postProcess(state: StateInline, delimiters: List[Delimiter]):
    loneMarkers = []
    maximum = len(delimiters)

    i = 0
    while i < maximum:
        startDelim = delimiters[i]

        if startDelim.marker != 0x7E:  # /* ~ */
            i += 1
            continue

        if startDelim.end == -1:
            i += 1
            continue

        endDelim = delimiters[startDelim.end]

        token = state.tokens[startDelim.token]
        token.type = "s_open"
        token.tag = "s"
        token.nesting = 1
        token.markup = "~~"
        token.content = ""

        token = state.tokens[endDelim.token]
        token.type = "s_close"
        token.tag = "s"
        token.nesting = -1
        token.markup = "~~"
        token.content = ""

        if (
            state.tokens[endDelim.token - 1].type == "text"
            and state.tokens[endDelim.token - 1].content == "~"
        ):
            loneMarkers.append(endDelim.token - 1)

        i += 1

    # If a marker sequence has an odd number of characters, it's split
    # like this: `~~~~~` -> `~` + `~~` + `~~`, leaving one marker at the
    # start of the sequence.
    #
    # So, we have to move all those markers after subsequent s_close tags.
    #
    while loneMarkers:
        i = loneMarkers.pop()
        j = i + 1

        while (j < len(state.tokens)) and (state.tokens[j].type == "s_close"):
            j += 1

        j -= 1

        if i != j:
            token = state.tokens[j]
            state.tokens[j] = state.tokens[i]
            state.tokens[i] = token
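
# Worked example (illustration only, not part of the original source): for the
# inline text "~~~foo~~~" the inline tokenizer produces
#   text("~"), text("~~"), text("foo"), text("~"), text("~~")
# _postProcess() rewrites the paired "~~" tokens into s_open / s_close and then
# moves the lone "~" sitting just before the s_close to after it, so the stream
# renders as "~<s>foo</s>~" instead of leaving the stray marker inside <s>.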


def postProcess(state: StateInline):
"""Walk through delimiter list and replace text tokens with tags."""
tokens_meta = state.tokens_meta
maximum = len(state.tokens_meta)
_postProcess(state, state.delimiters)
curr = 0
while curr < maximum:
try:
curr_meta = tokens_meta[curr]
except IndexError:
pass
else:
if curr_meta and "delimiters" in curr_meta:
_postProcess(state, curr_meta["delimiters"])
curr += 1
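

if __name__ == "__main__":
    # Hedged usage sketch (not part of the original module): exercise this rule
    # through the public markdown-it-py API.  It assumes the rule is registered
    # under the name "strikethrough", which the "commonmark" preset keeps
    # disabled until explicitly enabled.
    from markdown_it import MarkdownIt

    md = MarkdownIt("commonmark").enable("strikethrough")
    print(md.render("~~deleted~~"))  # expected: <p><s>deleted</s></p>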