# Process *this* and _that_
#
from __future__ import annotations
from .state_inline import Delimiter, StateInline
def tokenize(state: StateInline, silent: bool) -> bool:
    """Insert each emphasis marker as a separate text token and record it in the delimiter list.

    Returns True when a run of ``*`` or ``_`` markers was consumed, False otherwise.
    """
    start = state.pos
    ch = state.src[start]

    if silent:
        return False

    if ch != "_" and ch != "*":
        return False

    # ``*`` may open/close intra-word emphasis; ``_`` may not (hence the flag).
    result = state.scanDelims(start, ch == "*")

    for _ in range(result.length):
        tok = state.push("text", "", 0)
        tok.content = ch
        state.delimiters.append(
            Delimiter(
                marker=ord(ch),
                length=result.length,
                token=len(state.tokens) - 1,
                end=-1,
                open=result.can_open,
                close=result.can_close,
            )
        )

    state.pos += result.length
    return True
def _postProcess(state: StateInline, delimiters: list[Delimiter]) -> None:
    """Convert matched emphasis delimiters into em/strong open/close tokens.

    Walks *delimiters* from the end toward the start, rewriting the text
    tokens of each matched opener/closer pair in ``state.tokens`` in place.
    Assumes ``Delimiter.end`` has already been resolved by the
    balance-pairs rule (``-1`` means unmatched).
    """
    i = len(delimiters) - 1
    while i >= 0:
        startDelim = delimiters[i]
        # /* _ */ /* * */
        # Skip delimiters produced by other rules (e.g. strikethrough).
        if startDelim.marker != 0x5F and startDelim.marker != 0x2A:
            i -= 1
            continue
        # Process only opening markers
        if startDelim.end == -1:
            i -= 1
            continue
        endDelim = delimiters[startDelim.end]
        # If the previous delimiter has the same marker and is adjacent to this one,
        # merge those into one strong delimiter.
        #
        # `<em><em>whatever</em></em>` -> `<strong>whatever</strong>`
        #
        isStrong = (
            i > 0
            and delimiters[i - 1].end == startDelim.end + 1
            # check that first two markers match and adjacent
            and delimiters[i - 1].marker == startDelim.marker
            and delimiters[i - 1].token == startDelim.token - 1
            # check that last two markers are adjacent (we can safely assume they match)
            and delimiters[startDelim.end + 1].token == endDelim.token + 1
        )
        ch = chr(startDelim.marker)
        # Rewrite the opening text token into an em/strong opening tag.
        token = state.tokens[startDelim.token]
        token.type = "strong_open" if isStrong else "em_open"
        token.tag = "strong" if isStrong else "em"
        token.nesting = 1
        token.markup = ch + ch if isStrong else ch
        token.content = ""
        # Rewrite the matching closing text token the same way.
        token = state.tokens[endDelim.token]
        token.type = "strong_close" if isStrong else "em_close"
        token.tag = "strong" if isStrong else "em"
        token.nesting = -1
        token.markup = ch + ch if isStrong else ch
        token.content = ""
        if isStrong:
            # The outer pair of markers was consumed by the <strong> tag;
            # blank out their text tokens and skip the extra delimiter.
            state.tokens[delimiters[i - 1].token].content = ""
            state.tokens[delimiters[startDelim.end + 1].token].content = ""
            i -= 1
        i -= 1
def postProcess(state: StateInline) -> None:
    """Walk through delimiter list and replace text tokens with tags."""
    # Handle the top-level delimiter list first, then any per-token
    # delimiter lists stored in the token metadata.
    _postProcess(state, state.delimiters)

    for meta in state.tokens_meta:
        if meta and "delimiters" in meta:
            _postProcess(state, meta["delimiters"])