2d077da4c302125b96738c591b3c4f1b2e5b5ab4a644e7bcc6403b471d8c7be8.json

  1. {"ast":null,"code":"// Clean up tokens after emphasis and strikethrough postprocessing:\n// merge adjacent text nodes into one and re-calculate all token levels\n//\n// This is necessary because initially emphasis delimiter markers (*, _, ~)\n// are treated as their own separate text tokens. Then emphasis rule either\n// leaves them as text (needed to merge with adjacent text) or turns them\n// into opening/closing tags (which messes up levels inside).\n//\n\nexport default function fragments_join(state) {\n let curr, last;\n let level = 0;\n const tokens = state.tokens;\n const max = state.tokens.length;\n for (curr = last = 0; curr < max; curr++) {\n // re-calculate levels after emphasis/strikethrough turns some text nodes\n // into opening/closing tags\n if (tokens[curr].nesting < 0) level--; // closing tag\n tokens[curr].level = level;\n if (tokens[curr].nesting > 0) level++; // opening tag\n\n if (tokens[curr].type === 'text' && curr + 1 < max && tokens[curr + 1].type === 'text') {\n // collapse two adjacent text nodes\n tokens[curr + 1].content = tokens[curr].content + tokens[curr + 1].content;\n } else {\n if (curr !== last) {\n tokens[last] = tokens[curr];\n }\n last++;\n }\n }\n if (curr !== last) {\n tokens.length = last;\n }\n}","map":{"version":3,"names":["fragments_join","state","curr","last","level","tokens","max","length","nesting","type","content"],"sources":["F:/workspace/202226701027/huinongbao-app/node_modules/markdown-it/lib/rules_inline/fragments_join.mjs"],"sourcesContent":["// Clean up tokens after emphasis and strikethrough postprocessing:\n// merge adjacent text nodes into one and re-calculate all token levels\n//\n// This is necessary because initially emphasis delimiter markers (*, _, ~)\n// are treated as their own separate text tokens. 
Then emphasis rule either\n// leaves them as text (needed to merge with adjacent text) or turns them\n// into opening/closing tags (which messes up levels inside).\n//\n\nexport default function fragments_join (state) {\n let curr, last\n let level = 0\n const tokens = state.tokens\n const max = state.tokens.length\n\n for (curr = last = 0; curr < max; curr++) {\n // re-calculate levels after emphasis/strikethrough turns some text nodes\n // into opening/closing tags\n if (tokens[curr].nesting < 0) level-- // closing tag\n tokens[curr].level = level\n if (tokens[curr].nesting > 0) level++ // opening tag\n\n if (tokens[curr].type === 'text' &&\n curr + 1 < max &&\n tokens[curr + 1].type === 'text') {\n // collapse two adjacent text nodes\n tokens[curr + 1].content = tokens[curr].content + tokens[curr + 1].content\n } else {\n if (curr !== last) { tokens[last] = tokens[curr] }\n\n last++\n }\n }\n\n if (curr !== last) {\n tokens.length = last\n }\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,eAAe,SAASA,cAAcA,CAAEC,KAAK,EAAE;EAC7C,IAAIC,IAAI,EAAEC,IAAI;EACd,IAAIC,KAAK,GAAG,CAAC;EACb,MAAMC,MAAM,GAAGJ,KAAK,CAACI,MAAM;EAC3B,MAAMC,GAAG,GAAGL,KAAK,CAACI,MAAM,CAACE,MAAM;EAE/B,KAAKL,IAAI,GAAGC,IAAI,GAAG,CAAC,EAAED,IAAI,GAAGI,GAAG,EAAEJ,IAAI,EAAE,EAAE;IACxC;IACA;IACA,IAAIG,MAAM,CAACH,IAAI,CAAC,CAACM,OAAO,GAAG,CAAC,EAAEJ,KAAK,EAAE,EAAC;IACtCC,MAAM,CAACH,IAAI,CAAC,CAACE,KAAK,GAAGA,KAAK;IAC1B,IAAIC,MAAM,CAACH,IAAI,CAAC,CAACM,OAAO,GAAG,CAAC,EAAEJ,KAAK,EAAE,EAAC;;IAEtC,IAAIC,MAAM,CAACH,IAAI,CAAC,CAACO,IAAI,KAAK,MAAM,IAC5BP,IAAI,GAAG,CAAC,GAAGI,GAAG,IACdD,MAAM,CAACH,IAAI,GAAG,CAAC,CAAC,CAACO,IAAI,KAAK,MAAM,EAAE;MACpC;MACAJ,MAAM,CAACH,IAAI,GAAG,CAAC,CAAC,CAACQ,OAAO,GAAGL,MAAM,CAACH,IAAI,CAAC,CAACQ,OAAO,GAAGL,MAAM,CAACH,IAAI,GAAG,CAAC,CAAC,CAACQ,OAAO;IAC5E,CAAC,MAAM;MACL,IAAIR,IAAI,KAAKC,IAAI,EAAE;QAAEE,MAAM,CAACF,IAAI,CAAC,GAAGE,MAAM,CAACH,IAAI,CAAC;MAAC;MAEjDC,IAAI,EAAE;IACR;EACF;EAEA,IAAID,IAAI,KAAKC,IAAI,EAAE;IACjBE,MAAM,CAACE,MAAM,GAAGJ,IAAI;EACtB;AACF","ignoreList":[]},"metadata":{},"sourceType":"module","externalDependencies":[]}
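
The cached module above is markdown-it's fragments_join inline rule: it makes a single pass over state.tokens, recomputes each token's nesting level, and collapses runs of adjacent 'text' tokens that emphasis/strikethrough postprocessing left behind, compacting the array in place. As a rough illustration (not part of the cache file), the sketch below runs the function over a hand-rolled token list in which a leftover * delimiter remained a plain text token; it assumes the "code" field above has been saved locally as fragments_join.mjs, and the token objects are simplified stand-ins for markdown-it's Token instances.

// Minimal sketch: collapse adjacent text fragments with the rule above.
// Assumes the transpiled "code" field was written out as ./fragments_join.mjs.
import fragments_join from './fragments_join.mjs'

// Hypothetical inline state: "foo", a leftover '*' delimiter and "bar",
// all still separate text tokens after emphasis postprocessing.
const state = {
  tokens: [
    { type: 'text', content: 'foo', nesting: 0, level: 0 },
    { type: 'text', content: '*', nesting: 0, level: 0 },
    { type: 'text', content: 'bar', nesting: 0, level: 0 }
  ]
}

fragments_join(state)

// The three fragments are merged into one token and the array is truncated:
// [ { type: 'text', content: 'foo*bar', nesting: 0, level: 0 } ]
console.log(state.tokens)

In markdown-it itself this function is registered as the final ruler2 ('inline2') rule, running after balance_pairs, strikethrough, and emphasis postprocessing, which is why a single cleanup pass like this is sufficient.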