4cda16e0d6381afdd7e1388da5535314da56c81186abc2299627fb1a18d2326c.json

  1. {"ast":null,"code":"// ~~strike through~~\n//\n\n// Insert each marker as a separate text token, and add it to delimiter list\n//\nfunction strikethrough_tokenize(state, silent) {\n const start = state.pos;\n const marker = state.src.charCodeAt(start);\n if (silent) {\n return false;\n }\n if (marker !== 0x7E /* ~ */) {\n return false;\n }\n const scanned = state.scanDelims(state.pos, true);\n let len = scanned.length;\n const ch = String.fromCharCode(marker);\n if (len < 2) {\n return false;\n }\n let token;\n if (len % 2) {\n token = state.push('text', '', 0);\n token.content = ch;\n len--;\n }\n for (let i = 0; i < len; i += 2) {\n token = state.push('text', '', 0);\n token.content = ch + ch;\n state.delimiters.push({\n marker,\n length: 0,\n // disable \"rule of 3\" length checks meant for emphasis\n token: state.tokens.length - 1,\n end: -1,\n open: scanned.can_open,\n close: scanned.can_close\n });\n }\n state.pos += scanned.length;\n return true;\n}\nfunction postProcess(state, delimiters) {\n let token;\n const loneMarkers = [];\n const max = delimiters.length;\n for (let i = 0; i < max; i++) {\n const startDelim = delimiters[i];\n if (startDelim.marker !== 0x7E /* ~ */) {\n continue;\n }\n if (startDelim.end === -1) {\n continue;\n }\n const endDelim = delimiters[startDelim.end];\n token = state.tokens[startDelim.token];\n token.type = 's_open';\n token.tag = 's';\n token.nesting = 1;\n token.markup = '~~';\n token.content = '';\n token = state.tokens[endDelim.token];\n token.type = 's_close';\n token.tag = 's';\n token.nesting = -1;\n token.markup = '~~';\n token.content = '';\n if (state.tokens[endDelim.token - 1].type === 'text' && state.tokens[endDelim.token - 1].content === '~') {\n loneMarkers.push(endDelim.token - 1);\n }\n }\n\n // If a marker sequence has an odd number of characters, it's splitted\n // like this: `~~~~~` -> `~` + `~~` + `~~`, leaving one marker at the\n // start of the sequence.\n //\n // So, we have to move all those markers after subsequent s_close tags.\n //\n while (loneMarkers.length) {\n const i = loneMarkers.pop();\n let j = i + 1;\n while (j < state.tokens.length && state.tokens[j].type === 's_close') {\n j++;\n }\n j--;\n if (i !== j) {\n token = state.tokens[j];\n state.tokens[j] = state.tokens[i];\n state.tokens[i] = token;\n }\n }\n}\n\n// Walk through delimiter list and replace text tokens with tags\n//\nfunction strikethrough_postProcess(state) {\n const tokens_meta = state.tokens_meta;\n const max = state.tokens_meta.length;\n postProcess(state, state.delimiters);\n for (let curr = 0; curr < max; curr++) {\n if (tokens_meta[curr] && tokens_meta[curr].delimiters) {\n postProcess(state, tokens_meta[curr].delimiters);\n }\n }\n}\nexport default {\n tokenize: strikethrough_tokenize,\n postProcess: strikethrough_postProcess\n};","map":{"version":3,"names":["strikethrough_tokenize","state","silent","start","pos","marker","src","charCodeAt","scanned","scanDelims","len","length","ch","String","fromCharCode","token","push","content","i","delimiters","tokens","end","open","can_open","close","can_close","postProcess","loneMarkers","max","startDelim","endDelim","type","tag","nesting","markup","pop","j","strikethrough_postProcess","tokens_meta","curr","tokenize"],"sources":["F:/workspace/202226701027/huinongbao-app/node_modules/markdown-it/lib/rules_inline/strikethrough.mjs"],"sourcesContent":["// ~~strike through~~\n//\n\n// Insert each marker as a separate text token, and add it to delimiter list\n//\nfunction strikethrough_tokenize (state, 
silent) {\n const start = state.pos\n const marker = state.src.charCodeAt(start)\n\n if (silent) { return false }\n\n if (marker !== 0x7E/* ~ */) { return false }\n\n const scanned = state.scanDelims(state.pos, true)\n let len = scanned.length\n const ch = String.fromCharCode(marker)\n\n if (len < 2) { return false }\n\n let token\n\n if (len % 2) {\n token = state.push('text', '', 0)\n token.content = ch\n len--\n }\n\n for (let i = 0; i < len; i += 2) {\n token = state.push('text', '', 0)\n token.content = ch + ch\n\n state.delimiters.push({\n marker,\n length: 0, // disable \"rule of 3\" length checks meant for emphasis\n token: state.tokens.length - 1,\n end: -1,\n open: scanned.can_open,\n close: scanned.can_close\n })\n }\n\n state.pos += scanned.length\n\n return true\n}\n\nfunction postProcess (state, delimiters) {\n let token\n const loneMarkers = []\n const max = delimiters.length\n\n for (let i = 0; i < max; i++) {\n const startDelim = delimiters[i]\n\n if (startDelim.marker !== 0x7E/* ~ */) {\n continue\n }\n\n if (startDelim.end === -1) {\n continue\n }\n\n const endDelim = delimiters[startDelim.end]\n\n token = state.tokens[startDelim.token]\n token.type = 's_open'\n token.tag = 's'\n token.nesting = 1\n token.markup = '~~'\n token.content = ''\n\n token = state.tokens[endDelim.token]\n token.type = 's_close'\n token.tag = 's'\n token.nesting = -1\n token.markup = '~~'\n token.content = ''\n\n if (state.tokens[endDelim.token - 1].type === 'text' &&\n state.tokens[endDelim.token - 1].content === '~') {\n loneMarkers.push(endDelim.token - 1)\n }\n }\n\n // If a marker sequence has an odd number of characters, it's splitted\n // like this: `~~~~~` -> `~` + `~~` + `~~`, leaving one marker at the\n // start of the sequence.\n //\n // So, we have to move all those markers after subsequent s_close tags.\n //\n while (loneMarkers.length) {\n const i = loneMarkers.pop()\n let j = i + 1\n\n while (j < state.tokens.length && state.tokens[j].type === 's_close') {\n j++\n }\n\n j--\n\n if (i !== j) {\n token = state.tokens[j]\n state.tokens[j] = state.tokens[i]\n state.tokens[i] = token\n }\n }\n}\n\n// Walk through delimiter list and replace text tokens with tags\n//\nfunction strikethrough_postProcess (state) {\n const tokens_meta = state.tokens_meta\n const max = state.tokens_meta.length\n\n postProcess(state, state.delimiters)\n\n for (let curr = 0; curr < max; curr++) {\n if (tokens_meta[curr] && tokens_meta[curr].delimiters) {\n postProcess(state, tokens_meta[curr].delimiters)\n }\n }\n}\n\nexport default {\n tokenize: strikethrough_tokenize,\n postProcess: 
strikethrough_postProcess\n}\n"],"mappings":"AAAA;AACA;;AAEA;AACA;AACA,SAASA,sBAAsBA,CAAEC,KAAK,EAAEC,MAAM,EAAE;EAC9C,MAAMC,KAAK,GAAGF,KAAK,CAACG,GAAG;EACvB,MAAMC,MAAM,GAAGJ,KAAK,CAACK,GAAG,CAACC,UAAU,CAACJ,KAAK,CAAC;EAE1C,IAAID,MAAM,EAAE;IAAE,OAAO,KAAK;EAAC;EAE3B,IAAIG,MAAM,KAAK,IAAI,UAAS;IAAE,OAAO,KAAK;EAAC;EAE3C,MAAMG,OAAO,GAAGP,KAAK,CAACQ,UAAU,CAACR,KAAK,CAACG,GAAG,EAAE,IAAI,CAAC;EACjD,IAAIM,GAAG,GAAGF,OAAO,CAACG,MAAM;EACxB,MAAMC,EAAE,GAAGC,MAAM,CAACC,YAAY,CAACT,MAAM,CAAC;EAEtC,IAAIK,GAAG,GAAG,CAAC,EAAE;IAAE,OAAO,KAAK;EAAC;EAE5B,IAAIK,KAAK;EAET,IAAIL,GAAG,GAAG,CAAC,EAAE;IACXK,KAAK,GAAWd,KAAK,CAACe,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,CAAC,CAAC;IACzCD,KAAK,CAACE,OAAO,GAAGL,EAAE;IAClBF,GAAG,EAAE;EACP;EAEA,KAAK,IAAIQ,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGR,GAAG,EAAEQ,CAAC,IAAI,CAAC,EAAE;IAC/BH,KAAK,GAAWd,KAAK,CAACe,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,CAAC,CAAC;IACzCD,KAAK,CAACE,OAAO,GAAGL,EAAE,GAAGA,EAAE;IAEvBX,KAAK,CAACkB,UAAU,CAACH,IAAI,CAAC;MACpBX,MAAM;MACNM,MAAM,EAAE,CAAC;MAAM;MACfI,KAAK,EAAEd,KAAK,CAACmB,MAAM,CAACT,MAAM,GAAG,CAAC;MAC9BU,GAAG,EAAE,CAAC,CAAC;MACPC,IAAI,EAAEd,OAAO,CAACe,QAAQ;MACtBC,KAAK,EAAEhB,OAAO,CAACiB;IACjB,CAAC,CAAC;EACJ;EAEAxB,KAAK,CAACG,GAAG,IAAII,OAAO,CAACG,MAAM;EAE3B,OAAO,IAAI;AACb;AAEA,SAASe,WAAWA,CAAEzB,KAAK,EAAEkB,UAAU,EAAE;EACvC,IAAIJ,KAAK;EACT,MAAMY,WAAW,GAAG,EAAE;EACtB,MAAMC,GAAG,GAAGT,UAAU,CAACR,MAAM;EAE7B,KAAK,IAAIO,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGU,GAAG,EAAEV,CAAC,EAAE,EAAE;IAC5B,MAAMW,UAAU,GAAGV,UAAU,CAACD,CAAC,CAAC;IAEhC,IAAIW,UAAU,CAACxB,MAAM,KAAK,IAAI,UAAS;MACrC;IACF;IAEA,IAAIwB,UAAU,CAACR,GAAG,KAAK,CAAC,CAAC,EAAE;MACzB;IACF;IAEA,MAAMS,QAAQ,GAAGX,UAAU,CAACU,UAAU,CAACR,GAAG,CAAC;IAE3CN,KAAK,GAAWd,KAAK,CAACmB,MAAM,CAACS,UAAU,CAACd,KAAK,CAAC;IAC9CA,KAAK,CAACgB,IAAI,GAAM,QAAQ;IACxBhB,KAAK,CAACiB,GAAG,GAAO,GAAG;IACnBjB,KAAK,CAACkB,OAAO,GAAG,CAAC;IACjBlB,KAAK,CAACmB,MAAM,GAAI,IAAI;IACpBnB,KAAK,CAACE,OAAO,GAAG,EAAE;IAElBF,KAAK,GAAWd,KAAK,CAACmB,MAAM,CAACU,QAAQ,CAACf,KAAK,CAAC;IAC5CA,KAAK,CAACgB,IAAI,GAAM,SAAS;IACzBhB,KAAK,CAACiB,GAAG,GAAO,GAAG;IACnBjB,KAAK,CAACkB,OAAO,GAAG,CAAC,CAAC;IAClBlB,KAAK,CAACmB,MAAM,GAAI,IAAI;IACpBnB,KAAK,CAACE,OAAO,GAAG,EAAE;IAElB,IAAIhB,KAAK,CAACmB,MAAM,CAACU,QAAQ,CAACf,KAAK,GAAG,CAAC,CAAC,CAACgB,IAAI,KAAK,MAAM,IAChD9B,KAAK,CAACmB,MAAM,CAACU,QAAQ,CAACf,KAAK,GAAG,CAAC,CAAC,CAACE,OAAO,KAAK,GAAG,EAAE;MACpDU,WAAW,CAACX,IAAI,CAACc,QAAQ,CAACf,KAAK,GAAG,CAAC,CAAC;IACtC;EACF;;EAEA;EACA;EACA;EACA;EACA;EACA;EACA,OAAOY,WAAW,CAAChB,MAAM,EAAE;IACzB,MAAMO,CAAC,GAAGS,WAAW,CAACQ,GAAG,CAAC,CAAC;IAC3B,IAAIC,CAAC,GAAGlB,CAAC,GAAG,CAAC;IAEb,OAAOkB,CAAC,GAAGnC,KAAK,CAACmB,MAAM,CAACT,MAAM,IAAIV,KAAK,CAACmB,MAAM,CAACgB,CAAC,CAAC,CAACL,IAAI,KAAK,SAAS,EAAE;MACpEK,CAAC,EAAE;IACL;IAEAA,CAAC,EAAE;IAEH,IAAIlB,CAAC,KAAKkB,CAAC,EAAE;MACXrB,KAAK,GAAGd,KAAK,CAACmB,MAAM,CAACgB,CAAC,CAAC;MACvBnC,KAAK,CAACmB,MAAM,CAACgB,CAAC,CAAC,GAAGnC,KAAK,CAACmB,MAAM,CAACF,CAAC,CAAC;MACjCjB,KAAK,CAACmB,MAAM,CAACF,CAAC,CAAC,GAAGH,KAAK;IACzB;EACF;AACF;;AAEA;AACA;AACA,SAASsB,yBAAyBA,CAAEpC,KAAK,EAAE;EACzC,MAAMqC,WAAW,GAAGrC,KAAK,CAACqC,WAAW;EACrC,MAAMV,GAAG,GAAG3B,KAAK,CAACqC,WAAW,CAAC3B,MAAM;EAEpCe,WAAW,CAACzB,KAAK,EAAEA,KAAK,CAACkB,UAAU,CAAC;EAEpC,KAAK,IAAIoB,IAAI,GAAG,CAAC,EAAEA,IAAI,GAAGX,GAAG,EAAEW,IAAI,EAAE,EAAE;IACrC,IAAID,WAAW,CAACC,IAAI,CAAC,IAAID,WAAW,CAACC,IAAI,CAAC,CAACpB,UAAU,EAAE;MACrDO,WAAW,CAACzB,KAAK,EAAEqC,WAAW,CAACC,IAAI,CAAC,CAACpB,UAAU,CAAC;IAClD;EACF;AACF;AAEA,eAAe;EACbqB,QAAQ,EAAExC,sBAAsB;EAChC0B,WAAW,EAAEW;AACf,CAAC","ignoreList":[]},"metadata":{},"sourceType":"module","externalDependencies":[]}