{"ast":null,"code":"/** internal\n * class ParserInline\n *\n * Tokenizes paragraph content.\n **/\n\nimport Ruler from './ruler.mjs';\nimport StateInline from './rules_inline/state_inline.mjs';\nimport r_text from './rules_inline/text.mjs';\nimport r_linkify from './rules_inline/linkify.mjs';\nimport r_newline from './rules_inline/newline.mjs';\nimport r_escape from './rules_inline/escape.mjs';\nimport r_backticks from './rules_inline/backticks.mjs';\nimport r_strikethrough from './rules_inline/strikethrough.mjs';\nimport r_emphasis from './rules_inline/emphasis.mjs';\nimport r_link from './rules_inline/link.mjs';\nimport r_image from './rules_inline/image.mjs';\nimport r_autolink from './rules_inline/autolink.mjs';\nimport r_html_inline from './rules_inline/html_inline.mjs';\nimport r_entity from './rules_inline/entity.mjs';\nimport r_balance_pairs from './rules_inline/balance_pairs.mjs';\nimport r_fragments_join from './rules_inline/fragments_join.mjs';\n\n// Parser rules\n\nconst _rules = [['text', r_text], ['linkify', r_linkify], ['newline', r_newline], ['escape', r_escape], ['backticks', r_backticks], ['strikethrough', r_strikethrough.tokenize], ['emphasis', r_emphasis.tokenize], ['link', r_link], ['image', r_image], ['autolink', r_autolink], ['html_inline', r_html_inline], ['entity', r_entity]];\n\n// `rule2` ruleset was created specifically for emphasis/strikethrough\n// post-processing and may be changed in the future.\n//\n// Don't use this for anything except pairs (plugins working with `balance_pairs`).\n//\nconst _rules2 = [['balance_pairs', r_balance_pairs], ['strikethrough', r_strikethrough.postProcess], ['emphasis', r_emphasis.postProcess],\n// rules for pairs separate '**' into its own text tokens, which may be left unused,\n// rule below merges unused segments back with the rest of the text\n['fragments_join', r_fragments_join]];\n\n/**\n * new ParserInline()\n **/\nfunction ParserInline() {\n /**\n * ParserInline#ruler -> Ruler\n *\n * [[Ruler]] instance. Keep configuration of inline rules.\n **/\n this.ruler = new Ruler();\n for (let i = 0; i < _rules.length; i++) {\n this.ruler.push(_rules[i][0], _rules[i][1]);\n }\n\n /**\n * ParserInline#ruler2 -> Ruler\n *\n * [[Ruler]] instance. Second ruler used for post-processing\n * (e.g. in emphasis-like rules).\n **/\n this.ruler2 = new Ruler();\n for (let i = 0; i < _rules2.length; i++) {\n this.ruler2.push(_rules2[i][0], _rules2[i][1]);\n }\n}\n\n// Skip single token by running all rules in validation mode;\n// returns `true` if any rule reported success\n//\nParserInline.prototype.skipToken = function (state) {\n const pos = state.pos;\n const rules = this.ruler.getRules('');\n const len = rules.length;\n const maxNesting = state.md.options.maxNesting;\n const cache = state.cache;\n if (typeof cache[pos] !== 'undefined') {\n state.pos = cache[pos];\n return;\n }\n let ok = false;\n if (state.level < maxNesting) {\n for (let i = 0; i < len; i++) {\n // Increment state.level and decrement it later to limit recursion.\n // It's harmless to do here, because no tokens are created. But ideally,\n // we'd need a separate private state variable for this purpose.\n //\n state.level++;\n ok = rules[i](state, true);\n state.level--;\n if (ok) {\n if (pos >= state.pos) {\n throw new Error(\"inline rule didn't increment state.pos\");\n }\n break;\n }\n }\n } else {\n // Too much nesting, just skip until the end of the paragraph.\n //\n // NOTE: this will cause links to behave incorrectly in the following case,\n // when an amount of `[` is exactly equal to `maxNesting + 1`:\n //\n // [[[[[[[[[[[[[[[[[[[[[foo]()\n //\n // TODO: remove this workaround when CM standard will allow nested links\n // (we can replace it by preventing links from being parsed in\n // validation mode)\n //\n state.pos = state.posMax;\n }\n if (!ok) {\n state.pos++;\n }\n cache[pos] = state.pos;\n};\n\n// Generate tokens for input range\n//\nParserInline.prototype.tokenize = function (state) {\n const rules = this.ruler.getRules('');\n const len = rules.length;\n const end = state.posMax;\n const maxNesting = state.md.options.maxNesting;\n while (state.pos < end) {\n // Try all possible rules.\n // On success, rule should:\n //\n // - update `state.pos`\n // - update `state.tokens`\n // - return true\n const prevPos = state.pos;\n let ok = false;\n if (state.level < maxNesting) {\n for (let i = 0; i < len; i++) {\n ok = rules[i](state, false);\n if (ok) {\n if (prevPos >= state.pos) {\n throw new Error(\"inline rule didn't increment state.pos\");\n }\n break;\n }\n }\n }\n if (ok) {\n if (state.pos >= end) {\n break;\n }\n continue;\n }\n state.pending += state.src[state.pos++];\n }\n if (state.pending) {\n state.pushPending();\n }\n};\n\n/**\n * ParserInline.parse(str, md, env, outTokens)\n *\n * Process input string and push inline tokens into `outTokens`\n **/\nParserInline.prototype.parse = function (str, md, env, outTokens) {\n const state = new this.State(str, md, env, outTokens);\n this.tokenize(state);\n const rules = this.ruler2.getRules('');\n const len = rules.length;\n for (let i = 0; i < len; i++) {\n rules[i](state);\n }\n};\nParserInline.prototype.State = StateInline;\nexport default ParserInline;","map":{"version":3,"names":["Ruler","StateInline","r_text","r_linkify","r_newline","r_escape","r_backticks","r_strikethrough","r_emphasis","r_link","r_image","r_autolink","r_html_inline","r_entity","r_balance_pairs","r_fragments_join","_rules","tokenize","_rules2","postProcess","ParserInline","ruler","i","length","push","ruler2","prototype","skipToken","state","pos","rules","getRules","len","maxNesting","md","options","cache","ok","level","Error","posMax","end","prevPos","pending","src","pushPending","parse","str","env","outTokens","State"],"sources":["F:/workspace/202226701027/huinongbao-app/node_modules/markdown-it/lib/parser_inline.mjs"],"sourcesContent":["/** internal\n * class ParserInline\n *\n * Tokenizes paragraph content.\n **/\n\nimport Ruler from './ruler.mjs'\nimport StateInline from './rules_inline/state_inline.mjs'\n\nimport r_text from './rules_inline/text.mjs'\nimport r_linkify from './rules_inline/linkify.mjs'\nimport r_newline from './rules_inline/newline.mjs'\nimport r_escape from './rules_inline/escape.mjs'\nimport r_backticks from './rules_inline/backticks.mjs'\nimport r_strikethrough from './rules_inline/strikethrough.mjs'\nimport r_emphasis from './rules_inline/emphasis.mjs'\nimport r_link from './rules_inline/link.mjs'\nimport r_image from './rules_inline/image.mjs'\nimport r_autolink from './rules_inline/autolink.mjs'\nimport r_html_inline from './rules_inline/html_inline.mjs'\nimport r_entity from './rules_inline/entity.mjs'\n\nimport r_balance_pairs from './rules_inline/balance_pairs.mjs'\nimport r_fragments_join from './rules_inline/fragments_join.mjs'\n\n// Parser rules\n\nconst _rules = [\n ['text', r_text],\n ['linkify', r_linkify],\n ['newline', r_newline],\n ['escape', r_escape],\n ['backticks', r_backticks],\n ['strikethrough', r_strikethrough.tokenize],\n ['emphasis', r_emphasis.tokenize],\n ['link', r_link],\n ['image', r_image],\n ['autolink', r_autolink],\n ['html_inline', r_html_inline],\n ['entity', r_entity]\n]\n\n// `rule2` ruleset was created specifically for emphasis/strikethrough\n// post-processing and may be changed in the future.\n//\n// Don't use this for anything except pairs (plugins working with `balance_pairs`).\n//\nconst _rules2 = [\n ['balance_pairs', r_balance_pairs],\n ['strikethrough', r_strikethrough.postProcess],\n ['emphasis', r_emphasis.postProcess],\n // rules for pairs separate '**' into its own text tokens, which may be left unused,\n // rule below merges unused segments back with the rest of the text\n ['fragments_join', r_fragments_join]\n]\n\n/**\n * new ParserInline()\n **/\nfunction ParserInline () {\n /**\n * ParserInline#ruler -> Ruler\n *\n * [[Ruler]] instance. Keep configuration of inline rules.\n **/\n this.ruler = new Ruler()\n\n for (let i = 0; i < _rules.length; i++) {\n this.ruler.push(_rules[i][0], _rules[i][1])\n }\n\n /**\n * ParserInline#ruler2 -> Ruler\n *\n * [[Ruler]] instance. Second ruler used for post-processing\n * (e.g. in emphasis-like rules).\n **/\n this.ruler2 = new Ruler()\n\n for (let i = 0; i < _rules2.length; i++) {\n this.ruler2.push(_rules2[i][0], _rules2[i][1])\n }\n}\n\n// Skip single token by running all rules in validation mode;\n// returns `true` if any rule reported success\n//\nParserInline.prototype.skipToken = function (state) {\n const pos = state.pos\n const rules = this.ruler.getRules('')\n const len = rules.length\n const maxNesting = state.md.options.maxNesting\n const cache = state.cache\n\n if (typeof cache[pos] !== 'undefined') {\n state.pos = cache[pos]\n return\n }\n\n let ok = false\n\n if (state.level < maxNesting) {\n for (let i = 0; i < len; i++) {\n // Increment state.level and decrement it later to limit recursion.\n // It's harmless to do here, because no tokens are created. But ideally,\n // we'd need a separate private state variable for this purpose.\n //\n state.level++\n ok = rules[i](state, true)\n state.level--\n\n if (ok) {\n if (pos >= state.pos) { throw new Error(\"inline rule didn't increment state.pos\") }\n break\n }\n }\n } else {\n // Too much nesting, just skip until the end of the paragraph.\n //\n // NOTE: this will cause links to behave incorrectly in the following case,\n // when an amount of `[` is exactly equal to `maxNesting + 1`:\n //\n // [[[[[[[[[[[[[[[[[[[[[foo]()\n //\n // TODO: remove this workaround when CM standard will allow nested links\n // (we can replace it by preventing links from being parsed in\n // validation mode)\n //\n state.pos = state.posMax\n }\n\n if (!ok) { state.pos++ }\n cache[pos] = state.pos\n}\n\n// Generate tokens for input range\n//\nParserInline.prototype.tokenize = function (state) {\n const rules = this.ruler.getRules('')\n const len = rules.length\n const end = state.posMax\n const maxNesting = state.md.options.maxNesting\n\n while (state.pos < end) {\n // Try all possible rules.\n // On success, rule should:\n //\n // - update `state.pos`\n // - update `state.tokens`\n // - return true\n const prevPos = state.pos\n let ok = false\n\n if (state.level < maxNesting) {\n for (let i = 0; i < len; i++) {\n ok = rules[i](state, false)\n if (ok) {\n if (prevPos >= state.pos) { throw new Error(\"inline rule didn't increment state.pos\") }\n break\n }\n }\n }\n\n if (ok) {\n if (state.pos >= end) { break }\n continue\n }\n\n state.pending += state.src[state.pos++]\n }\n\n if (state.pending) {\n state.pushPending()\n }\n}\n\n/**\n * ParserInline.parse(str, md, env, outTokens)\n *\n * Process input string and push inline tokens into `outTokens`\n **/\nParserInline.prototype.parse = function (str, md, env, outTokens) {\n const state = new this.State(str, md, env, outTokens)\n\n this.tokenize(state)\n\n const rules = this.ruler2.getRules('')\n const len = rules.length\n\n for (let i = 0; i < len; i++) {\n rules[i](state)\n }\n}\n\nParserInline.prototype.State = StateInline\n\nexport default ParserInline\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;;AAEA,OAAOA,KAAK,MAAM,aAAa;AAC/B,OAAOC,WAAW,MAAM,iCAAiC;AAEzD,OAAOC,MAAM,MAAM,yBAAyB;AAC5C,OAAOC,SAAS,MAAM,4BAA4B;AAClD,OAAOC,SAAS,MAAM,4BAA4B;AAClD,OAAOC,QAAQ,MAAM,2BAA2B;AAChD,OAAOC,WAAW,MAAM,8BAA8B;AACtD,OAAOC,eAAe,MAAM,kCAAkC;AAC9D,OAAOC,UAAU,MAAM,6BAA6B;AACpD,OAAOC,MAAM,MAAM,yBAAyB;AAC5C,OAAOC,OAAO,MAAM,0BAA0B;AAC9C,OAAOC,UAAU,MAAM,6BAA6B;AACpD,OAAOC,aAAa,MAAM,gCAAgC;AAC1D,OAAOC,QAAQ,MAAM,2BAA2B;AAEhD,OAAOC,eAAe,MAAM,kCAAkC;AAC9D,OAAOC,gBAAgB,MAAM,mCAAmC;;AAEhE;;AAEA,MAAMC,MAAM,GAAG,CACb,CAAC,MAAM,EAAad,MAAM,CAAC,EAC3B,CAAC,SAAS,EAAUC,SAAS,CAAC,EAC9B,CAAC,SAAS,EAAUC,SAAS,CAAC,EAC9B,CAAC,QAAQ,EAAWC,QAAQ,CAAC,EAC7B,CAAC,WAAW,EAAQC,WAAW,CAAC,EAChC,CAAC,eAAe,EAAIC,eAAe,CAACU,QAAQ,CAAC,EAC7C,CAAC,UAAU,EAAST,UAAU,CAACS,QAAQ,CAAC,EACxC,CAAC,MAAM,EAAaR,MAAM,CAAC,EAC3B,CAAC,OAAO,EAAYC,OAAO,CAAC,EAC5B,CAAC,UAAU,EAASC,UAAU,CAAC,EAC/B,CAAC,aAAa,EAAMC,aAAa,CAAC,EAClC,CAAC,QAAQ,EAAWC,QAAQ,CAAC,CAC9B;;AAED;AACA;AACA;AACA;AACA;AACA,MAAMK,OAAO,GAAG,CACd,CAAC,eAAe,EAAIJ,eAAe,CAAC,EACpC,CAAC,eAAe,EAAIP,eAAe,CAACY,WAAW,CAAC,EAChD,CAAC,UAAU,EAASX,UAAU,CAACW,WAAW,CAAC;AAC3C;AACA;AACA,CAAC,gBAAgB,EAAGJ,gBAAgB,CAAC,CACtC;;AAED;AACA;AACA;AACA,SAASK,YAAYA,CAAA,EAAI;EACvB;AACF;AACA;AACA;AACA;EACE,IAAI,CAACC,KAAK,GAAG,IAAIrB,KAAK,CAAC,CAAC;EAExB,KAAK,IAAIsB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGN,MAAM,CAACO,MAAM,EAAED,CAAC,EAAE,EAAE;IACtC,IAAI,CAACD,KAAK,CAACG,IAAI,CAACR,MAAM,CAACM,CAAC,CAAC,CAAC,CAAC,CAAC,EAAEN,MAAM,CAACM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;EAC7C;;EAEA;AACF;AACA;AACA;AACA;AACA;EACE,IAAI,CAACG,MAAM,GAAG,IAAIzB,KAAK,CAAC,CAAC;EAEzB,KAAK,IAAIsB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGJ,OAAO,CAACK,MAAM,EAAED,CAAC,EAAE,EAAE;IACvC,IAAI,CAACG,MAAM,CAACD,IAAI,CAACN,OAAO,CAACI,CAAC,CAAC,CAAC,CAAC,CAAC,EAAEJ,OAAO,CAACI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;EAChD;AACF;;AAEA;AACA;AACA;AACAF,YAAY,CAACM,SAAS,CAACC,SAAS,GAAG,UAAUC,KAAK,EAAE;EAClD,MAAMC,GAAG,GAAGD,KAAK,CAACC,GAAG;EACrB,MAAMC,KAAK,GAAG,IAAI,CAACT,KAAK,CAACU,QAAQ,CAAC,EAAE,CAAC;EACrC,MAAMC,GAAG,GAAGF,KAAK,CAACP,MAAM;EACxB,MAAMU,UAAU,GAAGL,KAAK,CAACM,EAAE,CAACC,OAAO,CAACF,UAAU;EAC9C,MAAMG,KAAK,GAAGR,KAAK,CAACQ,KAAK;EAEzB,IAAI,OAAOA,KAAK,CAACP,GAAG,CAAC,KAAK,WAAW,EAAE;IACrCD,KAAK,CAACC,GAAG,GAAGO,KAAK,CAACP,GAAG,CAAC;IACtB;EACF;EAEA,IAAIQ,EAAE,GAAG,KAAK;EAEd,IAAIT,KAAK,CAACU,KAAK,GAAGL,UAAU,EAAE;IAC5B,KAAK,IAAIX,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGU,GAAG,EAAEV,CAAC,EAAE,EAAE;MAC5B;MACA;MACA;MACA;MACAM,KAAK,CAACU,KAAK,EAAE;MACbD,EAAE,GAAGP,KAAK,CAACR,CAAC,CAAC,CAACM,KAAK,EAAE,IAAI,CAAC;MAC1BA,KAAK,CAACU,KAAK,EAAE;MAEb,IAAID,EAAE,EAAE;QACN,IAAIR,GAAG,IAAID,KAAK,CAACC,GAAG,EAAE;UAAE,MAAM,IAAIU,KAAK,CAAC,wCAAwC,CAAC;QAAC;QAClF;MACF;IACF;EACF,CAAC,MAAM;IACL;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACAX,KAAK,CAACC,GAAG,GAAGD,KAAK,CAACY,MAAM;EAC1B;EAEA,IAAI,CAACH,EAAE,EAAE;IAAET,KAAK,CAACC,GAAG,EAAE;EAAC;EACvBO,KAAK,CAACP,GAAG,CAAC,GAAGD,KAAK,CAACC,GAAG;AACxB,CAAC;;AAED;AACA;AACAT,YAAY,CAACM,SAAS,CAACT,QAAQ,GAAG,UAAUW,KAAK,EAAE;EACjD,MAAME,KAAK,GAAG,IAAI,CAACT,KAAK,CAACU,QAAQ,CAAC,EAAE,CAAC;EACrC,MAAMC,GAAG,GAAGF,KAAK,CAACP,MAAM;EACxB,MAAMkB,GAAG,GAAGb,KAAK,CAACY,MAAM;EACxB,MAAMP,UAAU,GAAGL,KAAK,CAACM,EAAE,CAACC,OAAO,CAACF,UAAU;EAE9C,OAAOL,KAAK,CAACC,GAAG,GAAGY,GAAG,EAAE;IACtB;IACA;IACA;IACA;IACA;IACA;IACA,MAAMC,OAAO,GAAGd,KAAK,CAACC,GAAG;IACzB,IAAIQ,EAAE,GAAG,KAAK;IAEd,IAAIT,KAAK,CAACU,KAAK,GAAGL,UAAU,EAAE;MAC5B,KAAK,IAAIX,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGU,GAAG,EAAEV,CAAC,EAAE,EAAE;QAC5Be,EAAE,GAAGP,KAAK,CAACR,CAAC,CAAC,CAACM,KAAK,EAAE,KAAK,CAAC;QAC3B,IAAIS,EAAE,EAAE;UACN,IAAIK,OAAO,IAAId,KAAK,CAACC,GAAG,EAAE;YAAE,MAAM,IAAIU,KAAK,CAAC,wCAAwC,CAAC;UAAC;UACtF;QACF;MACF;IACF;IAEA,IAAIF,EAAE,EAAE;MACN,IAAIT,KAAK,CAACC,GAAG,IAAIY,GAAG,EAAE;QAAE;MAAM;MAC9B;IACF;IAEAb,KAAK,CAACe,OAAO,IAAIf,KAAK,CAACgB,GAAG,CAAChB,KAAK,CAACC,GAAG,EAAE,CAAC;EACzC;EAEA,IAAID,KAAK,CAACe,OAAO,EAAE;IACjBf,KAAK,CAACiB,WAAW,CAAC,CAAC;EACrB;AACF,CAAC;;AAED;AACA;AACA;AACA;AACA;AACAzB,YAAY,CAACM,SAAS,CAACoB,KAAK,GAAG,UAAUC,GAAG,EAAEb,EAAE,EAAEc,GAAG,EAAEC,SAAS,EAAE;EAChE,MAAMrB,KAAK,GAAG,IAAI,IAAI,CAACsB,KAAK,CAACH,GAAG,EAAEb,EAAE,EAAEc,GAAG,EAAEC,SAAS,CAAC;EAErD,IAAI,CAAChC,QAAQ,CAACW,KAAK,CAAC;EAEpB,MAAME,KAAK,GAAG,IAAI,CAACL,MAAM,CAACM,QAAQ,CAAC,EAAE,CAAC;EACtC,MAAMC,GAAG,GAAGF,KAAK,CAACP,MAAM;EAExB,KAAK,IAAID,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGU,GAAG,EAAEV,CAAC,EAAE,EAAE;IAC5BQ,KAAK,CAACR,CAAC,CAAC,CAACM,KAAK,CAAC;EACjB;AACF,CAAC;AAEDR,YAAY,CAACM,SAAS,CAACwB,KAAK,GAAGjD,WAAW;AAE1C,eAAemB,YAAY","ignoreList":[]},"metadata":{},"sourceType":"module","externalDependencies":[]}