06245b8ff79f6cba0a04f3963a8da6fb508055dcd7f2c51b49790885fda0bae2.json

  1. {"ast":null,"code":"/** internal\n * class ParserBlock\n *\n * Block-level tokenizer.\n **/\n\nimport Ruler from './ruler.mjs';\nimport StateBlock from './rules_block/state_block.mjs';\nimport r_table from './rules_block/table.mjs';\nimport r_code from './rules_block/code.mjs';\nimport r_fence from './rules_block/fence.mjs';\nimport r_blockquote from './rules_block/blockquote.mjs';\nimport r_hr from './rules_block/hr.mjs';\nimport r_list from './rules_block/list.mjs';\nimport r_reference from './rules_block/reference.mjs';\nimport r_html_block from './rules_block/html_block.mjs';\nimport r_heading from './rules_block/heading.mjs';\nimport r_lheading from './rules_block/lheading.mjs';\nimport r_paragraph from './rules_block/paragraph.mjs';\nconst _rules = [\n// First 2 params - rule name & source. Secondary array - list of rules,\n// which can be terminated by this one.\n['table', r_table, ['paragraph', 'reference']], ['code', r_code], ['fence', r_fence, ['paragraph', 'reference', 'blockquote', 'list']], ['blockquote', r_blockquote, ['paragraph', 'reference', 'blockquote', 'list']], ['hr', r_hr, ['paragraph', 'reference', 'blockquote', 'list']], ['list', r_list, ['paragraph', 'reference', 'blockquote']], ['reference', r_reference], ['html_block', r_html_block, ['paragraph', 'reference', 'blockquote']], ['heading', r_heading, ['paragraph', 'reference', 'blockquote']], ['lheading', r_lheading], ['paragraph', r_paragraph]];\n\n/**\n * new ParserBlock()\n **/\nfunction ParserBlock() {\n /**\n * ParserBlock#ruler -> Ruler\n *\n * [[Ruler]] instance. Keep configuration of block rules.\n **/\n this.ruler = new Ruler();\n for (let i = 0; i < _rules.length; i++) {\n this.ruler.push(_rules[i][0], _rules[i][1], {\n alt: (_rules[i][2] || []).slice()\n });\n }\n}\n\n// Generate tokens for input range\n//\nParserBlock.prototype.tokenize = function (state, startLine, endLine) {\n const rules = this.ruler.getRules('');\n const len = rules.length;\n const maxNesting = state.md.options.maxNesting;\n let line = startLine;\n let hasEmptyLines = false;\n while (line < endLine) {\n state.line = line = state.skipEmptyLines(line);\n if (line >= endLine) {\n break;\n }\n\n // Termination condition for nested calls.\n // Nested calls currently used for blockquotes & lists\n if (state.sCount[line] < state.blkIndent) {\n break;\n }\n\n // If nesting level exceeded - skip tail to the end. That's not ordinary\n // situation and we should not care about content.\n if (state.level >= maxNesting) {\n state.line = endLine;\n break;\n }\n\n // Try all possible rules.\n // On success, rule should:\n //\n // - update `state.line`\n // - update `state.tokens`\n // - return true\n const prevLine = state.line;\n let ok = false;\n for (let i = 0; i < len; i++) {\n ok = rules[i](state, line, endLine, false);\n if (ok) {\n if (prevLine >= state.line) {\n throw new Error(\"block rule didn't increment state.line\");\n }\n break;\n }\n }\n\n // this can only happen if user disables paragraph rule\n if (!ok) throw new Error('none of the block rules matched');\n\n // set state.tight if we had an empty line before current tag\n // i.e. 
latest empty line should not count\n state.tight = !hasEmptyLines;\n\n // paragraph might \"eat\" one newline after it in nested lists\n if (state.isEmpty(state.line - 1)) {\n hasEmptyLines = true;\n }\n line = state.line;\n if (line < endLine && state.isEmpty(line)) {\n hasEmptyLines = true;\n line++;\n state.line = line;\n }\n }\n};\n\n/**\n * ParserBlock.parse(str, md, env, outTokens)\n *\n * Process input string and push block tokens into `outTokens`\n **/\nParserBlock.prototype.parse = function (src, md, env, outTokens) {\n if (!src) {\n return;\n }\n const state = new this.State(src, md, env, outTokens);\n this.tokenize(state, state.line, state.lineMax);\n};\nParserBlock.prototype.State = StateBlock;\nexport default ParserBlock;","map":{"version":3,"names":["Ruler","StateBlock","r_table","r_code","r_fence","r_blockquote","r_hr","r_list","r_reference","r_html_block","r_heading","r_lheading","r_paragraph","_rules","ParserBlock","ruler","i","length","push","alt","slice","prototype","tokenize","state","startLine","endLine","rules","getRules","len","maxNesting","md","options","line","hasEmptyLines","skipEmptyLines","sCount","blkIndent","level","prevLine","ok","Error","tight","isEmpty","parse","src","env","outTokens","State","lineMax"],"sources":["F:/workspace/202226701027/huinongbao-app/node_modules/markdown-it/lib/parser_block.mjs"],"sourcesContent":["/** internal\n * class ParserBlock\n *\n * Block-level tokenizer.\n **/\n\nimport Ruler from './ruler.mjs'\nimport StateBlock from './rules_block/state_block.mjs'\n\nimport r_table from './rules_block/table.mjs'\nimport r_code from './rules_block/code.mjs'\nimport r_fence from './rules_block/fence.mjs'\nimport r_blockquote from './rules_block/blockquote.mjs'\nimport r_hr from './rules_block/hr.mjs'\nimport r_list from './rules_block/list.mjs'\nimport r_reference from './rules_block/reference.mjs'\nimport r_html_block from './rules_block/html_block.mjs'\nimport r_heading from './rules_block/heading.mjs'\nimport r_lheading from './rules_block/lheading.mjs'\nimport r_paragraph from './rules_block/paragraph.mjs'\n\nconst _rules = [\n // First 2 params - rule name & source. Secondary array - list of rules,\n // which can be terminated by this one.\n ['table', r_table, ['paragraph', 'reference']],\n ['code', r_code],\n ['fence', r_fence, ['paragraph', 'reference', 'blockquote', 'list']],\n ['blockquote', r_blockquote, ['paragraph', 'reference', 'blockquote', 'list']],\n ['hr', r_hr, ['paragraph', 'reference', 'blockquote', 'list']],\n ['list', r_list, ['paragraph', 'reference', 'blockquote']],\n ['reference', r_reference],\n ['html_block', r_html_block, ['paragraph', 'reference', 'blockquote']],\n ['heading', r_heading, ['paragraph', 'reference', 'blockquote']],\n ['lheading', r_lheading],\n ['paragraph', r_paragraph]\n]\n\n/**\n * new ParserBlock()\n **/\nfunction ParserBlock () {\n /**\n * ParserBlock#ruler -> Ruler\n *\n * [[Ruler]] instance. 
Keep configuration of block rules.\n **/\n this.ruler = new Ruler()\n\n for (let i = 0; i < _rules.length; i++) {\n this.ruler.push(_rules[i][0], _rules[i][1], { alt: (_rules[i][2] || []).slice() })\n }\n}\n\n// Generate tokens for input range\n//\nParserBlock.prototype.tokenize = function (state, startLine, endLine) {\n const rules = this.ruler.getRules('')\n const len = rules.length\n const maxNesting = state.md.options.maxNesting\n let line = startLine\n let hasEmptyLines = false\n\n while (line < endLine) {\n state.line = line = state.skipEmptyLines(line)\n if (line >= endLine) { break }\n\n // Termination condition for nested calls.\n // Nested calls currently used for blockquotes & lists\n if (state.sCount[line] < state.blkIndent) { break }\n\n // If nesting level exceeded - skip tail to the end. That's not ordinary\n // situation and we should not care about content.\n if (state.level >= maxNesting) {\n state.line = endLine\n break\n }\n\n // Try all possible rules.\n // On success, rule should:\n //\n // - update `state.line`\n // - update `state.tokens`\n // - return true\n const prevLine = state.line\n let ok = false\n\n for (let i = 0; i < len; i++) {\n ok = rules[i](state, line, endLine, false)\n if (ok) {\n if (prevLine >= state.line) {\n throw new Error(\"block rule didn't increment state.line\")\n }\n break\n }\n }\n\n // this can only happen if user disables paragraph rule\n if (!ok) throw new Error('none of the block rules matched')\n\n // set state.tight if we had an empty line before current tag\n // i.e. latest empty line should not count\n state.tight = !hasEmptyLines\n\n // paragraph might \"eat\" one newline after it in nested lists\n if (state.isEmpty(state.line - 1)) {\n hasEmptyLines = true\n }\n\n line = state.line\n\n if (line < endLine && state.isEmpty(line)) {\n hasEmptyLines = true\n line++\n state.line = line\n }\n }\n}\n\n/**\n * ParserBlock.parse(str, md, env, outTokens)\n *\n * Process input string and push block tokens into `outTokens`\n **/\nParserBlock.prototype.parse = function (src, md, env, outTokens) {\n if (!src) { return }\n\n const state = new this.State(src, md, env, outTokens)\n\n this.tokenize(state, state.line, state.lineMax)\n}\n\nParserBlock.prototype.State = StateBlock\n\nexport default 
ParserBlock\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;;AAEA,OAAOA,KAAK,MAAM,aAAa;AAC/B,OAAOC,UAAU,MAAM,+BAA+B;AAEtD,OAAOC,OAAO,MAAM,yBAAyB;AAC7C,OAAOC,MAAM,MAAM,wBAAwB;AAC3C,OAAOC,OAAO,MAAM,yBAAyB;AAC7C,OAAOC,YAAY,MAAM,8BAA8B;AACvD,OAAOC,IAAI,MAAM,sBAAsB;AACvC,OAAOC,MAAM,MAAM,wBAAwB;AAC3C,OAAOC,WAAW,MAAM,6BAA6B;AACrD,OAAOC,YAAY,MAAM,8BAA8B;AACvD,OAAOC,SAAS,MAAM,2BAA2B;AACjD,OAAOC,UAAU,MAAM,4BAA4B;AACnD,OAAOC,WAAW,MAAM,6BAA6B;AAErD,MAAMC,MAAM,GAAG;AACb;AACA;AACA,CAAC,OAAO,EAAOX,OAAO,EAAO,CAAC,WAAW,EAAE,WAAW,CAAC,CAAC,EACxD,CAAC,MAAM,EAAQC,MAAM,CAAC,EACtB,CAAC,OAAO,EAAOC,OAAO,EAAO,CAAC,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,CAAC,CAAC,EAC9E,CAAC,YAAY,EAAEC,YAAY,EAAE,CAAC,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,CAAC,CAAC,EAC9E,CAAC,IAAI,EAAUC,IAAI,EAAU,CAAC,WAAW,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,CAAC,CAAC,EAC9E,CAAC,MAAM,EAAQC,MAAM,EAAQ,CAAC,WAAW,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,EACtE,CAAC,WAAW,EAAGC,WAAW,CAAC,EAC3B,CAAC,YAAY,EAAEC,YAAY,EAAE,CAAC,WAAW,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,EACtE,CAAC,SAAS,EAAKC,SAAS,EAAK,CAAC,WAAW,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,EACtE,CAAC,UAAU,EAAIC,UAAU,CAAC,EAC1B,CAAC,WAAW,EAAGC,WAAW,CAAC,CAC5B;;AAED;AACA;AACA;AACA,SAASE,WAAWA,CAAA,EAAI;EACtB;AACF;AACA;AACA;AACA;EACE,IAAI,CAACC,KAAK,GAAG,IAAIf,KAAK,CAAC,CAAC;EAExB,KAAK,IAAIgB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGH,MAAM,CAACI,MAAM,EAAED,CAAC,EAAE,EAAE;IACtC,IAAI,CAACD,KAAK,CAACG,IAAI,CAACL,MAAM,CAACG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAEH,MAAM,CAACG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE;MAAEG,GAAG,EAAE,CAACN,MAAM,CAACG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,EAAEI,KAAK,CAAC;IAAE,CAAC,CAAC;EACpF;AACF;;AAEA;AACA;AACAN,WAAW,CAACO,SAAS,CAACC,QAAQ,GAAG,UAAUC,KAAK,EAAEC,SAAS,EAAEC,OAAO,EAAE;EACpE,MAAMC,KAAK,GAAG,IAAI,CAACX,KAAK,CAACY,QAAQ,CAAC,EAAE,CAAC;EACrC,MAAMC,GAAG,GAAGF,KAAK,CAACT,MAAM;EACxB,MAAMY,UAAU,GAAGN,KAAK,CAACO,EAAE,CAACC,OAAO,CAACF,UAAU;EAC9C,IAAIG,IAAI,GAAGR,SAAS;EACpB,IAAIS,aAAa,GAAG,KAAK;EAEzB,OAAOD,IAAI,GAAGP,OAAO,EAAE;IACrBF,KAAK,CAACS,IAAI,GAAGA,IAAI,GAAGT,KAAK,CAACW,cAAc,CAACF,IAAI,CAAC;IAC9C,IAAIA,IAAI,IAAIP,OAAO,EAAE;MAAE;IAAM;;IAE7B;IACA;IACA,IAAIF,KAAK,CAACY,MAAM,CAACH,IAAI,CAAC,GAAGT,KAAK,CAACa,SAAS,EAAE;MAAE;IAAM;;IAElD;IACA;IACA,IAAIb,KAAK,CAACc,KAAK,IAAIR,UAAU,EAAE;MAC7BN,KAAK,CAACS,IAAI,GAAGP,OAAO;MACpB;IACF;;IAEA;IACA;IACA;IACA;IACA;IACA;IACA,MAAMa,QAAQ,GAAGf,KAAK,CAACS,IAAI;IAC3B,IAAIO,EAAE,GAAG,KAAK;IAEd,KAAK,IAAIvB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGY,GAAG,EAAEZ,CAAC,EAAE,EAAE;MAC5BuB,EAAE,GAAGb,KAAK,CAACV,CAAC,CAAC,CAACO,KAAK,EAAES,IAAI,EAAEP,OAAO,EAAE,KAAK,CAAC;MAC1C,IAAIc,EAAE,EAAE;QACN,IAAID,QAAQ,IAAIf,KAAK,CAACS,IAAI,EAAE;UAC1B,MAAM,IAAIQ,KAAK,CAAC,wCAAwC,CAAC;QAC3D;QACA;MACF;IACF;;IAEA;IACA,IAAI,CAACD,EAAE,EAAE,MAAM,IAAIC,KAAK,CAAC,iCAAiC,CAAC;;IAE3D;IACA;IACAjB,KAAK,CAACkB,KAAK,GAAG,CAACR,aAAa;;IAE5B;IACA,IAAIV,KAAK,CAACmB,OAAO,CAACnB,KAAK,CAACS,IAAI,GAAG,CAAC,CAAC,EAAE;MACjCC,aAAa,GAAG,IAAI;IACtB;IAEAD,IAAI,GAAGT,KAAK,CAACS,IAAI;IAEjB,IAAIA,IAAI,GAAGP,OAAO,IAAIF,KAAK,CAACmB,OAAO,CAACV,IAAI,CAAC,EAAE;MACzCC,aAAa,GAAG,IAAI;MACpBD,IAAI,EAAE;MACNT,KAAK,CAACS,IAAI,GAAGA,IAAI;IACnB;EACF;AACF,CAAC;;AAED;AACA;AACA;AACA;AACA;AACAlB,WAAW,CAACO,SAAS,CAACsB,KAAK,GAAG,UAAUC,GAAG,EAAEd,EAAE,EAAEe,GAAG,EAAEC,SAAS,EAAE;EAC/D,IAAI,CAACF,GAAG,EAAE;IAAE;EAAO;EAEnB,MAAMrB,KAAK,GAAG,IAAI,IAAI,CAACwB,KAAK,CAACH,GAAG,EAAEd,EAAE,EAAEe,GAAG,EAAEC,SAAS,CAAC;EAErD,IAAI,CAACxB,QAAQ,CAACC,KAAK,EAAEA,KAAK,CAACS,IAAI,EAAET,KAAK,CAACyB,OAAO,CAAC;AACjD,CAAC;AAEDlC,WAAW,CAACO,SAAS,CAAC0B,KAAK,GAAG9C,UAAU;AAExC,eAAea,WAAW","ignoreList":[]},"metadata":{},"sourceType":"module","externalDependencies":[]}