AutoLink Plugin

name

AutoLink

description

A plugin that automatically creates links to your existing notes.

icon

insert_link

instructions

The plugin automatically links words in your selected text to existing notes (and sections of existing notes).



Steps to autolink selected text:
- Select the desired text and choose the AutoLink option.
- The plugin replaces matching words in the selection with links to existing notes (see the sketch below).

Steps to autolink notes:
- Select Plugin > AutoLink from the note menu.
- Choose the links you want to create; the plugin then creates them.
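
The sketch below shows how these two flows are typically wired as Amplenote plugin actions. It is illustrative only and is not the plugin's bundled implementation (that appears under Code below): the replaceText and noteOption entry points, app.filterNotes, and app.getNoteContent are taken from the public Amplenote plugin API, while linkKnownTitles is a hypothetical helper standing in for the plugin's actual matching logic.

```javascript
// Illustrative sketch only — not the plugin's bundled implementation.
const plugin = {
  // "Steps to autolink selected text": shown in the menu for selected text.
  replaceText: {
    "AutoLink": async function (app, selectedText) {
      const noteHandles = await app.filterNotes({}); // assumed call to list candidate notes
      // 3 = default "Min Page Name Length" (see the settings above).
      return linkKnownTitles(selectedText, noteHandles, 3); // returned string replaces the selection
    }
  },
  // "Steps to autolink notes": shown under Plugin > AutoLink in the note menu.
  noteOption: {
    "AutoLink": async function (app, noteUUID) {
      const content = await app.getNoteContent({ uuid: noteUUID });
      // ...let the user pick which links to create, then write the updated content back...
    }
  }
};

// Hypothetical helper: wraps exact, whole-word title matches in Amplenote note links.
function linkKnownTitles(text, noteHandles, minLength) {
  let result = text;
  for (const handle of noteHandles) {
    if (!handle.name || handle.name.length < minLength) continue;
    const escaped = handle.name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    result = result.replace(
      new RegExp(`\\b${escaped}\\b`),
      `[${handle.name}](https://www.amplenote.com/notes/${handle.uuid})`
    );
  }
  return result;
}
```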

Common FAQ:
Q) How do I enable autolinking to sections of existing notes?

setting

Min Page Name Length (default: 3)

setting

Autolink Related Notes Section (default: false)
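
Both settings are stored as strings and fall back to the defaults shown in their names; the same defaults appear as MIN_PAGE_LENGTH_SETTING_DEFAULT and AUTOLINK_RELATED_NOTES_SECTION_SETTING_DEFAULT in src-autolink/constants.js further down. A minimal sketch of reading them, assuming the standard app.settings object (the setting keys below are copied from this table):

```javascript
// Sketch only: how a plugin action might read these two settings with their defaults.
const MIN_PAGE_LENGTH_SETTING = "Min Page Name Length (default: 3)";
const AUTOLINK_RELATED_NOTES_SECTION_SETTING = "Autolink Related Notes Section (default: false)";

function readSettings(app) {
  // app.settings maps each setting name to the string the user entered (or undefined).
  const minPageNameLength = parseInt(app.settings[MIN_PAGE_LENGTH_SETTING] || "3", 10);
  const autolinkRelatedNotesSection =
    (app.settings[AUTOLINK_RELATED_NOTES_SECTION_SETTING] || "false") === "true";
  return { minPageNameLength, autolinkRelatedNotesSection };
}
```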


AutoLink (v2.2.0)


Code

/***
* Source Code: https://github.com/debanjandhar12/my-amplenote-plugins-v2
* Author: debajandhar12
* Build: production
* Character Count: 194101 (0.19 M)
* Target Folder: src-autolink
***/
(() => {
var __defProp = Object.defineProperty;
var __export = (target, all2) => {
for (var name in all2)
__defProp(target, name, { get: all2[name], enumerable: !0 });
};
 
// src-autolink/constants.js
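// The two setting keys below (and their defaults) mirror the settings listed in the plugin table above.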
var MIN_PAGE_LENGTH_SETTING_DEFAULT = "3", AUTOLINK_RELATED_NOTES_SECTION_SETTING_DEFAULT = "false", MIN_PAGE_LENGTH_SETTING = `Min Page Name Length (default: ${MIN_PAGE_LENGTH_SETTING_DEFAULT})`, AUTOLINK_RELATED_NOTES_SECTION_SETTING = `Autolink Related Notes Section (default: ${AUTOLINK_RELATED_NOTES_SECTION_SETTING_DEFAULT})`;
 
// package.json
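// Inlined copy of the dev environment's package.json; dynamicImportESM (further down)
// reads these dependency versions when resolving packages to load from a CDN.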
var package_default = {
name: "amplenote-plugin-dev-env",
author: "debajandhar12",
version: "1.0.0",
type: "module",
repository: "https://github.com/debanjandhar12/my-amplenote-plugins-v2",
dependencies: {
"@debanjandhar12/markmap-lib": "0.17.0-mod.3",
"@debanjandhar12/markmap-toolbar": "0.17.0-mod.3",
"@debanjandhar12/markmap-view": "0.17.0-mod.3",
"date-fns": "^3.6.0",
"lodash-es": "^4.17.21",
luxon: "^3.4.4",
"markdown-escape": "^2.0.0",
"@omnivore-app/api": "^1.0.4",
"markdown-it": "^14.1.0",
"remark-gfm": "4.0.0",
"remark-parse": "11.0.0",
unified: "11.0.0",
"unist-util-visit": "^5.0.0",
"unist-util-visit-parents": "6.0.1",
"d3-svg-to-png": "^0.3.1",
"chart.js": "^4.4.3",
fparser: "^3.1.0",
nanoid: "^5.0.7",
zod: "^3.23.8"
},
devDependencies: {
"@types/jest": "^29.5.0",
cors: "^2.8.5",
"cross-fetch": "^4.0.0",
dotenv: "^16.0.3",
esbuild: "0.23.1",
"esbuild-jest2": "0.6.7",
"esbuild-plugins-node-modules-polyfill": "1.6.6",
express: "^4.19.2",
"isomorphic-fetch": "^3.0.0",
jest: "^29.5.0",
"jest-environment-jsdom": "^29.5.0",
jsdom: "^24.1.0",
"make-synchronous": "^1.0.0",
playwright: "^1.47.2",
"core-js": "^3.38.1"
},
scripts: {
test: "jest --testPathPattern=src-autolink",
"test:watch": "jest --testPathPattern=src-autolink --watch",
"build:prod": "NODE_ENV=production node esbuild.js $(pwd)/src-autolink",
"build:dev": "node esbuild.js $(pwd)/src-autolink --watch --server"
}
};
 
// common-utils/dynamic-import-esm.js
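// Loads an npm package at runtime by racing public ESM CDNs (esm.sh immediately, esm.run after a
// 600 ms delay), resolving the version from the inlined package.json; the first successful import wins.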
var dynamicImportESM = async (pkg, pkgVersion = null) => {
if (0) {
try {
} catch (e) {
}
try {
} catch (e) {
}
}
let cdnList = ["https://esm.sh/", "https://esm.run/"], resolvedVersion = resolvePackageVersion(pkg, pkgVersion), importCompleted = !1, importPromises = cdnList.map(async (cdn) => {
let url = buildCDNUrl(cdn, pkg, resolvedVersion);
if (cdn !== "https://esm.sh/" && await new Promise((resolve) => setTimeout(resolve, 600)), importCompleted)
throw new Error(`Terminating as ${pkg} has already been imported`);
return import(url).then((module2) => ({ module: module2, url })).catch((e) => {
throw console.warn(`Failed to import ${pkg} from ${cdn}: ${e.message}`), e;
});
});
try {
let result = await Promise.any(importPromises);
return importCompleted = !0, console.log(`Imported ${pkg}@${resolvedVersion} from ${result.url}`), result.module;
} catch {
throw new Error(`Failed to import ${pkg} from all available CDNs`);
}
throw new Error(`Failed to import ${pkg} from all available CDNs`);
};
function getBasePackage(pkg) {
if (pkg.startsWith("@")) {
let [scope, name] = pkg.split("/");
return `${scope}/${name}`;
}
return pkg.split("/")[0];
}
function resolvePackageVersion(pkg, pkgVersion) {
let basePkg = getBasePackage(pkg), version = pkgVersion || package_default.dependencies[basePkg] || package_default.devDependencies[basePkg] || "latest";
return version.startsWith("^") || version.startsWith("~") ? version.substring(1) : version;
}
function buildCDNUrl(cdn, pkg, version) {
let folders = [];
pkg.startsWith("@") ? [, , ...folders] = pkg.split("/") : [, ...folders] = pkg.split("/");
let basePkg = getBasePackage(pkg), versionString = version !== "latest" ? `@${version}` : "", folderString = folders && folders.length > 0 ? `/${folders.join("/")}` : "", url = `${cdn}${basePkg}${versionString}${folderString}`;
return ["https://esm.sh/"].includes(cdn) ? url + "?bundle-deps" : url;
}
var dynamic_import_esm_default = dynamicImportESM;
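
// The following sections are third-party dependencies (mdast-util-*, micromark-*, and related
// helpers) inlined from node_modules by the esbuild production build.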
 
// node_modules/mdast-util-to-string/lib/index.js
var emptyOptions = {};
function toString(value, options) {
let settings = options || emptyOptions, includeImageAlt = typeof settings.includeImageAlt == "boolean" ? settings.includeImageAlt : !0, includeHtml = typeof settings.includeHtml == "boolean" ? settings.includeHtml : !0;
return one(value, includeImageAlt, includeHtml);
}
function one(value, includeImageAlt, includeHtml) {
if (node(value)) {
if ("value" in value)
return value.type === "html" && !includeHtml ? "" : value.value;
if (includeImageAlt && "alt" in value && value.alt)
return value.alt;
if ("children" in value)
return all(value.children, includeImageAlt, includeHtml);
}
return Array.isArray(value) ? all(value, includeImageAlt, includeHtml) : "";
}
function all(values, includeImageAlt, includeHtml) {
let result = [], index2 = -1;
for (; ++index2 < values.length; )
result[index2] = one(values[index2], includeImageAlt, includeHtml);
return result.join("");
}
function node(value) {
return !!(value && typeof value == "object");
}
 
// node_modules/decode-named-character-reference/index.dom.js
var element = document.createElement("i");
function decodeNamedCharacterReference(value) {
let characterReference2 = "&" + value + ";";
element.innerHTML = characterReference2;
let char = element.textContent;
return char.charCodeAt(char.length - 1) === 59 && value !== "semi" || char === characterReference2 ? !1 : char;
}
 
// node_modules/micromark-util-chunked/index.js
function splice(list2, start, remove, items) {
let end = list2.length, chunkStart = 0, parameters;
if (start < 0 ? start = -start > end ? 0 : end + start : start = start > end ? end : start, remove = remove > 0 ? remove : 0, items.length < 1e4)
parameters = Array.from(items), parameters.unshift(start, remove), list2.splice(...parameters);
else
for (remove && list2.splice(start, remove); chunkStart < items.length; )
parameters = items.slice(chunkStart, chunkStart + 1e4), parameters.unshift(start, 0), list2.splice(...parameters), chunkStart += 1e4, start += 1e4;
}
function push(list2, items) {
return list2.length > 0 ? (splice(list2, list2.length, 0, items), list2) : items;
}
 
// node_modules/micromark-util-combine-extensions/index.js
var hasOwnProperty = {}.hasOwnProperty;
function combineExtensions(extensions) {
let all2 = {}, index2 = -1;
for (; ++index2 < extensions.length; )
syntaxExtension(all2, extensions[index2]);
return all2;
}
function syntaxExtension(all2, extension2) {
let hook;
for (hook in extension2) {
let left = (hasOwnProperty.call(all2, hook) ? all2[hook] : void 0) || (all2[hook] = {}), right = extension2[hook], code;
if (right)
for (code in right) {
hasOwnProperty.call(left, code) || (left[code] = []);
let value = right[code];
constructs(
// @ts-expect-error Looks like a list.
left[code],
Array.isArray(value) ? value : value ? [value] : []
);
}
}
}
function constructs(existing, list2) {
let index2 = -1, before = [];
for (; ++index2 < list2.length; )
(list2[index2].add === "after" ? existing : before).push(list2[index2]);
splice(existing, 0, 0, before);
}
 
// node_modules/micromark-util-decode-numeric-character-reference/index.js
function decodeNumericCharacterReference(value, base) {
let code = Number.parseInt(value, base);
return (
// C0 except for HT, LF, FF, CR, space.
code < 9 || code === 11 || code > 13 && code < 32 || // Control character (DEL) of C0, and C1 controls.
code > 126 && code < 160 || // Lone high surrogates and low surrogates.
code > 55295 && code < 57344 || // Noncharacters.
code > 64975 && code < 65008 || /* eslint-disable no-bitwise */
(code & 65535) === 65535 || (code & 65535) === 65534 || /* eslint-enable no-bitwise */
// Out of range
code > 1114111 ? "\uFFFD" : String.fromCodePoint(code)
);
}
 
// node_modules/micromark-util-normalize-identifier/index.js
function normalizeIdentifier(value) {
return value.replace(/[\t\n\r ]+/g, " ").replace(/^ | $/g, "").toLowerCase().toUpperCase();
}
 
// node_modules/micromark-util-character/index.js
var asciiAlpha = regexCheck(/[A-Za-z]/), asciiAlphanumeric = regexCheck(/[\dA-Za-z]/), asciiAtext = regexCheck(/[#-'*+\--9=?A-Z^-~]/);
function asciiControl(code) {
return (
// Special whitespace codes (which have negative values), C0 and Control
// character DEL
code !== null && (code < 32 || code === 127)
);
}
var asciiDigit = regexCheck(/\d/), asciiHexDigit = regexCheck(/[\dA-Fa-f]/), asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/);
function markdownLineEnding(code) {
return code !== null && code < -2;
}
function markdownLineEndingOrSpace(code) {
return code !== null && (code < 0 || code === 32);
}
function markdownSpace(code) {
return code === -2 || code === -1 || code === 32;
}
var unicodePunctuation = regexCheck(/\p{P}|\p{S}/u), unicodeWhitespace = regexCheck(/\s/);
function regexCheck(regex) {
return check;
function check(code) {
return code !== null && code > -1 && regex.test(String.fromCharCode(code));
}
}
 
// node_modules/micromark-factory-space/index.js
function factorySpace(effects, ok2, type, max) {
let limit = max ? max - 1 : Number.POSITIVE_INFINITY, size = 0;
return start;
function start(code) {
return markdownSpace(code) ? (effects.enter(type), prefix(code)) : ok2(code);
}
function prefix(code) {
return markdownSpace(code) && size++ < limit ? (effects.consume(code), prefix) : (effects.exit(type), ok2(code));
}
}
 
// node_modules/micromark/lib/initialize/content.js
var content = {
tokenize: initializeContent
};
function initializeContent(effects) {
let contentStart = effects.attempt(
this.parser.constructs.contentInitial,
afterContentStartConstruct,
paragraphInitial
), previous2;
return contentStart;
function afterContentStartConstruct(code) {
if (code === null) {
effects.consume(code);
return;
}
return effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), factorySpace(effects, contentStart, "linePrefix");
}
function paragraphInitial(code) {
return effects.enter("paragraph"), lineStart(code);
}
function lineStart(code) {
let token = effects.enter("chunkText", {
contentType: "text",
previous: previous2
});
return previous2 && (previous2.next = token), previous2 = token, data(code);
}
function data(code) {
if (code === null) {
effects.exit("chunkText"), effects.exit("paragraph"), effects.consume(code);
return;
}
return markdownLineEnding(code) ? (effects.consume(code), effects.exit("chunkText"), lineStart) : (effects.consume(code), data);
}
}
 
// node_modules/micromark/lib/initialize/document.js
var document2 = {
tokenize: initializeDocument
}, containerConstruct = {
tokenize: tokenizeContainer
};
function initializeDocument(effects) {
let self2 = this, stack = [], continued = 0, childFlow, childToken, lineStartOffset;
return start;
function start(code) {
if (continued < stack.length) {
let item = stack[continued];
return self2.containerState = item[1], effects.attempt(
item[0].continuation,
documentContinue,
checkNewContainers
)(code);
}
return checkNewContainers(code);
}
function documentContinue(code) {
if (continued++, self2.containerState._closeFlow) {
self2.containerState._closeFlow = void 0, childFlow && closeFlow();
let indexBeforeExits = self2.events.length, indexBeforeFlow = indexBeforeExits, point3;
for (; indexBeforeFlow--; )
if (self2.events[indexBeforeFlow][0] === "exit" && self2.events[indexBeforeFlow][1].type === "chunkFlow") {
point3 = self2.events[indexBeforeFlow][1].end;
break;
}
exitContainers(continued);
let index2 = indexBeforeExits;
for (; index2 < self2.events.length; )
self2.events[index2][1].end = Object.assign({}, point3), index2++;
return splice(
self2.events,
indexBeforeFlow + 1,
0,
self2.events.slice(indexBeforeExits)
), self2.events.length = index2, checkNewContainers(code);
}
return start(code);
}
function checkNewContainers(code) {
if (continued === stack.length) {
if (!childFlow)
return documentContinued(code);
if (childFlow.currentConstruct && childFlow.currentConstruct.concrete)
return flowStart(code);
self2.interrupt = !!(childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack);
}
return self2.containerState = {}, effects.check(
containerConstruct,
thereIsANewContainer,
thereIsNoNewContainer
)(code);
}
function thereIsANewContainer(code) {
return childFlow && closeFlow(), exitContainers(continued), documentContinued(code);
}
function thereIsNoNewContainer(code) {
return self2.parser.lazy[self2.now().line] = continued !== stack.length, lineStartOffset = self2.now().offset, flowStart(code);
}
function documentContinued(code) {
return self2.containerState = {}, effects.attempt(
containerConstruct,
containerContinue,
flowStart
)(code);
}
function containerContinue(code) {
return continued++, stack.push([self2.currentConstruct, self2.containerState]), documentContinued(code);
}
function flowStart(code) {
if (code === null) {
childFlow && closeFlow(), exitContainers(0), effects.consume(code);
return;
}
return childFlow = childFlow || self2.parser.flow(self2.now()), effects.enter("chunkFlow", {
contentType: "flow",
previous: childToken,
_tokenizer: childFlow
}), flowContinue(code);
}
function flowContinue(code) {
if (code === null) {
writeToChild(effects.exit("chunkFlow"), !0), exitContainers(0), effects.consume(code);
return;
}
return markdownLineEnding(code) ? (effects.consume(code), writeToChild(effects.exit("chunkFlow")), continued = 0, self2.interrupt = void 0, start) : (effects.consume(code), flowContinue);
}
function writeToChild(token, eof) {
let stream = self2.sliceStream(token);
if (eof && stream.push(null), token.previous = childToken, childToken && (childToken.next = token), childToken = token, childFlow.defineSkip(token.start), childFlow.write(stream), self2.parser.lazy[token.start.line]) {
let index2 = childFlow.events.length;
for (; index2--; )
if (
// The token starts before the line ending…
childFlow.events[index2][1].start.offset < lineStartOffset && // …and either is not ended yet…
(!childFlow.events[index2][1].end || // …or ends after it.
childFlow.events[index2][1].end.offset > lineStartOffset)
)
return;
let indexBeforeExits = self2.events.length, indexBeforeFlow = indexBeforeExits, seen, point3;
for (; indexBeforeFlow--; )
if (self2.events[indexBeforeFlow][0] === "exit" && self2.events[indexBeforeFlow][1].type === "chunkFlow") {
if (seen) {
point3 = self2.events[indexBeforeFlow][1].end;
break;
}
seen = !0;
}
for (exitContainers(continued), index2 = indexBeforeExits; index2 < self2.events.length; )
self2.events[index2][1].end = Object.assign({}, point3), index2++;
splice(
self2.events,
indexBeforeFlow + 1,
0,
self2.events.slice(indexBeforeExits)
), self2.events.length = index2;
}
}
function exitContainers(size) {
let index2 = stack.length;
for (; index2-- > size; ) {
let entry = stack[index2];
self2.containerState = entry[1], entry[0].exit.call(self2, effects);
}
stack.length = size;
}
function closeFlow() {
childFlow.write([null]), childToken = void 0, childFlow = void 0, self2.containerState._closeFlow = void 0;
}
}
function tokenizeContainer(effects, ok2, nok) {
return factorySpace(
effects,
effects.attempt(this.parser.constructs.document, ok2, nok),
"linePrefix",
this.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4
);
}
 
// node_modules/micromark-util-classify-character/index.js
function classifyCharacter(code) {
if (code === null || markdownLineEndingOrSpace(code) || unicodeWhitespace(code))
return 1;
if (unicodePunctuation(code))
return 2;
}
 
// node_modules/micromark-util-resolve-all/index.js
function resolveAll(constructs2, events, context) {
let called = [], index2 = -1;
for (; ++index2 < constructs2.length; ) {
let resolve = constructs2[index2].resolveAll;
resolve && !called.includes(resolve) && (events = resolve(events, context), called.push(resolve));
}
return events;
}
 
// node_modules/micromark-core-commonmark/lib/attention.js
var attention = {
name: "attention",
tokenize: tokenizeAttention,
resolveAll: resolveAllAttention
};
function resolveAllAttention(events, context) {
let index2 = -1, open, group, text3, openingSequence, closingSequence, use, nextEvents, offset;
for (; ++index2 < events.length; )
if (events[index2][0] === "enter" && events[index2][1].type === "attentionSequence" && events[index2][1]._close) {
for (open = index2; open--; )
if (events[open][0] === "exit" && events[open][1].type === "attentionSequence" && events[open][1]._open && // If the markers are the same:
context.sliceSerialize(events[open][1]).charCodeAt(0) === context.sliceSerialize(events[index2][1]).charCodeAt(0)) {
if ((events[open][1]._close || events[index2][1]._open) && (events[index2][1].end.offset - events[index2][1].start.offset) % 3 && !((events[open][1].end.offset - events[open][1].start.offset + events[index2][1].end.offset - events[index2][1].start.offset) % 3))
continue;
use = events[open][1].end.offset - events[open][1].start.offset > 1 && events[index2][1].end.offset - events[index2][1].start.offset > 1 ? 2 : 1;
let start = Object.assign({}, events[open][1].end), end = Object.assign({}, events[index2][1].start);
movePoint(start, -use), movePoint(end, use), openingSequence = {
type: use > 1 ? "strongSequence" : "emphasisSequence",
start,
end: Object.assign({}, events[open][1].end)
}, closingSequence = {
type: use > 1 ? "strongSequence" : "emphasisSequence",
start: Object.assign({}, events[index2][1].start),
end
}, text3 = {
type: use > 1 ? "strongText" : "emphasisText",
start: Object.assign({}, events[open][1].end),
end: Object.assign({}, events[index2][1].start)
}, group = {
type: use > 1 ? "strong" : "emphasis",
start: Object.assign({}, openingSequence.start),
end: Object.assign({}, closingSequence.end)
}, events[open][1].end = Object.assign({}, openingSequence.start), events[index2][1].start = Object.assign({}, closingSequence.end), nextEvents = [], events[open][1].end.offset - events[open][1].start.offset && (nextEvents = push(nextEvents, [["enter", events[open][1], context], ["exit", events[open][1], context]])), nextEvents = push(nextEvents, [["enter", group, context], ["enter", openingSequence, context], ["exit", openingSequence, context], ["enter", text3, context]]), nextEvents = push(nextEvents, resolveAll(context.parser.constructs.insideSpan.null, events.slice(open + 1, index2), context)), nextEvents = push(nextEvents, [["exit", text3, context], ["enter", closingSequence, context], ["exit", closingSequence, context], ["exit", group, context]]), events[index2][1].end.offset - events[index2][1].start.offset ? (offset = 2, nextEvents = push(nextEvents, [["enter", events[index2][1], context], ["exit", events[index2][1], context]])) : offset = 0, splice(events, open - 1, index2 - open + 3, nextEvents), index2 = open + nextEvents.length - offset - 2;
break;
}
}
for (index2 = -1; ++index2 < events.length; )
events[index2][1].type === "attentionSequence" && (events[index2][1].type = "data");
return events;
}
function tokenizeAttention(effects, ok2) {
let attentionMarkers2 = this.parser.constructs.attentionMarkers.null, previous2 = this.previous, before = classifyCharacter(previous2), marker;
return start;
function start(code) {
return marker = code, effects.enter("attentionSequence"), inside(code);
}
function inside(code) {
if (code === marker)
return effects.consume(code), inside;
let token = effects.exit("attentionSequence"), after = classifyCharacter(code), open = !after || after === 2 && before || attentionMarkers2.includes(code), close = !before || before === 2 && after || attentionMarkers2.includes(previous2);
return token._open = !!(marker === 42 ? open : open && (before || !close)), token._close = !!(marker === 42 ? close : close && (after || !open)), ok2(code);
}
}
function movePoint(point3, offset) {
point3.column += offset, point3.offset += offset, point3._bufferIndex += offset;
}
 
// node_modules/micromark-core-commonmark/lib/autolink.js
var autolink = {
name: "autolink",
tokenize: tokenizeAutolink
};
function tokenizeAutolink(effects, ok2, nok) {
let size = 0;
return start;
function start(code) {
return effects.enter("autolink"), effects.enter("autolinkMarker"), effects.consume(code), effects.exit("autolinkMarker"), effects.enter("autolinkProtocol"), open;
}
function open(code) {
return asciiAlpha(code) ? (effects.consume(code), schemeOrEmailAtext) : code === 64 ? nok(code) : emailAtext(code);
}
function schemeOrEmailAtext(code) {
return code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code) ? (size = 1, schemeInsideOrEmailAtext(code)) : emailAtext(code);
}
function schemeInsideOrEmailAtext(code) {
return code === 58 ? (effects.consume(code), size = 0, urlInside) : (code === 43 || code === 45 || code === 46 || asciiAlphanumeric(code)) && size++ < 32 ? (effects.consume(code), schemeInsideOrEmailAtext) : (size = 0, emailAtext(code));
}
function urlInside(code) {
return code === 62 ? (effects.exit("autolinkProtocol"), effects.enter("autolinkMarker"), effects.consume(code), effects.exit("autolinkMarker"), effects.exit("autolink"), ok2) : code === null || code === 32 || code === 60 || asciiControl(code) ? nok(code) : (effects.consume(code), urlInside);
}
function emailAtext(code) {
return code === 64 ? (effects.consume(code), emailAtSignOrDot) : asciiAtext(code) ? (effects.consume(code), emailAtext) : nok(code);
}
function emailAtSignOrDot(code) {
return asciiAlphanumeric(code) ? emailLabel(code) : nok(code);
}
function emailLabel(code) {
return code === 46 ? (effects.consume(code), size = 0, emailAtSignOrDot) : code === 62 ? (effects.exit("autolinkProtocol").type = "autolinkEmail", effects.enter("autolinkMarker"), effects.consume(code), effects.exit("autolinkMarker"), effects.exit("autolink"), ok2) : emailValue(code);
}
function emailValue(code) {
if ((code === 45 || asciiAlphanumeric(code)) && size++ < 63) {
let next = code === 45 ? emailValue : emailLabel;
return effects.consume(code), next;
}
return nok(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/blank-line.js
var blankLine = {
tokenize: tokenizeBlankLine,
partial: !0
};
function tokenizeBlankLine(effects, ok2, nok) {
return start;
function start(code) {
return markdownSpace(code) ? factorySpace(effects, after, "linePrefix")(code) : after(code);
}
function after(code) {
return code === null || markdownLineEnding(code) ? ok2(code) : nok(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/block-quote.js
var blockQuote = {
name: "blockQuote",
tokenize: tokenizeBlockQuoteStart,
continuation: {
tokenize: tokenizeBlockQuoteContinuation
},
exit
};
function tokenizeBlockQuoteStart(effects, ok2, nok) {
let self2 = this;
return start;
function start(code) {
if (code === 62) {
let state = self2.containerState;
return state.open || (effects.enter("blockQuote", {
_container: !0
}), state.open = !0), effects.enter("blockQuotePrefix"), effects.enter("blockQuoteMarker"), effects.consume(code), effects.exit("blockQuoteMarker"), after;
}
return nok(code);
}
function after(code) {
return markdownSpace(code) ? (effects.enter("blockQuotePrefixWhitespace"), effects.consume(code), effects.exit("blockQuotePrefixWhitespace"), effects.exit("blockQuotePrefix"), ok2) : (effects.exit("blockQuotePrefix"), ok2(code));
}
}
function tokenizeBlockQuoteContinuation(effects, ok2, nok) {
let self2 = this;
return contStart;
function contStart(code) {
return markdownSpace(code) ? factorySpace(effects, contBefore, "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code) : contBefore(code);
}
function contBefore(code) {
return effects.attempt(blockQuote, ok2, nok)(code);
}
}
function exit(effects) {
effects.exit("blockQuote");
}
 
// node_modules/micromark-core-commonmark/lib/character-escape.js
var characterEscape = {
name: "characterEscape",
tokenize: tokenizeCharacterEscape
};
function tokenizeCharacterEscape(effects, ok2, nok) {
return start;
function start(code) {
return effects.enter("characterEscape"), effects.enter("escapeMarker"), effects.consume(code), effects.exit("escapeMarker"), inside;
}
function inside(code) {
return asciiPunctuation(code) ? (effects.enter("characterEscapeValue"), effects.consume(code), effects.exit("characterEscapeValue"), effects.exit("characterEscape"), ok2) : nok(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/character-reference.js
var characterReference = {
name: "characterReference",
tokenize: tokenizeCharacterReference
};
function tokenizeCharacterReference(effects, ok2, nok) {
let self2 = this, size = 0, max, test;
return start;
function start(code) {
return effects.enter("characterReference"), effects.enter("characterReferenceMarker"), effects.consume(code), effects.exit("characterReferenceMarker"), open;
}
function open(code) {
return code === 35 ? (effects.enter("characterReferenceMarkerNumeric"), effects.consume(code), effects.exit("characterReferenceMarkerNumeric"), numeric) : (effects.enter("characterReferenceValue"), max = 31, test = asciiAlphanumeric, value(code));
}
function numeric(code) {
return code === 88 || code === 120 ? (effects.enter("characterReferenceMarkerHexadecimal"), effects.consume(code), effects.exit("characterReferenceMarkerHexadecimal"), effects.enter("characterReferenceValue"), max = 6, test = asciiHexDigit, value) : (effects.enter("characterReferenceValue"), max = 7, test = asciiDigit, value(code));
}
function value(code) {
if (code === 59 && size) {
let token = effects.exit("characterReferenceValue");
return test === asciiAlphanumeric && !decodeNamedCharacterReference(self2.sliceSerialize(token)) ? nok(code) : (effects.enter("characterReferenceMarker"), effects.consume(code), effects.exit("characterReferenceMarker"), effects.exit("characterReference"), ok2);
}
return test(code) && size++ < max ? (effects.consume(code), value) : nok(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/code-fenced.js
var nonLazyContinuation = {
tokenize: tokenizeNonLazyContinuation,
partial: !0
}, codeFenced = {
name: "codeFenced",
tokenize: tokenizeCodeFenced,
concrete: !0
};
function tokenizeCodeFenced(effects, ok2, nok) {
let self2 = this, closeStart = {
tokenize: tokenizeCloseStart,
partial: !0
}, initialPrefix = 0, sizeOpen = 0, marker;
return start;
function start(code) {
return beforeSequenceOpen(code);
}
function beforeSequenceOpen(code) {
let tail = self2.events[self2.events.length - 1];
return initialPrefix = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], !0).length : 0, marker = code, effects.enter("codeFenced"), effects.enter("codeFencedFence"), effects.enter("codeFencedFenceSequence"), sequenceOpen(code);
}
function sequenceOpen(code) {
return code === marker ? (sizeOpen++, effects.consume(code), sequenceOpen) : sizeOpen < 3 ? nok(code) : (effects.exit("codeFencedFenceSequence"), markdownSpace(code) ? factorySpace(effects, infoBefore, "whitespace")(code) : infoBefore(code));
}
function infoBefore(code) {
return code === null || markdownLineEnding(code) ? (effects.exit("codeFencedFence"), self2.interrupt ? ok2(code) : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code)) : (effects.enter("codeFencedFenceInfo"), effects.enter("chunkString", {
contentType: "string"
}), info(code));
}
function info(code) {
return code === null || markdownLineEnding(code) ? (effects.exit("chunkString"), effects.exit("codeFencedFenceInfo"), infoBefore(code)) : markdownSpace(code) ? (effects.exit("chunkString"), effects.exit("codeFencedFenceInfo"), factorySpace(effects, metaBefore, "whitespace")(code)) : code === 96 && code === marker ? nok(code) : (effects.consume(code), info);
}
function metaBefore(code) {
return code === null || markdownLineEnding(code) ? infoBefore(code) : (effects.enter("codeFencedFenceMeta"), effects.enter("chunkString", {
contentType: "string"
}), meta(code));
}
function meta(code) {
return code === null || markdownLineEnding(code) ? (effects.exit("chunkString"), effects.exit("codeFencedFenceMeta"), infoBefore(code)) : code === 96 && code === marker ? nok(code) : (effects.consume(code), meta);
}
function atNonLazyBreak(code) {
return effects.attempt(closeStart, after, contentBefore)(code);
}
function contentBefore(code) {
return effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), contentStart;
}
function contentStart(code) {
return initialPrefix > 0 && markdownSpace(code) ? factorySpace(effects, beforeContentChunk, "linePrefix", initialPrefix + 1)(code) : beforeContentChunk(code);
}
function beforeContentChunk(code) {
return code === null || markdownLineEnding(code) ? effects.check(nonLazyContinuation, atNonLazyBreak, after)(code) : (effects.enter("codeFlowValue"), contentChunk(code));
}
function contentChunk(code) {
return code === null || markdownLineEnding(code) ? (effects.exit("codeFlowValue"), beforeContentChunk(code)) : (effects.consume(code), contentChunk);
}
function after(code) {
return effects.exit("codeFenced"), ok2(code);
}
function tokenizeCloseStart(effects2, ok3, nok2) {
let size = 0;
return startBefore;
function startBefore(code) {
return effects2.enter("lineEnding"), effects2.consume(code), effects2.exit("lineEnding"), start2;
}
function start2(code) {
return effects2.enter("codeFencedFence"), markdownSpace(code) ? factorySpace(effects2, beforeSequenceClose, "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code) : beforeSequenceClose(code);
}
function beforeSequenceClose(code) {
return code === marker ? (effects2.enter("codeFencedFenceSequence"), sequenceClose(code)) : nok2(code);
}
function sequenceClose(code) {
return code === marker ? (size++, effects2.consume(code), sequenceClose) : size >= sizeOpen ? (effects2.exit("codeFencedFenceSequence"), markdownSpace(code) ? factorySpace(effects2, sequenceCloseAfter, "whitespace")(code) : sequenceCloseAfter(code)) : nok2(code);
}
function sequenceCloseAfter(code) {
return code === null || markdownLineEnding(code) ? (effects2.exit("codeFencedFence"), ok3(code)) : nok2(code);
}
}
}
function tokenizeNonLazyContinuation(effects, ok2, nok) {
let self2 = this;
return start;
function start(code) {
return code === null ? nok(code) : (effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), lineStart);
}
function lineStart(code) {
return self2.parser.lazy[self2.now().line] ? nok(code) : ok2(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/code-indented.js
var codeIndented = {
name: "codeIndented",
tokenize: tokenizeCodeIndented
}, furtherStart = {
tokenize: tokenizeFurtherStart,
partial: !0
};
function tokenizeCodeIndented(effects, ok2, nok) {
let self2 = this;
return start;
function start(code) {
return effects.enter("codeIndented"), factorySpace(effects, afterPrefix, "linePrefix", 5)(code);
}
function afterPrefix(code) {
let tail = self2.events[self2.events.length - 1];
return tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], !0).length >= 4 ? atBreak(code) : nok(code);
}
function atBreak(code) {
return code === null ? after(code) : markdownLineEnding(code) ? effects.attempt(furtherStart, atBreak, after)(code) : (effects.enter("codeFlowValue"), inside(code));
}
function inside(code) {
return code === null || markdownLineEnding(code) ? (effects.exit("codeFlowValue"), atBreak(code)) : (effects.consume(code), inside);
}
function after(code) {
return effects.exit("codeIndented"), ok2(code);
}
}
function tokenizeFurtherStart(effects, ok2, nok) {
let self2 = this;
return furtherStart2;
function furtherStart2(code) {
return self2.parser.lazy[self2.now().line] ? nok(code) : markdownLineEnding(code) ? (effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), furtherStart2) : factorySpace(effects, afterPrefix, "linePrefix", 5)(code);
}
function afterPrefix(code) {
let tail = self2.events[self2.events.length - 1];
return tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], !0).length >= 4 ? ok2(code) : markdownLineEnding(code) ? furtherStart2(code) : nok(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/code-text.js
var codeText = {
name: "codeText",
tokenize: tokenizeCodeText,
resolve: resolveCodeText,
previous
};
function resolveCodeText(events) {
let tailExitIndex = events.length - 4, headEnterIndex = 3, index2, enter;
if ((events[headEnterIndex][1].type === "lineEnding" || events[headEnterIndex][1].type === "space") && (events[tailExitIndex][1].type === "lineEnding" || events[tailExitIndex][1].type === "space")) {
for (index2 = headEnterIndex; ++index2 < tailExitIndex; )
if (events[index2][1].type === "codeTextData") {
events[headEnterIndex][1].type = "codeTextPadding", events[tailExitIndex][1].type = "codeTextPadding", headEnterIndex += 2, tailExitIndex -= 2;
break;
}
}
for (index2 = headEnterIndex - 1, tailExitIndex++; ++index2 <= tailExitIndex; )
enter === void 0 ? index2 !== tailExitIndex && events[index2][1].type !== "lineEnding" && (enter = index2) : (index2 === tailExitIndex || events[index2][1].type === "lineEnding") && (events[enter][1].type = "codeTextData", index2 !== enter + 2 && (events[enter][1].end = events[index2 - 1][1].end, events.splice(enter + 2, index2 - enter - 2), tailExitIndex -= index2 - enter - 2, index2 = enter + 2), enter = void 0);
return events;
}
function previous(code) {
return code !== 96 || this.events[this.events.length - 1][1].type === "characterEscape";
}
function tokenizeCodeText(effects, ok2, nok) {
let self2 = this, sizeOpen = 0, size, token;
return start;
function start(code) {
return effects.enter("codeText"), effects.enter("codeTextSequence"), sequenceOpen(code);
}
function sequenceOpen(code) {
return code === 96 ? (effects.consume(code), sizeOpen++, sequenceOpen) : (effects.exit("codeTextSequence"), between(code));
}
function between(code) {
return code === null ? nok(code) : code === 32 ? (effects.enter("space"), effects.consume(code), effects.exit("space"), between) : code === 96 ? (token = effects.enter("codeTextSequence"), size = 0, sequenceClose(code)) : markdownLineEnding(code) ? (effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), between) : (effects.enter("codeTextData"), data(code));
}
function data(code) {
return code === null || code === 32 || code === 96 || markdownLineEnding(code) ? (effects.exit("codeTextData"), between(code)) : (effects.consume(code), data);
}
function sequenceClose(code) {
return code === 96 ? (effects.consume(code), size++, sequenceClose) : size === sizeOpen ? (effects.exit("codeTextSequence"), effects.exit("codeText"), ok2(code)) : (token.type = "codeTextData", data(code));
}
}
 
// node_modules/micromark-util-subtokenize/lib/splice-buffer.js
var SpliceBuffer = class {
/**
* @param {ReadonlyArray<T> | null | undefined} [initial]
* Initial items (optional).
* @returns
* Splice buffer.
*/
constructor(initial) {
this.left = initial ? [...initial] : [], this.right = [];
}
/**
* Array access;
* does not move the cursor.
*
* @param {number} index
* Index.
* @return {T}
* Item.
*/
get(index2) {
if (index2 < 0 || index2 >= this.left.length + this.right.length)
throw new RangeError("Cannot access index `" + index2 + "` in a splice buffer of size `" + (this.left.length + this.right.length) + "`");
return index2 < this.left.length ? this.left[index2] : this.right[this.right.length - index2 + this.left.length - 1];
}
/**
* The length of the splice buffer, one greater than the largest index in the
* array.
*/
get length() {
return this.left.length + this.right.length;
}
/**
* Remove and return `list[0]`;
* moves the cursor to `0`.
*
* @returns {T | undefined}
* Item, optional.
*/
shift() {
return this.setCursor(0), this.right.pop();
}
/**
* Slice the buffer to get an array;
* does not move the cursor.
*
* @param {number} start
* Start.
* @param {number | null | undefined} [end]
* End (optional).
* @returns {Array<T>}
* Array of items.
*/
slice(start, end) {
let stop = end == null ? Number.POSITIVE_INFINITY : end;
return stop < this.left.length ? this.left.slice(start, stop) : start > this.left.length ? this.right.slice(this.right.length - stop + this.left.length, this.right.length - start + this.left.length).reverse() : this.left.slice(start).concat(this.right.slice(this.right.length - stop + this.left.length).reverse());
}
/**
* Mimics the behavior of Array.prototype.splice() except for the change of
* interface necessary to avoid segfaults when patching in very large arrays.
*
* This operation moves cursor is moved to `start` and results in the cursor
* placed after any inserted items.
*
* @param {number} start
* Start;
* zero-based index at which to start changing the array;
* negative numbers count backwards from the end of the array and values
* that are out-of bounds are clamped to the appropriate end of the array.
* @param {number | null | undefined} [deleteCount=0]
* Delete count (default: `0`);
* maximum number of elements to delete, starting from start.
* @param {Array<T> | null | undefined} [items=[]]
* Items to include in place of the deleted items (default: `[]`).
* @return {Array<T>}
* Any removed items.
*/
splice(start, deleteCount, items) {
let count = deleteCount || 0;
this.setCursor(Math.trunc(start));
let removed = this.right.splice(this.right.length - count, Number.POSITIVE_INFINITY);
return items && chunkedPush(this.left, items), removed.reverse();
}
/**
* Remove and return the highest-numbered item in the array, so
* `list[list.length - 1]`;
* Moves the cursor to `length`.
*
* @returns {T | undefined}
* Item, optional.
*/
pop() {
return this.setCursor(Number.POSITIVE_INFINITY), this.left.pop();
}
/**
* Inserts a single item to the high-numbered side of the array;
* moves the cursor to `length`.
*
* @param {T} item
* Item.
* @returns {undefined}
* Nothing.
*/
push(item) {
this.setCursor(Number.POSITIVE_INFINITY), this.left.push(item);
}
/**
* Inserts many items to the high-numbered side of the array.
* Moves the cursor to `length`.
*
* @param {Array<T>} items
* Items.
* @returns {undefined}
* Nothing.
*/
pushMany(items) {
this.setCursor(Number.POSITIVE_INFINITY), chunkedPush(this.left, items);
}
/**
* Inserts a single item to the low-numbered side of the array;
* Moves the cursor to `0`.
*
* @param {T} item
* Item.
* @returns {undefined}
* Nothing.
*/
unshift(item) {
this.setCursor(0), this.right.push(item);
}
/**
* Inserts many items to the low-numbered side of the array;
* moves the cursor to `0`.
*
* @param {Array<T>} items
* Items.
* @returns {undefined}
* Nothing.
*/
unshiftMany(items) {
this.setCursor(0), chunkedPush(this.right, items.reverse());
}
/**
* Move the cursor to a specific position in the array. Requires
* time proportional to the distance moved.
*
* If `n < 0`, the cursor will end up at the beginning.
* If `n > length`, the cursor will end up at the end.
*
* @param {number} n
* Position.
* @return {undefined}
* Nothing.
*/
setCursor(n) {
if (!(n === this.left.length || n > this.left.length && this.right.length === 0 || n < 0 && this.left.length === 0))
if (n < this.left.length) {
let removed = this.left.splice(n, Number.POSITIVE_INFINITY);
chunkedPush(this.right, removed.reverse());
} else {
let removed = this.right.splice(this.left.length + this.right.length - n, Number.POSITIVE_INFINITY);
chunkedPush(this.left, removed.reverse());
}
}
};
function chunkedPush(list2, right) {
let chunkStart = 0;
if (right.length < 1e4)
list2.push(...right);
else
for (; chunkStart < right.length; )
list2.push(...right.slice(chunkStart, chunkStart + 1e4)), chunkStart += 1e4;
}
 
// node_modules/micromark-util-subtokenize/index.js
function subtokenize(eventsArray) {
let jumps = {}, index2 = -1, event, lineIndex, otherIndex, otherEvent, parameters, subevents, more, events = new SpliceBuffer(eventsArray);
for (; ++index2 < events.length; ) {
for (; index2 in jumps; )
index2 = jumps[index2];
if (event = events.get(index2), index2 && event[1].type === "chunkFlow" && events.get(index2 - 1)[1].type === "listItemPrefix" && (subevents = event[1]._tokenizer.events, otherIndex = 0, otherIndex < subevents.length && subevents[otherIndex][1].type === "lineEndingBlank" && (otherIndex += 2), otherIndex < subevents.length && subevents[otherIndex][1].type === "content"))
for (; ++otherIndex < subevents.length && subevents[otherIndex][1].type !== "content"; )
subevents[otherIndex][1].type === "chunkText" && (subevents[otherIndex][1]._isInFirstContentOfListItem = !0, otherIndex++);
if (event[0] === "enter")
event[1].contentType && (Object.assign(jumps, subcontent(events, index2)), index2 = jumps[index2], more = !0);
else if (event[1]._container) {
for (otherIndex = index2, lineIndex = void 0; otherIndex-- && (otherEvent = events.get(otherIndex), otherEvent[1].type === "lineEnding" || otherEvent[1].type === "lineEndingBlank"); )
otherEvent[0] === "enter" && (lineIndex && (events.get(lineIndex)[1].type = "lineEndingBlank"), otherEvent[1].type = "lineEnding", lineIndex = otherIndex);
lineIndex && (event[1].end = Object.assign({}, events.get(lineIndex)[1].start), parameters = events.slice(lineIndex, index2), parameters.unshift(event), events.splice(lineIndex, index2 - lineIndex + 1, parameters));
}
}
return splice(eventsArray, 0, Number.POSITIVE_INFINITY, events.slice(0)), !more;
}
function subcontent(events, eventIndex) {
let token = events.get(eventIndex)[1], context = events.get(eventIndex)[2], startPosition = eventIndex - 1, startPositions = [], tokenizer = token._tokenizer || context.parser[token.contentType](token.start), childEvents = tokenizer.events, jumps = [], gaps = {}, stream, previous2, index2 = -1, current = token, adjust = 0, start = 0, breaks = [start];
for (; current; ) {
for (; events.get(++startPosition)[1] !== current; )
;
startPositions.push(startPosition), current._tokenizer || (stream = context.sliceStream(current), current.next || stream.push(null), previous2 && tokenizer.defineSkip(current.start), current._isInFirstContentOfListItem && (tokenizer._gfmTasklistFirstContentOfListItem = !0), tokenizer.write(stream), current._isInFirstContentOfListItem && (tokenizer._gfmTasklistFirstContentOfListItem = void 0)), previous2 = current, current = current.next;
}
for (current = token; ++index2 < childEvents.length; )
// Find a void token that includes a break.
childEvents[index2][0] === "exit" && childEvents[index2 - 1][0] === "enter" && childEvents[index2][1].type === childEvents[index2 - 1][1].type && childEvents[index2][1].start.line !== childEvents[index2][1].end.line && (start = index2 + 1, breaks.push(start), current._tokenizer = void 0, current.previous = void 0, current = current.next);
for (tokenizer.events = [], current ? (current._tokenizer = void 0, current.previous = void 0) : breaks.pop(), index2 = breaks.length; index2--; ) {
let slice = childEvents.slice(breaks[index2], breaks[index2 + 1]), start2 = startPositions.pop();
jumps.push([start2, start2 + slice.length - 1]), events.splice(start2, 2, slice);
}
for (jumps.reverse(), index2 = -1; ++index2 < jumps.length; )
gaps[adjust + jumps[index2][0]] = adjust + jumps[index2][1], adjust += jumps[index2][1] - jumps[index2][0] - 1;
return gaps;
}
 
// node_modules/micromark-core-commonmark/lib/content.js
var content2 = {
tokenize: tokenizeContent,
resolve: resolveContent
}, continuationConstruct = {
tokenize: tokenizeContinuation,
partial: !0
};
function resolveContent(events) {
return subtokenize(events), events;
}
function tokenizeContent(effects, ok2) {
let previous2;
return chunkStart;
function chunkStart(code) {
return effects.enter("content"), previous2 = effects.enter("chunkContent", {
contentType: "content"
}), chunkInside(code);
}
function chunkInside(code) {
return code === null ? contentEnd(code) : markdownLineEnding(code) ? effects.check(continuationConstruct, contentContinue, contentEnd)(code) : (effects.consume(code), chunkInside);
}
function contentEnd(code) {
return effects.exit("chunkContent"), effects.exit("content"), ok2(code);
}
function contentContinue(code) {
return effects.consume(code), effects.exit("chunkContent"), previous2.next = effects.enter("chunkContent", {
contentType: "content",
previous: previous2
}), previous2 = previous2.next, chunkInside;
}
}
function tokenizeContinuation(effects, ok2, nok) {
let self2 = this;
return startLookahead;
function startLookahead(code) {
return effects.exit("chunkContent"), effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), factorySpace(effects, prefixed, "linePrefix");
}
function prefixed(code) {
if (code === null || markdownLineEnding(code))
return nok(code);
let tail = self2.events[self2.events.length - 1];
return !self2.parser.constructs.disable.null.includes("codeIndented") && tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], !0).length >= 4 ? ok2(code) : effects.interrupt(self2.parser.constructs.flow, nok, ok2)(code);
}
}
 
// node_modules/micromark-factory-destination/index.js
function factoryDestination(effects, ok2, nok, type, literalType, literalMarkerType, rawType, stringType, max) {
let limit = max || Number.POSITIVE_INFINITY, balance = 0;
return start;
function start(code) {
return code === 60 ? (effects.enter(type), effects.enter(literalType), effects.enter(literalMarkerType), effects.consume(code), effects.exit(literalMarkerType), enclosedBefore) : code === null || code === 32 || code === 41 || asciiControl(code) ? nok(code) : (effects.enter(type), effects.enter(rawType), effects.enter(stringType), effects.enter("chunkString", {
contentType: "string"
}), raw(code));
}
function enclosedBefore(code) {
return code === 62 ? (effects.enter(literalMarkerType), effects.consume(code), effects.exit(literalMarkerType), effects.exit(literalType), effects.exit(type), ok2) : (effects.enter(stringType), effects.enter("chunkString", {
contentType: "string"
}), enclosed(code));
}
function enclosed(code) {
return code === 62 ? (effects.exit("chunkString"), effects.exit(stringType), enclosedBefore(code)) : code === null || code === 60 || markdownLineEnding(code) ? nok(code) : (effects.consume(code), code === 92 ? enclosedEscape : enclosed);
}
function enclosedEscape(code) {
return code === 60 || code === 62 || code === 92 ? (effects.consume(code), enclosed) : enclosed(code);
}
function raw(code) {
return !balance && (code === null || code === 41 || markdownLineEndingOrSpace(code)) ? (effects.exit("chunkString"), effects.exit(stringType), effects.exit(rawType), effects.exit(type), ok2(code)) : balance < limit && code === 40 ? (effects.consume(code), balance++, raw) : code === 41 ? (effects.consume(code), balance--, raw) : code === null || code === 32 || code === 40 || asciiControl(code) ? nok(code) : (effects.consume(code), code === 92 ? rawEscape : raw);
}
function rawEscape(code) {
return code === 40 || code === 41 || code === 92 ? (effects.consume(code), raw) : raw(code);
}
}
 
// node_modules/micromark-factory-label/index.js
function factoryLabel(effects, ok2, nok, type, markerType, stringType) {
let self2 = this, size = 0, seen;
return start;
function start(code) {
return effects.enter(type), effects.enter(markerType), effects.consume(code), effects.exit(markerType), effects.enter(stringType), atBreak;
}
function atBreak(code) {
return size > 999 || code === null || code === 91 || code === 93 && !seen || // To do: remove in the future once we’ve switched from
// `micromark-extension-footnote` to `micromark-extension-gfm-footnote`,
// which doesn’t need this.
// Hidden footnotes hook.
/* c8 ignore next 3 */
code === 94 && !size && "_hiddenFootnoteSupport" in self2.parser.constructs ? nok(code) : code === 93 ? (effects.exit(stringType), effects.enter(markerType), effects.consume(code), effects.exit(markerType), effects.exit(type), ok2) : markdownLineEnding(code) ? (effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), atBreak) : (effects.enter("chunkString", {
contentType: "string"
}), labelInside(code));
}
function labelInside(code) {
return code === null || code === 91 || code === 93 || markdownLineEnding(code) || size++ > 999 ? (effects.exit("chunkString"), atBreak(code)) : (effects.consume(code), seen || (seen = !markdownSpace(code)), code === 92 ? labelEscape : labelInside);
}
function labelEscape(code) {
return code === 91 || code === 92 || code === 93 ? (effects.consume(code), size++, labelInside) : labelInside(code);
}
}
 
// node_modules/micromark-factory-title/index.js
function factoryTitle(effects, ok2, nok, type, markerType, stringType) {
let marker;
return start;
function start(code) {
return code === 34 || code === 39 || code === 40 ? (effects.enter(type), effects.enter(markerType), effects.consume(code), effects.exit(markerType), marker = code === 40 ? 41 : code, begin) : nok(code);
}
function begin(code) {
return code === marker ? (effects.enter(markerType), effects.consume(code), effects.exit(markerType), effects.exit(type), ok2) : (effects.enter(stringType), atBreak(code));
}
function atBreak(code) {
return code === marker ? (effects.exit(stringType), begin(marker)) : code === null ? nok(code) : markdownLineEnding(code) ? (effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), factorySpace(effects, atBreak, "linePrefix")) : (effects.enter("chunkString", {
contentType: "string"
}), inside(code));
}
function inside(code) {
return code === marker || code === null || markdownLineEnding(code) ? (effects.exit("chunkString"), atBreak(code)) : (effects.consume(code), code === 92 ? escape : inside);
}
function escape(code) {
return code === marker || code === 92 ? (effects.consume(code), inside) : inside(code);
}
}
 
// node_modules/micromark-factory-whitespace/index.js
function factoryWhitespace(effects, ok2) {
let seen;
return start;
function start(code) {
return markdownLineEnding(code) ? (effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), seen = !0, start) : markdownSpace(code) ? factorySpace(
effects,
start,
seen ? "linePrefix" : "lineSuffix"
)(code) : ok2(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/definition.js
var definition = {
name: "definition",
tokenize: tokenizeDefinition
}, titleBefore = {
tokenize: tokenizeTitleBefore,
partial: !0
};
function tokenizeDefinition(effects, ok2, nok) {
let self2 = this, identifier;
return start;
function start(code) {
return effects.enter("definition"), before(code);
}
function before(code) {
return factoryLabel.call(
self2,
effects,
labelAfter,
// Note: we don’t need to reset the way `markdown-rs` does.
nok,
"definitionLabel",
"definitionLabelMarker",
"definitionLabelString"
)(code);
}
function labelAfter(code) {
return identifier = normalizeIdentifier(self2.sliceSerialize(self2.events[self2.events.length - 1][1]).slice(1, -1)), code === 58 ? (effects.enter("definitionMarker"), effects.consume(code), effects.exit("definitionMarker"), markerAfter) : nok(code);
}
function markerAfter(code) {
return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, destinationBefore)(code) : destinationBefore(code);
}
function destinationBefore(code) {
return factoryDestination(
effects,
destinationAfter,
// Note: we don’t need to reset the way `markdown-rs` does.
nok,
"definitionDestination",
"definitionDestinationLiteral",
"definitionDestinationLiteralMarker",
"definitionDestinationRaw",
"definitionDestinationString"
)(code);
}
function destinationAfter(code) {
return effects.attempt(titleBefore, after, after)(code);
}
function after(code) {
return markdownSpace(code) ? factorySpace(effects, afterWhitespace, "whitespace")(code) : afterWhitespace(code);
}
function afterWhitespace(code) {
return code === null || markdownLineEnding(code) ? (effects.exit("definition"), self2.parser.defined.push(identifier), ok2(code)) : nok(code);
}
}
function tokenizeTitleBefore(effects, ok2, nok) {
return titleBefore2;
function titleBefore2(code) {
return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, beforeMarker)(code) : nok(code);
}
function beforeMarker(code) {
return factoryTitle(effects, titleAfter, nok, "definitionTitle", "definitionTitleMarker", "definitionTitleString")(code);
}
function titleAfter(code) {
return markdownSpace(code) ? factorySpace(effects, titleAfterOptionalWhitespace, "whitespace")(code) : titleAfterOptionalWhitespace(code);
}
function titleAfterOptionalWhitespace(code) {
return code === null || markdownLineEnding(code) ? ok2(code) : nok(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/hard-break-escape.js
var hardBreakEscape = {
name: "hardBreakEscape",
tokenize: tokenizeHardBreakEscape
};
function tokenizeHardBreakEscape(effects, ok2, nok) {
return start;
function start(code) {
return effects.enter("hardBreakEscape"), effects.consume(code), after;
}
function after(code) {
return markdownLineEnding(code) ? (effects.exit("hardBreakEscape"), ok2(code)) : nok(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/heading-atx.js
var headingAtx = {
name: "headingAtx",
tokenize: tokenizeHeadingAtx,
resolve: resolveHeadingAtx
};
function resolveHeadingAtx(events, context) {
let contentEnd = events.length - 2, contentStart = 3, content3, text3;
return events[contentStart][1].type === "whitespace" && (contentStart += 2), contentEnd - 2 > contentStart && events[contentEnd][1].type === "whitespace" && (contentEnd -= 2), events[contentEnd][1].type === "atxHeadingSequence" && (contentStart === contentEnd - 1 || contentEnd - 4 > contentStart && events[contentEnd - 2][1].type === "whitespace") && (contentEnd -= contentStart + 1 === contentEnd ? 2 : 4), contentEnd > contentStart && (content3 = {
type: "atxHeadingText",
start: events[contentStart][1].start,
end: events[contentEnd][1].end
}, text3 = {
type: "chunkText",
start: events[contentStart][1].start,
end: events[contentEnd][1].end,
contentType: "text"
}, splice(events, contentStart, contentEnd - contentStart + 1, [["enter", content3, context], ["enter", text3, context], ["exit", text3, context], ["exit", content3, context]])), events;
}
function tokenizeHeadingAtx(effects, ok2, nok) {
let size = 0;
return start;
function start(code) {
return effects.enter("atxHeading"), before(code);
}
function before(code) {
return effects.enter("atxHeadingSequence"), sequenceOpen(code);
}
function sequenceOpen(code) {
return code === 35 && size++ < 6 ? (effects.consume(code), sequenceOpen) : code === null || markdownLineEndingOrSpace(code) ? (effects.exit("atxHeadingSequence"), atBreak(code)) : nok(code);
}
function atBreak(code) {
return code === 35 ? (effects.enter("atxHeadingSequence"), sequenceFurther(code)) : code === null || markdownLineEnding(code) ? (effects.exit("atxHeading"), ok2(code)) : markdownSpace(code) ? factorySpace(effects, atBreak, "whitespace")(code) : (effects.enter("atxHeadingText"), data(code));
}
function sequenceFurther(code) {
return code === 35 ? (effects.consume(code), sequenceFurther) : (effects.exit("atxHeadingSequence"), atBreak(code));
}
function data(code) {
return code === null || code === 35 || markdownLineEndingOrSpace(code) ? (effects.exit("atxHeadingText"), atBreak(code)) : (effects.consume(code), data);
}
}
 
// node_modules/micromark-util-html-tag-name/index.js
var htmlBlockNames = [
"address",
"article",
"aside",
"base",
"basefont",
"blockquote",
"body",
"caption",
"center",
"col",
"colgroup",
"dd",
"details",
"dialog",
"dir",
"div",
"dl",
"dt",
"fieldset",
"figcaption",
"figure",
"footer",
"form",
"frame",
"frameset",
"h1",
"h2",
"h3",
"h4",
"h5",
"h6",
"head",
"header",
"hr",
"html",
"iframe",
"legend",
"li",
"link",
"main",
"menu",
"menuitem",
"nav",
"noframes",
"ol",
"optgroup",
"option",
"p",
"param",
"search",
"section",
"summary",
"table",
"tbody",
"td",
"tfoot",
"th",
"thead",
"title",
"tr",
"track",
"ul"
], htmlRawNames = ["pre", "script", "style", "textarea"];
 
// node_modules/micromark-core-commonmark/lib/html-flow.js
var htmlFlow = {
name: "htmlFlow",
tokenize: tokenizeHtmlFlow,
resolveTo: resolveToHtmlFlow,
concrete: !0
}, blankLineBefore = {
tokenize: tokenizeBlankLineBefore,
partial: !0
}, nonLazyContinuationStart = {
tokenize: tokenizeNonLazyContinuationStart,
partial: !0
};
function resolveToHtmlFlow(events) {
let index2 = events.length;
for (; index2-- && !(events[index2][0] === "enter" && events[index2][1].type === "htmlFlow"); )
;
return index2 > 1 && events[index2 - 2][1].type === "linePrefix" && (events[index2][1].start = events[index2 - 2][1].start, events[index2 + 1][1].start = events[index2 - 2][1].start, events.splice(index2 - 2, 2)), events;
}
function tokenizeHtmlFlow(effects, ok2, nok) {
let self2 = this, marker, closingTag, buffer, index2, markerB;
return start;
function start(code) {
return before(code);
}
function before(code) {
return effects.enter("htmlFlow"), effects.enter("htmlFlowData"), effects.consume(code), open;
}
function open(code) {
return code === 33 ? (effects.consume(code), declarationOpen) : code === 47 ? (effects.consume(code), closingTag = !0, tagCloseStart) : code === 63 ? (effects.consume(code), marker = 3, self2.interrupt ? ok2 : continuationDeclarationInside) : asciiAlpha(code) ? (effects.consume(code), buffer = String.fromCharCode(code), tagName) : nok(code);
}
function declarationOpen(code) {
return code === 45 ? (effects.consume(code), marker = 2, commentOpenInside) : code === 91 ? (effects.consume(code), marker = 5, index2 = 0, cdataOpenInside) : asciiAlpha(code) ? (effects.consume(code), marker = 4, self2.interrupt ? ok2 : continuationDeclarationInside) : nok(code);
}
function commentOpenInside(code) {
return code === 45 ? (effects.consume(code), self2.interrupt ? ok2 : continuationDeclarationInside) : nok(code);
}
function cdataOpenInside(code) {
let value = "CDATA[";
return code === value.charCodeAt(index2++) ? (effects.consume(code), index2 === value.length ? self2.interrupt ? ok2 : continuation : cdataOpenInside) : nok(code);
}
function tagCloseStart(code) {
return asciiAlpha(code) ? (effects.consume(code), buffer = String.fromCharCode(code), tagName) : nok(code);
}
function tagName(code) {
if (code === null || code === 47 || code === 62 || markdownLineEndingOrSpace(code)) {
let slash = code === 47, name = buffer.toLowerCase();
return !slash && !closingTag && htmlRawNames.includes(name) ? (marker = 1, self2.interrupt ? ok2(code) : continuation(code)) : htmlBlockNames.includes(buffer.toLowerCase()) ? (marker = 6, slash ? (effects.consume(code), basicSelfClosing) : self2.interrupt ? ok2(code) : continuation(code)) : (marker = 7, self2.interrupt && !self2.parser.lazy[self2.now().line] ? nok(code) : closingTag ? completeClosingTagAfter(code) : completeAttributeNameBefore(code));
}
return code === 45 || asciiAlphanumeric(code) ? (effects.consume(code), buffer += String.fromCharCode(code), tagName) : nok(code);
}
function basicSelfClosing(code) {
return code === 62 ? (effects.consume(code), self2.interrupt ? ok2 : continuation) : nok(code);
}
function completeClosingTagAfter(code) {
return markdownSpace(code) ? (effects.consume(code), completeClosingTagAfter) : completeEnd(code);
}
function completeAttributeNameBefore(code) {
return code === 47 ? (effects.consume(code), completeEnd) : code === 58 || code === 95 || asciiAlpha(code) ? (effects.consume(code), completeAttributeName) : markdownSpace(code) ? (effects.consume(code), completeAttributeNameBefore) : completeEnd(code);
}
function completeAttributeName(code) {
return code === 45 || code === 46 || code === 58 || code === 95 || asciiAlphanumeric(code) ? (effects.consume(code), completeAttributeName) : completeAttributeNameAfter(code);
}
function completeAttributeNameAfter(code) {
return code === 61 ? (effects.consume(code), completeAttributeValueBefore) : markdownSpace(code) ? (effects.consume(code), completeAttributeNameAfter) : completeAttributeNameBefore(code);
}
function completeAttributeValueBefore(code) {
return code === null || code === 60 || code === 61 || code === 62 || code === 96 ? nok(code) : code === 34 || code === 39 ? (effects.consume(code), markerB = code, completeAttributeValueQuoted) : markdownSpace(code) ? (effects.consume(code), completeAttributeValueBefore) : completeAttributeValueUnquoted(code);
}
function completeAttributeValueQuoted(code) {
return code === markerB ? (effects.consume(code), markerB = null, completeAttributeValueQuotedAfter) : code === null || markdownLineEnding(code) ? nok(code) : (effects.consume(code), completeAttributeValueQuoted);
}
function completeAttributeValueUnquoted(code) {
return code === null || code === 34 || code === 39 || code === 47 || code === 60 || code === 61 || code === 62 || code === 96 || markdownLineEndingOrSpace(code) ? completeAttributeNameAfter(code) : (effects.consume(code), completeAttributeValueUnquoted);
}
function completeAttributeValueQuotedAfter(code) {
return code === 47 || code === 62 || markdownSpace(code) ? completeAttributeNameBefore(code) : nok(code);
}
function completeEnd(code) {
return code === 62 ? (effects.consume(code), completeAfter) : nok(code);
}
function completeAfter(code) {
return code === null || markdownLineEnding(code) ? continuation(code) : markdownSpace(code) ? (effects.consume(code), completeAfter) : nok(code);
}
function continuation(code) {
return code === 45 && marker === 2 ? (effects.consume(code), continuationCommentInside) : code === 60 && marker === 1 ? (effects.consume(code), continuationRawTagOpen) : code === 62 && marker === 4 ? (effects.consume(code), continuationClose) : code === 63 && marker === 3 ? (effects.consume(code), continuationDeclarationInside) : code === 93 && marker === 5 ? (effects.consume(code), continuationCdataInside) : markdownLineEnding(code) && (marker === 6 || marker === 7) ? (effects.exit("htmlFlowData"), effects.check(blankLineBefore, continuationAfter, continuationStart)(code)) : code === null || markdownLineEnding(code) ? (effects.exit("htmlFlowData"), continuationStart(code)) : (effects.consume(code), continuation);
}
function continuationStart(code) {
return effects.check(nonLazyContinuationStart, continuationStartNonLazy, continuationAfter)(code);
}
function continuationStartNonLazy(code) {
return effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), continuationBefore;
}
function continuationBefore(code) {
return code === null || markdownLineEnding(code) ? continuationStart(code) : (effects.enter("htmlFlowData"), continuation(code));
}
function continuationCommentInside(code) {
return code === 45 ? (effects.consume(code), continuationDeclarationInside) : continuation(code);
}
function continuationRawTagOpen(code) {
return code === 47 ? (effects.consume(code), buffer = "", continuationRawEndTag) : continuation(code);
}
function continuationRawEndTag(code) {
if (code === 62) {
let name = buffer.toLowerCase();
return htmlRawNames.includes(name) ? (effects.consume(code), continuationClose) : continuation(code);
}
return asciiAlpha(code) && buffer.length < 8 ? (effects.consume(code), buffer += String.fromCharCode(code), continuationRawEndTag) : continuation(code);
}
function continuationCdataInside(code) {
return code === 93 ? (effects.consume(code), continuationDeclarationInside) : continuation(code);
}
function continuationDeclarationInside(code) {
return code === 62 ? (effects.consume(code), continuationClose) : code === 45 && marker === 2 ? (effects.consume(code), continuationDeclarationInside) : continuation(code);
}
function continuationClose(code) {
return code === null || markdownLineEnding(code) ? (effects.exit("htmlFlowData"), continuationAfter(code)) : (effects.consume(code), continuationClose);
}
function continuationAfter(code) {
return effects.exit("htmlFlow"), ok2(code);
}
}
function tokenizeNonLazyContinuationStart(effects, ok2, nok) {
let self2 = this;
return start;
function start(code) {
return markdownLineEnding(code) ? (effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), after) : nok(code);
}
function after(code) {
return self2.parser.lazy[self2.now().line] ? nok(code) : ok2(code);
}
}
function tokenizeBlankLineBefore(effects, ok2, nok) {
return start;
function start(code) {
return effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), effects.attempt(blankLine, ok2, nok);
}
}
 
// node_modules/micromark-core-commonmark/lib/html-text.js
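// Tokenizes inline (text) HTML: open and close tags with attributes, comments, processing instructions, declarations, and CDATA sections, allowing line endings inside via lineEndingBefore.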
var htmlText = {
name: "htmlText",
tokenize: tokenizeHtmlText
};
function tokenizeHtmlText(effects, ok2, nok) {
let self2 = this, marker, index2, returnState;
return start;
function start(code) {
return effects.enter("htmlText"), effects.enter("htmlTextData"), effects.consume(code), open;
}
function open(code) {
return code === 33 ? (effects.consume(code), declarationOpen) : code === 47 ? (effects.consume(code), tagCloseStart) : code === 63 ? (effects.consume(code), instruction) : asciiAlpha(code) ? (effects.consume(code), tagOpen) : nok(code);
}
function declarationOpen(code) {
return code === 45 ? (effects.consume(code), commentOpenInside) : code === 91 ? (effects.consume(code), index2 = 0, cdataOpenInside) : asciiAlpha(code) ? (effects.consume(code), declaration) : nok(code);
}
function commentOpenInside(code) {
return code === 45 ? (effects.consume(code), commentEnd) : nok(code);
}
function comment(code) {
return code === null ? nok(code) : code === 45 ? (effects.consume(code), commentClose) : markdownLineEnding(code) ? (returnState = comment, lineEndingBefore(code)) : (effects.consume(code), comment);
}
function commentClose(code) {
return code === 45 ? (effects.consume(code), commentEnd) : comment(code);
}
function commentEnd(code) {
return code === 62 ? end(code) : code === 45 ? commentClose(code) : comment(code);
}
function cdataOpenInside(code) {
let value = "CDATA[";
return code === value.charCodeAt(index2++) ? (effects.consume(code), index2 === value.length ? cdata : cdataOpenInside) : nok(code);
}
function cdata(code) {
return code === null ? nok(code) : code === 93 ? (effects.consume(code), cdataClose) : markdownLineEnding(code) ? (returnState = cdata, lineEndingBefore(code)) : (effects.consume(code), cdata);
}
function cdataClose(code) {
return code === 93 ? (effects.consume(code), cdataEnd) : cdata(code);
}
function cdataEnd(code) {
return code === 62 ? end(code) : code === 93 ? (effects.consume(code), cdataEnd) : cdata(code);
}
function declaration(code) {
return code === null || code === 62 ? end(code) : markdownLineEnding(code) ? (returnState = declaration, lineEndingBefore(code)) : (effects.consume(code), declaration);
}
function instruction(code) {
return code === null ? nok(code) : code === 63 ? (effects.consume(code), instructionClose) : markdownLineEnding(code) ? (returnState = instruction, lineEndingBefore(code)) : (effects.consume(code), instruction);
}
function instructionClose(code) {
return code === 62 ? end(code) : instruction(code);
}
function tagCloseStart(code) {
return asciiAlpha(code) ? (effects.consume(code), tagClose) : nok(code);
}
function tagClose(code) {
return code === 45 || asciiAlphanumeric(code) ? (effects.consume(code), tagClose) : tagCloseBetween(code);
}
function tagCloseBetween(code) {
return markdownLineEnding(code) ? (returnState = tagCloseBetween, lineEndingBefore(code)) : markdownSpace(code) ? (effects.consume(code), tagCloseBetween) : end(code);
}
function tagOpen(code) {
return code === 45 || asciiAlphanumeric(code) ? (effects.consume(code), tagOpen) : code === 47 || code === 62 || markdownLineEndingOrSpace(code) ? tagOpenBetween(code) : nok(code);
}
function tagOpenBetween(code) {
return code === 47 ? (effects.consume(code), end) : code === 58 || code === 95 || asciiAlpha(code) ? (effects.consume(code), tagOpenAttributeName) : markdownLineEnding(code) ? (returnState = tagOpenBetween, lineEndingBefore(code)) : markdownSpace(code) ? (effects.consume(code), tagOpenBetween) : end(code);
}
function tagOpenAttributeName(code) {
return code === 45 || code === 46 || code === 58 || code === 95 || asciiAlphanumeric(code) ? (effects.consume(code), tagOpenAttributeName) : tagOpenAttributeNameAfter(code);
}
function tagOpenAttributeNameAfter(code) {
return code === 61 ? (effects.consume(code), tagOpenAttributeValueBefore) : markdownLineEnding(code) ? (returnState = tagOpenAttributeNameAfter, lineEndingBefore(code)) : markdownSpace(code) ? (effects.consume(code), tagOpenAttributeNameAfter) : tagOpenBetween(code);
}
function tagOpenAttributeValueBefore(code) {
return code === null || code === 60 || code === 61 || code === 62 || code === 96 ? nok(code) : code === 34 || code === 39 ? (effects.consume(code), marker = code, tagOpenAttributeValueQuoted) : markdownLineEnding(code) ? (returnState = tagOpenAttributeValueBefore, lineEndingBefore(code)) : markdownSpace(code) ? (effects.consume(code), tagOpenAttributeValueBefore) : (effects.consume(code), tagOpenAttributeValueUnquoted);
}
function tagOpenAttributeValueQuoted(code) {
return code === marker ? (effects.consume(code), marker = void 0, tagOpenAttributeValueQuotedAfter) : code === null ? nok(code) : markdownLineEnding(code) ? (returnState = tagOpenAttributeValueQuoted, lineEndingBefore(code)) : (effects.consume(code), tagOpenAttributeValueQuoted);
}
function tagOpenAttributeValueUnquoted(code) {
return code === null || code === 34 || code === 39 || code === 60 || code === 61 || code === 96 ? nok(code) : code === 47 || code === 62 || markdownLineEndingOrSpace(code) ? tagOpenBetween(code) : (effects.consume(code), tagOpenAttributeValueUnquoted);
}
function tagOpenAttributeValueQuotedAfter(code) {
return code === 47 || code === 62 || markdownLineEndingOrSpace(code) ? tagOpenBetween(code) : nok(code);
}
function end(code) {
return code === 62 ? (effects.consume(code), effects.exit("htmlTextData"), effects.exit("htmlText"), ok2) : nok(code);
}
function lineEndingBefore(code) {
return effects.exit("htmlTextData"), effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), lineEndingAfter;
}
function lineEndingAfter(code) {
return markdownSpace(code) ? factorySpace(effects, lineEndingAfterPrefix, "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code) : lineEndingAfterPrefix(code);
}
function lineEndingAfterPrefix(code) {
return effects.enter("htmlTextData"), returnState(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/label-end.js
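// Handles "]" closing a link or image label, then attempts a resource (destination and optional title), a full reference [label], or a collapsed reference []; the resolvers rewrite the collected events into link/image tokens.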
var labelEnd = {
name: "labelEnd",
tokenize: tokenizeLabelEnd,
resolveTo: resolveToLabelEnd,
resolveAll: resolveAllLabelEnd
}, resourceConstruct = {
tokenize: tokenizeResource
}, referenceFullConstruct = {
tokenize: tokenizeReferenceFull
}, referenceCollapsedConstruct = {
tokenize: tokenizeReferenceCollapsed
};
function resolveAllLabelEnd(events) {
let index2 = -1;
for (; ++index2 < events.length; ) {
let token = events[index2][1];
(token.type === "labelImage" || token.type === "labelLink" || token.type === "labelEnd") && (events.splice(index2 + 1, token.type === "labelImage" ? 4 : 2), token.type = "data", index2++);
}
return events;
}
function resolveToLabelEnd(events, context) {
let index2 = events.length, offset = 0, token, open, close, media;
for (; index2--; )
if (token = events[index2][1], open) {
if (token.type === "link" || token.type === "labelLink" && token._inactive)
break;
events[index2][0] === "enter" && token.type === "labelLink" && (token._inactive = !0);
} else if (close) {
if (events[index2][0] === "enter" && (token.type === "labelImage" || token.type === "labelLink") && !token._balanced && (open = index2, token.type !== "labelLink")) {
offset = 2;
break;
}
} else token.type === "labelEnd" && (close = index2);
let group = {
type: events[open][1].type === "labelLink" ? "link" : "image",
start: Object.assign({}, events[open][1].start),
end: Object.assign({}, events[events.length - 1][1].end)
}, label = {
type: "label",
start: Object.assign({}, events[open][1].start),
end: Object.assign({}, events[close][1].end)
}, text3 = {
type: "labelText",
start: Object.assign({}, events[open + offset + 2][1].end),
end: Object.assign({}, events[close - 2][1].start)
};
return media = [["enter", group, context], ["enter", label, context]], media = push(media, events.slice(open + 1, open + offset + 3)), media = push(media, [["enter", text3, context]]), media = push(media, resolveAll(context.parser.constructs.insideSpan.null, events.slice(open + offset + 4, close - 3), context)), media = push(media, [["exit", text3, context], events[close - 2], events[close - 1], ["exit", label, context]]), media = push(media, events.slice(close + 1)), media = push(media, [["exit", group, context]]), splice(events, open, events.length, media), events;
}
function tokenizeLabelEnd(effects, ok2, nok) {
let self2 = this, index2 = self2.events.length, labelStart, defined;
for (; index2--; )
if ((self2.events[index2][1].type === "labelImage" || self2.events[index2][1].type === "labelLink") && !self2.events[index2][1]._balanced) {
labelStart = self2.events[index2][1];
break;
}
return start;
function start(code) {
return labelStart ? labelStart._inactive ? labelEndNok(code) : (defined = self2.parser.defined.includes(normalizeIdentifier(self2.sliceSerialize({
start: labelStart.end,
end: self2.now()
}))), effects.enter("labelEnd"), effects.enter("labelMarker"), effects.consume(code), effects.exit("labelMarker"), effects.exit("labelEnd"), after) : nok(code);
}
function after(code) {
return code === 40 ? effects.attempt(resourceConstruct, labelEndOk, defined ? labelEndOk : labelEndNok)(code) : code === 91 ? effects.attempt(referenceFullConstruct, labelEndOk, defined ? referenceNotFull : labelEndNok)(code) : defined ? labelEndOk(code) : labelEndNok(code);
}
function referenceNotFull(code) {
return effects.attempt(referenceCollapsedConstruct, labelEndOk, labelEndNok)(code);
}
function labelEndOk(code) {
return ok2(code);
}
function labelEndNok(code) {
return labelStart._balanced = !0, nok(code);
}
}
function tokenizeResource(effects, ok2, nok) {
return resourceStart;
function resourceStart(code) {
return effects.enter("resource"), effects.enter("resourceMarker"), effects.consume(code), effects.exit("resourceMarker"), resourceBefore;
}
function resourceBefore(code) {
return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, resourceOpen)(code) : resourceOpen(code);
}
function resourceOpen(code) {
return code === 41 ? resourceEnd(code) : factoryDestination(effects, resourceDestinationAfter, resourceDestinationMissing, "resourceDestination", "resourceDestinationLiteral", "resourceDestinationLiteralMarker", "resourceDestinationRaw", "resourceDestinationString", 32)(code);
}
function resourceDestinationAfter(code) {
return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, resourceBetween)(code) : resourceEnd(code);
}
function resourceDestinationMissing(code) {
return nok(code);
}
function resourceBetween(code) {
return code === 34 || code === 39 || code === 40 ? factoryTitle(effects, resourceTitleAfter, nok, "resourceTitle", "resourceTitleMarker", "resourceTitleString")(code) : resourceEnd(code);
}
function resourceTitleAfter(code) {
return markdownLineEndingOrSpace(code) ? factoryWhitespace(effects, resourceEnd)(code) : resourceEnd(code);
}
function resourceEnd(code) {
return code === 41 ? (effects.enter("resourceMarker"), effects.consume(code), effects.exit("resourceMarker"), effects.exit("resource"), ok2) : nok(code);
}
}
function tokenizeReferenceFull(effects, ok2, nok) {
let self2 = this;
return referenceFull;
function referenceFull(code) {
return factoryLabel.call(self2, effects, referenceFullAfter, referenceFullMissing, "reference", "referenceMarker", "referenceString")(code);
}
function referenceFullAfter(code) {
return self2.parser.defined.includes(normalizeIdentifier(self2.sliceSerialize(self2.events[self2.events.length - 1][1]).slice(1, -1))) ? ok2(code) : nok(code);
}
function referenceFullMissing(code) {
return nok(code);
}
}
function tokenizeReferenceCollapsed(effects, ok2, nok) {
return referenceCollapsedStart;
function referenceCollapsedStart(code) {
return effects.enter("reference"), effects.enter("referenceMarker"), effects.consume(code), effects.exit("referenceMarker"), referenceCollapsedOpen;
}
function referenceCollapsedOpen(code) {
return code === 93 ? (effects.enter("referenceMarker"), effects.consume(code), effects.exit("referenceMarker"), effects.exit("reference"), ok2) : nok(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/label-start-image.js
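// Tokenizes "![" which opens an image label; resolution is deferred to labelEnd.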
var labelStartImage = {
name: "labelStartImage",
tokenize: tokenizeLabelStartImage,
resolveAll: labelEnd.resolveAll
};
function tokenizeLabelStartImage(effects, ok2, nok) {
let self2 = this;
return start;
function start(code) {
return effects.enter("labelImage"), effects.enter("labelImageMarker"), effects.consume(code), effects.exit("labelImageMarker"), open;
}
function open(code) {
return code === 91 ? (effects.enter("labelMarker"), effects.consume(code), effects.exit("labelMarker"), effects.exit("labelImage"), after) : nok(code);
}
function after(code) {
return code === 94 && "_hiddenFootnoteSupport" in self2.parser.constructs ? nok(code) : ok2(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/label-start-link.js
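// Tokenizes "[" which opens a link label; resolution is deferred to labelEnd.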
var labelStartLink = {
name: "labelStartLink",
tokenize: tokenizeLabelStartLink,
resolveAll: labelEnd.resolveAll
};
function tokenizeLabelStartLink(effects, ok2, nok) {
let self2 = this;
return start;
function start(code) {
return effects.enter("labelLink"), effects.enter("labelMarker"), effects.consume(code), effects.exit("labelMarker"), effects.exit("labelLink"), after;
}
function after(code) {
return code === 94 && "_hiddenFootnoteSupport" in self2.parser.constructs ? nok(code) : ok2(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/line-ending.js
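// Tokenizes a line ending inside text content and swallows the indentation (line prefix) that follows it.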
var lineEnding = {
name: "lineEnding",
tokenize: tokenizeLineEnding
};
function tokenizeLineEnding(effects, ok2) {
return start;
function start(code) {
return effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), factorySpace(effects, ok2, "linePrefix");
}
}
 
// node_modules/micromark-core-commonmark/lib/thematic-break.js
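// Tokenizes thematic breaks: three or more "*", "-", or "_" of the same kind, optionally separated by spaces.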
var thematicBreak = {
name: "thematicBreak",
tokenize: tokenizeThematicBreak
};
function tokenizeThematicBreak(effects, ok2, nok) {
let size = 0, marker;
return start;
function start(code) {
return effects.enter("thematicBreak"), before(code);
}
function before(code) {
return marker = code, atBreak(code);
}
function atBreak(code) {
return code === marker ? (effects.enter("thematicBreakSequence"), sequence(code)) : size >= 3 && (code === null || markdownLineEnding(code)) ? (effects.exit("thematicBreak"), ok2(code)) : nok(code);
}
function sequence(code) {
return code === marker ? (effects.consume(code), size++, sequence) : (effects.exit("thematicBreakSequence"), markdownSpace(code) ? factorySpace(effects, atBreak, "whitespace")(code) : atBreak(code));
}
}
 
// node_modules/micromark-core-commonmark/lib/list.js
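// Container construct for ordered and unordered lists: tokenizes the list item prefix (marker, optional value, whitespace) and decides, on each following line, whether the item continues or closes.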
var list = {
name: "list",
tokenize: tokenizeListStart,
continuation: {
tokenize: tokenizeListContinuation
},
exit: tokenizeListEnd
}, listItemPrefixWhitespaceConstruct = {
tokenize: tokenizeListItemPrefixWhitespace,
partial: !0
}, indentConstruct = {
tokenize: tokenizeIndent,
partial: !0
};
function tokenizeListStart(effects, ok2, nok) {
let self2 = this, tail = self2.events[self2.events.length - 1], initialSize = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], !0).length : 0, size = 0;
return start;
function start(code) {
let kind = self2.containerState.type || (code === 42 || code === 43 || code === 45 ? "listUnordered" : "listOrdered");
if (kind === "listUnordered" ? !self2.containerState.marker || code === self2.containerState.marker : asciiDigit(code)) {
if (self2.containerState.type || (self2.containerState.type = kind, effects.enter(kind, {
_container: !0
})), kind === "listUnordered")
return effects.enter("listItemPrefix"), code === 42 || code === 45 ? effects.check(thematicBreak, nok, atMarker)(code) : atMarker(code);
if (!self2.interrupt || code === 49)
return effects.enter("listItemPrefix"), effects.enter("listItemValue"), inside(code);
}
return nok(code);
}
function inside(code) {
return asciiDigit(code) && ++size < 10 ? (effects.consume(code), inside) : (!self2.interrupt || size < 2) && (self2.containerState.marker ? code === self2.containerState.marker : code === 41 || code === 46) ? (effects.exit("listItemValue"), atMarker(code)) : nok(code);
}
function atMarker(code) {
return effects.enter("listItemMarker"), effects.consume(code), effects.exit("listItemMarker"), self2.containerState.marker = self2.containerState.marker || code, effects.check(
blankLine,
// Can’t be empty when interrupting.
self2.interrupt ? nok : onBlank,
effects.attempt(listItemPrefixWhitespaceConstruct, endOfPrefix, otherPrefix)
);
}
function onBlank(code) {
return self2.containerState.initialBlankLine = !0, initialSize++, endOfPrefix(code);
}
function otherPrefix(code) {
return markdownSpace(code) ? (effects.enter("listItemPrefixWhitespace"), effects.consume(code), effects.exit("listItemPrefixWhitespace"), endOfPrefix) : nok(code);
}
function endOfPrefix(code) {
return self2.containerState.size = initialSize + self2.sliceSerialize(effects.exit("listItemPrefix"), !0).length, ok2(code);
}
}
function tokenizeListContinuation(effects, ok2, nok) {
let self2 = this;
return self2.containerState._closeFlow = void 0, effects.check(blankLine, onBlank, notBlank);
function onBlank(code) {
return self2.containerState.furtherBlankLines = self2.containerState.furtherBlankLines || self2.containerState.initialBlankLine, factorySpace(effects, ok2, "listItemIndent", self2.containerState.size + 1)(code);
}
function notBlank(code) {
return self2.containerState.furtherBlankLines || !markdownSpace(code) ? (self2.containerState.furtherBlankLines = void 0, self2.containerState.initialBlankLine = void 0, notInCurrentItem(code)) : (self2.containerState.furtherBlankLines = void 0, self2.containerState.initialBlankLine = void 0, effects.attempt(indentConstruct, ok2, notInCurrentItem)(code));
}
function notInCurrentItem(code) {
return self2.containerState._closeFlow = !0, self2.interrupt = void 0, factorySpace(effects, effects.attempt(list, ok2, nok), "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code);
}
}
function tokenizeIndent(effects, ok2, nok) {
let self2 = this;
return factorySpace(effects, afterPrefix, "listItemIndent", self2.containerState.size + 1);
function afterPrefix(code) {
let tail = self2.events[self2.events.length - 1];
return tail && tail[1].type === "listItemIndent" && tail[2].sliceSerialize(tail[1], !0).length === self2.containerState.size ? ok2(code) : nok(code);
}
}
function tokenizeListEnd(effects) {
effects.exit(this.containerState.type);
}
function tokenizeListItemPrefixWhitespace(effects, ok2, nok) {
let self2 = this;
return factorySpace(effects, afterPrefix, "listItemPrefixWhitespace", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 5);
function afterPrefix(code) {
let tail = self2.events[self2.events.length - 1];
return !markdownSpace(code) && tail && tail[1].type === "listItemPrefixWhitespace" ? ok2(code) : nok(code);
}
}
 
// node_modules/micromark-core-commonmark/lib/setext-underline.js
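// Tokenizes setext heading underlines ("=" for depth 1, "-" for depth 2); the resolver converts the preceding paragraph content into a setext heading.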
var setextUnderline = {
name: "setextUnderline",
tokenize: tokenizeSetextUnderline,
resolveTo: resolveToSetextUnderline
};
function resolveToSetextUnderline(events, context) {
let index2 = events.length, content3, text3, definition2;
for (; index2--; )
if (events[index2][0] === "enter") {
if (events[index2][1].type === "content") {
content3 = index2;
break;
}
events[index2][1].type === "paragraph" && (text3 = index2);
} else
events[index2][1].type === "content" && events.splice(index2, 1), !definition2 && events[index2][1].type === "definition" && (definition2 = index2);
let heading = {
type: "setextHeading",
start: Object.assign({}, events[text3][1].start),
end: Object.assign({}, events[events.length - 1][1].end)
};
return events[text3][1].type = "setextHeadingText", definition2 ? (events.splice(text3, 0, ["enter", heading, context]), events.splice(definition2 + 1, 0, ["exit", events[content3][1], context]), events[content3][1].end = Object.assign({}, events[definition2][1].end)) : events[content3][1] = heading, events.push(["exit", heading, context]), events;
}
function tokenizeSetextUnderline(effects, ok2, nok) {
let self2 = this, marker;
return start;
function start(code) {
let index2 = self2.events.length, paragraph;
for (; index2--; )
if (self2.events[index2][1].type !== "lineEnding" && self2.events[index2][1].type !== "linePrefix" && self2.events[index2][1].type !== "content") {
paragraph = self2.events[index2][1].type === "paragraph";
break;
}
return !self2.parser.lazy[self2.now().line] && (self2.interrupt || paragraph) ? (effects.enter("setextHeadingLine"), marker = code, before(code)) : nok(code);
}
function before(code) {
return effects.enter("setextHeadingLineSequence"), inside(code);
}
function inside(code) {
return code === marker ? (effects.consume(code), inside) : (effects.exit("setextHeadingLineSequence"), markdownSpace(code) ? factorySpace(effects, after, "lineSuffix")(code) : after(code));
}
function after(code) {
return code === null || markdownLineEnding(code) ? (effects.exit("setextHeadingLine"), ok2(code)) : nok(code);
}
}
 
// node_modules/micromark/lib/initialize/flow.js
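// Initial flow-level state machine: on each line it tries a blank line, then flowInitial constructs (indented code), then flow constructs after an optional line prefix, and finally falls back to content (paragraphs and definitions).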
var flow = {
tokenize: initializeFlow
};
function initializeFlow(effects) {
let self2 = this, initial = effects.attempt(
// Try to parse a blank line.
blankLine,
atBlankEnding,
// Try to parse initial flow (essentially, only code).
effects.attempt(
this.parser.constructs.flowInitial,
afterConstruct,
factorySpace(
effects,
effects.attempt(
this.parser.constructs.flow,
afterConstruct,
effects.attempt(content2, afterConstruct)
),
"linePrefix"
)
)
);
return initial;
function atBlankEnding(code) {
if (code === null) {
effects.consume(code);
return;
}
return effects.enter("lineEndingBlank"), effects.consume(code), effects.exit("lineEndingBlank"), self2.currentConstruct = void 0, initial;
}
function afterConstruct(code) {
if (code === null) {
effects.consume(code);
return;
}
return effects.enter("lineEnding"), effects.consume(code), effects.exit("lineEnding"), self2.currentConstruct = void 0, initial;
}
}
 
// node_modules/micromark/lib/initialize/text.js
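// Initializers for "string" and "text" content: they scan for characters that can start a construct, collect everything else as data, merge adjacent data tokens, and classify trailing spaces as line suffixes or trailing hard breaks.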
var resolver = {
resolveAll: createResolver()
}, string = initializeFactory("string"), text = initializeFactory("text");
function initializeFactory(field) {
return {
tokenize: initializeText,
resolveAll: createResolver(
field === "text" ? resolveAllLineSuffixes : void 0
)
};
function initializeText(effects) {
let self2 = this, constructs2 = this.parser.constructs[field], text3 = effects.attempt(constructs2, start, notText);
return start;
function start(code) {
return atBreak(code) ? text3(code) : notText(code);
}
function notText(code) {
if (code === null) {
effects.consume(code);
return;
}
return effects.enter("data"), effects.consume(code), data;
}
function data(code) {
return atBreak(code) ? (effects.exit("data"), text3(code)) : (effects.consume(code), data);
}
function atBreak(code) {
if (code === null)
return !0;
let list2 = constructs2[code], index2 = -1;
if (list2)
for (; ++index2 < list2.length; ) {
let item = list2[index2];
if (!item.previous || item.previous.call(self2, self2.previous))
return !0;
}
return !1;
}
}
}
function createResolver(extraResolver) {
return resolveAllText;
function resolveAllText(events, context) {
let index2 = -1, enter;
for (; ++index2 <= events.length; )
enter === void 0 ? events[index2] && events[index2][1].type === "data" && (enter = index2, index2++) : (!events[index2] || events[index2][1].type !== "data") && (index2 !== enter + 2 && (events[enter][1].end = events[index2 - 1][1].end, events.splice(enter + 2, index2 - enter - 2), index2 = enter + 2), enter = void 0);
return extraResolver ? extraResolver(events, context) : events;
}
}
function resolveAllLineSuffixes(events, context) {
let eventIndex = 0;
for (; ++eventIndex <= events.length; )
if ((eventIndex === events.length || events[eventIndex][1].type === "lineEnding") && events[eventIndex - 1][1].type === "data") {
let data = events[eventIndex - 1][1], chunks = context.sliceStream(data), index2 = chunks.length, bufferIndex = -1, size = 0, tabs;
for (; index2--; ) {
let chunk = chunks[index2];
if (typeof chunk == "string") {
for (bufferIndex = chunk.length; chunk.charCodeAt(bufferIndex - 1) === 32; )
size++, bufferIndex--;
if (bufferIndex) break;
bufferIndex = -1;
} else if (chunk === -2)
tabs = !0, size++;
else if (chunk !== -1) {
index2++;
break;
}
}
if (size) {
let token = {
type: eventIndex === events.length || tabs || size < 2 ? "lineSuffix" : "hardBreakTrailing",
start: {
line: data.end.line,
column: data.end.column - size,
offset: data.end.offset - size,
_index: data.start._index + index2,
_bufferIndex: index2 ? bufferIndex : data.start._bufferIndex + bufferIndex
},
end: Object.assign({}, data.end)
};
data.end = Object.assign({}, token.start), data.start.offset === data.end.offset ? Object.assign(data, token) : (events.splice(
eventIndex,
0,
["enter", token, context],
["exit", token, context]
), eventIndex += 2);
}
eventIndex++;
}
return events;
}
 
// node_modules/micromark/lib/create-tokenizer.js
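// Core tokenizer: tracks the current point (line/column/offset into the chunk stream), exposes the effects API (enter, exit, consume, attempt, check, interrupt), and runs construct state machines with backtracking via store()/restore().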
function createTokenizer(parser, initialize, from) {
let point3 = Object.assign(
from ? Object.assign({}, from) : {
line: 1,
column: 1,
offset: 0
},
{
_index: 0,
_bufferIndex: -1
}
), columnStart = {}, resolveAllConstructs = [], chunks = [], stack = [], consumed = !0, effects = {
consume,
enter,
exit: exit2,
attempt: constructFactory(onsuccessfulconstruct),
check: constructFactory(onsuccessfulcheck),
interrupt: constructFactory(onsuccessfulcheck, {
interrupt: !0
})
}, context = {
previous: null,
code: null,
containerState: {},
events: [],
parser,
sliceStream,
sliceSerialize,
now,
defineSkip,
write
}, state = initialize.tokenize.call(context, effects), expectedCode;
return initialize.resolveAll && resolveAllConstructs.push(initialize), context;
function write(slice) {
return chunks = push(chunks, slice), main(), chunks[chunks.length - 1] !== null ? [] : (addResult(initialize, 0), context.events = resolveAll(resolveAllConstructs, context.events, context), context.events);
}
function sliceSerialize(token, expandTabs) {
return serializeChunks(sliceStream(token), expandTabs);
}
function sliceStream(token) {
return sliceChunks(chunks, token);
}
function now() {
let { line, column, offset, _index, _bufferIndex } = point3;
return {
line,
column,
offset,
_index,
_bufferIndex
};
}
function defineSkip(value) {
columnStart[value.line] = value.column, accountForPotentialSkip();
}
function main() {
let chunkIndex;
for (; point3._index < chunks.length; ) {
let chunk = chunks[point3._index];
if (typeof chunk == "string")
for (chunkIndex = point3._index, point3._bufferIndex < 0 && (point3._bufferIndex = 0); point3._index === chunkIndex && point3._bufferIndex < chunk.length; )
go(chunk.charCodeAt(point3._bufferIndex));
else
go(chunk);
}
}
function go(code) {
consumed = void 0, expectedCode = code, state = state(code);
}
function consume(code) {
markdownLineEnding(code) ? (point3.line++, point3.column = 1, point3.offset += code === -3 ? 2 : 1, accountForPotentialSkip()) : code !== -1 && (point3.column++, point3.offset++), point3._bufferIndex < 0 ? point3._index++ : (point3._bufferIndex++, point3._bufferIndex === chunks[point3._index].length && (point3._bufferIndex = -1, point3._index++)), context.previous = code, consumed = !0;
}
function enter(type, fields) {
let token = fields || {};
return token.type = type, token.start = now(), context.events.push(["enter", token, context]), stack.push(token), token;
}
function exit2(type) {
let token = stack.pop();
return token.end = now(), context.events.push(["exit", token, context]), token;
}
function onsuccessfulconstruct(construct, info) {
addResult(construct, info.from);
}
function onsuccessfulcheck(_, info) {
info.restore();
}
function constructFactory(onreturn, fields) {
return hook;
function hook(constructs2, returnState, bogusState) {
let listOfConstructs, constructIndex, currentConstruct, info;
return Array.isArray(constructs2) ? handleListOfConstructs(constructs2) : "tokenize" in constructs2 ? (
// @ts-expect-error Looks like a construct.
handleListOfConstructs([constructs2])
) : handleMapOfConstructs(constructs2);
function handleMapOfConstructs(map) {
return start;
function start(code) {
let def = code !== null && map[code], all2 = code !== null && map.null, list2 = [
// To do: add more extension tests.
/* c8 ignore next 2 */
...Array.isArray(def) ? def : def ? [def] : [],
...Array.isArray(all2) ? all2 : all2 ? [all2] : []
];
return handleListOfConstructs(list2)(code);
}
}
function handleListOfConstructs(list2) {
return listOfConstructs = list2, constructIndex = 0, list2.length === 0 ? bogusState : handleConstruct(list2[constructIndex]);
}
function handleConstruct(construct) {
return start;
function start(code) {
return info = store(), currentConstruct = construct, construct.partial || (context.currentConstruct = construct), construct.name && context.parser.constructs.disable.null.includes(construct.name) ? nok(code) : construct.tokenize.call(
// If we do have fields, create an object w/ `context` as its
// prototype.
// This allows a “live binding”, which is needed for `interrupt`.
fields ? Object.assign(Object.create(context), fields) : context,
effects,
ok2,
nok
)(code);
}
}
function ok2(code) {
return consumed = !0, onreturn(currentConstruct, info), returnState;
}
function nok(code) {
return consumed = !0, info.restore(), ++constructIndex < listOfConstructs.length ? handleConstruct(listOfConstructs[constructIndex]) : bogusState;
}
}
}
function addResult(construct, from2) {
construct.resolveAll && !resolveAllConstructs.includes(construct) && resolveAllConstructs.push(construct), construct.resolve && splice(
context.events,
from2,
context.events.length - from2,
construct.resolve(context.events.slice(from2), context)
), construct.resolveTo && (context.events = construct.resolveTo(context.events, context));
}
function store() {
let startPoint = now(), startPrevious = context.previous, startCurrentConstruct = context.currentConstruct, startEventsIndex = context.events.length, startStack = Array.from(stack);
return {
restore,
from: startEventsIndex
};
function restore() {
point3 = startPoint, context.previous = startPrevious, context.currentConstruct = startCurrentConstruct, context.events.length = startEventsIndex, stack = startStack, accountForPotentialSkip();
}
}
function accountForPotentialSkip() {
point3.line in columnStart && point3.column < 2 && (point3.column = columnStart[point3.line], point3.offset += columnStart[point3.line] - 1);
}
}
function sliceChunks(chunks, token) {
let startIndex = token.start._index, startBufferIndex = token.start._bufferIndex, endIndex = token.end._index, endBufferIndex = token.end._bufferIndex, view;
if (startIndex === endIndex)
view = [chunks[startIndex].slice(startBufferIndex, endBufferIndex)];
else {
if (view = chunks.slice(startIndex, endIndex), startBufferIndex > -1) {
let head = view[0];
typeof head == "string" ? view[0] = head.slice(startBufferIndex) : view.shift();
}
endBufferIndex > 0 && view.push(chunks[endIndex].slice(0, endBufferIndex));
}
return view;
}
function serializeChunks(chunks, expandTabs) {
let index2 = -1, result = [], atTab;
for (; ++index2 < chunks.length; ) {
let chunk = chunks[index2], value;
if (typeof chunk == "string")
value = chunk;
else
switch (chunk) {
case -5: {
value = "\r";
break;
}
case -4: {
value = `
`;
break;
}
case -3: {
value = `\r
`;
break;
}
case -2: {
value = expandTabs ? " " : "\t";
break;
}
case -1: {
if (!expandTabs && atTab) continue;
value = " ";
break;
}
default:
value = String.fromCharCode(chunk);
}
atTab = chunk === -2, result.push(value);
}
return result.join("");
}
 
// node_modules/micromark/lib/constructs.js
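// Default CommonMark constructs, keyed by the character code that can start them, for each content type (document, flow, string, text), plus attention/resolver configuration.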
var constructs_exports = {};
__export(constructs_exports, {
attentionMarkers: () => attentionMarkers,
contentInitial: () => contentInitial,
disable: () => disable,
document: () => document3,
flow: () => flow2,
flowInitial: () => flowInitial,
insideSpan: () => insideSpan,
string: () => string2,
text: () => text2
});
var document3 = {
42: list,
43: list,
45: list,
48: list,
49: list,
50: list,
51: list,
52: list,
53: list,
54: list,
55: list,
56: list,
57: list,
62: blockQuote
}, contentInitial = {
91: definition
}, flowInitial = {
[-2]: codeIndented,
[-1]: codeIndented,
32: codeIndented
}, flow2 = {
35: headingAtx,
42: thematicBreak,
45: [setextUnderline, thematicBreak],
60: htmlFlow,
61: setextUnderline,
95: thematicBreak,
96: codeFenced,
126: codeFenced
}, string2 = {
38: characterReference,
92: characterEscape
}, text2 = {
[-5]: lineEnding,
[-4]: lineEnding,
[-3]: lineEnding,
33: labelStartImage,
38: characterReference,
42: attention,
60: [autolink, htmlText],
91: labelStartLink,
92: [hardBreakEscape, characterEscape],
93: labelEnd,
95: attention,
96: codeText
}, insideSpan = {
null: [attention, resolver]
}, attentionMarkers = {
null: [42, 95]
}, disable = {
null: []
};
 
// node_modules/micromark/lib/parse.js
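// Combines the default constructs with any extensions and returns a parser exposing factories for the content, document, flow, string, and text tokenizers.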
function parse(options) {
let constructs2 = (
/** @type {FullNormalizedExtension} */
combineExtensions([constructs_exports, ...(options || {}).extensions || []])
), parser = {
defined: [],
lazy: {},
constructs: constructs2,
content: create(content),
document: create(document2),
flow: create(flow),
string: create(string),
text: create(text)
};
return parser;
function create(initial) {
return creator;
function creator(from) {
return createTokenizer(parser, initial, from);
}
}
}
 
// node_modules/micromark/lib/postprocess.js
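// Repeatedly subtokenizes until all embedded content chunks in the event stream have been expanded.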
function postprocess(events) {
for (; !subtokenize(events); )
;
return events;
}
 
// node_modules/micromark/lib/preprocess.js
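// Streams raw input into chunks: strips a leading BOM, replaces NUL with U+FFFD, expands tabs into -2/-1 codes, and encodes CR, LF, and CRLF as the codes -5, -4, and -3.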
var search = /[\0\t\n\r]/g;
function preprocess() {
let column = 1, buffer = "", start = !0, atCarriageReturn;
return preprocessor;
function preprocessor(value, encoding, end) {
let chunks = [], match, next, startPosition, endPosition, code;
for (value = buffer + (typeof value == "string" ? value.toString() : new TextDecoder(encoding || void 0).decode(value)), startPosition = 0, buffer = "", start && (value.charCodeAt(0) === 65279 && startPosition++, start = void 0); startPosition < value.length; ) {
if (search.lastIndex = startPosition, match = search.exec(value), endPosition = match && match.index !== void 0 ? match.index : value.length, code = value.charCodeAt(endPosition), !match) {
buffer = value.slice(startPosition);
break;
}
if (code === 10 && startPosition === endPosition && atCarriageReturn)
chunks.push(-3), atCarriageReturn = void 0;
else
switch (atCarriageReturn && (chunks.push(-5), atCarriageReturn = void 0), startPosition < endPosition && (chunks.push(value.slice(startPosition, endPosition)), column += endPosition - startPosition), code) {
case 0: {
chunks.push(65533), column++;
break;
}
case 9: {
for (next = Math.ceil(column / 4) * 4, chunks.push(-2); column++ < next; ) chunks.push(-1);
break;
}
case 10: {
chunks.push(-4), column = 1;
break;
}
default:
atCarriageReturn = !0, column = 1;
}
startPosition = endPosition + 1;
}
return end && (atCarriageReturn && chunks.push(-5), buffer && chunks.push(buffer), chunks.push(null)), chunks;
}
}
 
// node_modules/micromark-util-decode-string/index.js
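// Decodes character escapes (backslash + punctuation) and character references (named, decimal, hexadecimal) inside already-parsed strings such as titles and destinations.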
var characterEscapeOrReference = /\\([!-/:-@[-`{-~])|&(#(?:\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});/gi;
function decodeString(value) {
return value.replace(characterEscapeOrReference, decode);
}
function decode($0, $1, $2) {
if ($1)
return $1;
if ($2.charCodeAt(0) === 35) {
let head2 = $2.charCodeAt(1), hex = head2 === 120 || head2 === 88;
return decodeNumericCharacterReference($2.slice(hex ? 2 : 1), hex ? 16 : 10);
}
return decodeNamedCharacterReference($2) || $0;
}
 
// node_modules/unist-util-stringify-position/lib/index.js
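// Serializes a unist node, position, or point as "line:column-line:column" for use in error messages.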
function stringifyPosition(value) {
return !value || typeof value != "object" ? "" : "position" in value || "type" in value ? position(value.position) : "start" in value || "end" in value ? position(value) : "line" in value || "column" in value ? point(value) : "";
}
function point(point3) {
return index(point3 && point3.line) + ":" + index(point3 && point3.column);
}
function position(pos) {
return point(pos && pos.start) + "-" + point(pos && pos.end);
}
function index(value) {
return value && typeof value == "number" ? value : 1;
}
 
// node_modules/mdast-util-from-markdown/lib/index.js
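// Turns micromark events into an mdast syntax tree: fromMarkdown runs preprocess, parse, and postprocess, then the compiler, whose enter/exit handlers build and position the tree nodes.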
var own = {}.hasOwnProperty;
function fromMarkdown(value, encoding, options) {
return typeof encoding != "string" && (options = encoding, encoding = void 0), compiler(options)(postprocess(parse(options).document().write(preprocess()(value, encoding, !0))));
}
function compiler(options) {
let config = {
transforms: [],
canContainEols: ["emphasis", "fragment", "heading", "paragraph", "strong"],
enter: {
autolink: opener(link),
autolinkProtocol: onenterdata,
autolinkEmail: onenterdata,
atxHeading: opener(heading),
blockQuote: opener(blockQuote2),
characterEscape: onenterdata,
characterReference: onenterdata,
codeFenced: opener(codeFlow),
codeFencedFenceInfo: buffer,
codeFencedFenceMeta: buffer,
codeIndented: opener(codeFlow, buffer),
codeText: opener(codeText2, buffer),
codeTextData: onenterdata,
data: onenterdata,
codeFlowValue: onenterdata,
definition: opener(definition2),
definitionDestinationString: buffer,
definitionLabelString: buffer,
definitionTitleString: buffer,
emphasis: opener(emphasis),
hardBreakEscape: opener(hardBreak),
hardBreakTrailing: opener(hardBreak),
htmlFlow: opener(html, buffer),
htmlFlowData: onenterdata,
htmlText: opener(html, buffer),
htmlTextData: onenterdata,
image: opener(image),
label: buffer,
link: opener(link),
listItem: opener(listItem),
listItemValue: onenterlistitemvalue,
listOrdered: opener(list2, onenterlistordered),
listUnordered: opener(list2),
paragraph: opener(paragraph),
reference: onenterreference,
referenceString: buffer,
resourceDestinationString: buffer,
resourceTitleString: buffer,
setextHeading: opener(heading),
strong: opener(strong),
thematicBreak: opener(thematicBreak2)
},
exit: {
atxHeading: closer(),
atxHeadingSequence: onexitatxheadingsequence,
autolink: closer(),
autolinkEmail: onexitautolinkemail,
autolinkProtocol: onexitautolinkprotocol,
blockQuote: closer(),
characterEscapeValue: onexitdata,
characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
characterReferenceValue: onexitcharacterreferencevalue,
characterReference: onexitcharacterreference,
codeFenced: closer(onexitcodefenced),
codeFencedFence: onexitcodefencedfence,
codeFencedFenceInfo: onexitcodefencedfenceinfo,
codeFencedFenceMeta: onexitcodefencedfencemeta,
codeFlowValue: onexitdata,
codeIndented: closer(onexitcodeindented),
codeText: closer(onexitcodetext),
codeTextData: onexitdata,
data: onexitdata,
definition: closer(),
definitionDestinationString: onexitdefinitiondestinationstring,
definitionLabelString: onexitdefinitionlabelstring,
definitionTitleString: onexitdefinitiontitlestring,
emphasis: closer(),
hardBreakEscape: closer(onexithardbreak),
hardBreakTrailing: closer(onexithardbreak),
htmlFlow: closer(onexithtmlflow),
htmlFlowData: onexitdata,
htmlText: closer(onexithtmltext),
htmlTextData: onexitdata,
image: closer(onexitimage),
label: onexitlabel,
labelText: onexitlabeltext,
lineEnding: onexitlineending,
link: closer(onexitlink),
listItem: closer(),
listOrdered: closer(),
listUnordered: closer(),
paragraph: closer(),
referenceString: onexitreferencestring,
resourceDestinationString: onexitresourcedestinationstring,
resourceTitleString: onexitresourcetitlestring,
resource: onexitresource,
setextHeading: closer(onexitsetextheading),
setextHeadingLineSequence: onexitsetextheadinglinesequence,
setextHeadingText: onexitsetextheadingtext,
strong: closer(),
thematicBreak: closer()
}
};
configure(config, (options || {}).mdastExtensions || []);
let data = {};
return compile;
function compile(events) {
let tree = {
type: "root",
children: []
}, context = {
stack: [tree],
tokenStack: [],
config,
enter,
exit: exit2,
buffer,
resume,
data
}, listStack = [], index2 = -1;
for (; ++index2 < events.length; )
if (events[index2][1].type === "listOrdered" || events[index2][1].type === "listUnordered")
if (events[index2][0] === "enter")
listStack.push(index2);
else {
let tail = listStack.pop();
index2 = prepareList(events, tail, index2);
}
for (index2 = -1; ++index2 < events.length; ) {
let handler = config[events[index2][0]];
own.call(handler, events[index2][1].type) && handler[events[index2][1].type].call(Object.assign({
sliceSerialize: events[index2][2].sliceSerialize
}, context), events[index2][1]);
}
if (context.tokenStack.length > 0) {
let tail = context.tokenStack[context.tokenStack.length - 1];
(tail[1] || defaultOnError).call(context, void 0, tail[0]);
}
for (tree.position = {
start: point2(events.length > 0 ? events[0][1].start : {
line: 1,
column: 1,
offset: 0
}),
end: point2(events.length > 0 ? events[events.length - 2][1].end : {
line: 1,
column: 1,
offset: 0
})
}, index2 = -1; ++index2 < config.transforms.length; )
tree = config.transforms[index2](tree) || tree;
return tree;
}
function prepareList(events, start, length) {
let index2 = start - 1, containerBalance = -1, listSpread = !1, listItem2, lineIndex, firstBlankLineIndex, atMarker;
for (; ++index2 <= length; ) {
let event = events[index2];
switch (event[1].type) {
case "listUnordered":
case "listOrdered":
case "blockQuote": {
event[0] === "enter" ? containerBalance++ : containerBalance--, atMarker = void 0;
break;
}
case "lineEndingBlank": {
event[0] === "enter" && (listItem2 && !atMarker && !containerBalance && !firstBlankLineIndex && (firstBlankLineIndex = index2), atMarker = void 0);
break;
}
case "linePrefix":
case "listItemValue":
case "listItemMarker":
case "listItemPrefix":
case "listItemPrefixWhitespace":
break;
default:
atMarker = void 0;
}
if (!containerBalance && event[0] === "enter" && event[1].type === "listItemPrefix" || containerBalance === -1 && event[0] === "exit" && (event[1].type === "listUnordered" || event[1].type === "listOrdered")) {
if (listItem2) {
let tailIndex = index2;
for (lineIndex = void 0; tailIndex--; ) {
let tailEvent = events[tailIndex];
if (tailEvent[1].type === "lineEnding" || tailEvent[1].type === "lineEndingBlank") {
if (tailEvent[0] === "exit") continue;
lineIndex && (events[lineIndex][1].type = "lineEndingBlank", listSpread = !0), tailEvent[1].type = "lineEnding", lineIndex = tailIndex;
} else if (!(tailEvent[1].type === "linePrefix" || tailEvent[1].type === "blockQuotePrefix" || tailEvent[1].type === "blockQuotePrefixWhitespace" || tailEvent[1].type === "blockQuoteMarker" || tailEvent[1].type === "listItemIndent"))
break;
}
firstBlankLineIndex && (!lineIndex || firstBlankLineIndex < lineIndex) && (listItem2._spread = !0), listItem2.end = Object.assign({}, lineIndex ? events[lineIndex][1].start : event[1].end), events.splice(lineIndex || index2, 0, ["exit", listItem2, event[2]]), index2++, length++;
}
if (event[1].type === "listItemPrefix") {
let item = {
type: "listItem",
_spread: !1,
start: Object.assign({}, event[1].start),
// @ts-expect-error: we’ll add `end` in a second.
end: void 0
};
listItem2 = item, events.splice(index2, 0, ["enter", item, event[2]]), index2++, length++, firstBlankLineIndex = void 0, atMarker = !0;
}
}
}
return events[start][1]._spread = listSpread, length;
}
function opener(create, and) {
return open;
function open(token) {
enter.call(this, create(token), token), and && and.call(this, token);
}
}
function buffer() {
this.stack.push({
type: "fragment",
children: []
});
}
function enter(node2, token, errorHandler) {
this.stack[this.stack.length - 1].children.push(node2), this.stack.push(node2), this.tokenStack.push([token, errorHandler]), node2.position = {
start: point2(token.start),
// @ts-expect-error: `end` will be patched later.
end: void 0
};
}
function closer(and) {
return close;
function close(token) {
and && and.call(this, token), exit2.call(this, token);
}
}
function exit2(token, onExitError) {
let node2 = this.stack.pop(), open = this.tokenStack.pop();
if (open)
open[0].type !== token.type && (onExitError ? onExitError.call(this, token, open[0]) : (open[1] || defaultOnError).call(this, token, open[0]));
else throw new Error("Cannot close `" + token.type + "` (" + stringifyPosition({
start: token.start,
end: token.end
}) + "): it\u2019s not open");
node2.position.end = point2(token.end);
}
function resume() {
return toString(this.stack.pop());
}
function onenterlistordered() {
this.data.expectingFirstListItemValue = !0;
}
function onenterlistitemvalue(token) {
if (this.data.expectingFirstListItemValue) {
let ancestor = this.stack[this.stack.length - 2];
ancestor.start = Number.parseInt(this.sliceSerialize(token), 10), this.data.expectingFirstListItemValue = void 0;
}
}
function onexitcodefencedfenceinfo() {
let data2 = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.lang = data2;
}
function onexitcodefencedfencemeta() {
let data2 = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.meta = data2;
}
function onexitcodefencedfence() {
this.data.flowCodeInside || (this.buffer(), this.data.flowCodeInside = !0);
}
function onexitcodefenced() {
let data2 = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.value = data2.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, ""), this.data.flowCodeInside = void 0;
}
function onexitcodeindented() {
let data2 = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.value = data2.replace(/(\r?\n|\r)$/g, "");
}
function onexitdefinitionlabelstring(token) {
let label = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.label = label, node2.identifier = normalizeIdentifier(this.sliceSerialize(token)).toLowerCase();
}
function onexitdefinitiontitlestring() {
let data2 = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.title = data2;
}
function onexitdefinitiondestinationstring() {
let data2 = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.url = data2;
}
function onexitatxheadingsequence(token) {
let node2 = this.stack[this.stack.length - 1];
if (!node2.depth) {
let depth = this.sliceSerialize(token).length;
node2.depth = depth;
}
}
function onexitsetextheadingtext() {
this.data.setextHeadingSlurpLineEnding = !0;
}
function onexitsetextheadinglinesequence(token) {
let node2 = this.stack[this.stack.length - 1];
node2.depth = this.sliceSerialize(token).codePointAt(0) === 61 ? 1 : 2;
}
function onexitsetextheading() {
this.data.setextHeadingSlurpLineEnding = void 0;
}
function onenterdata(token) {
let siblings = this.stack[this.stack.length - 1].children, tail = siblings[siblings.length - 1];
(!tail || tail.type !== "text") && (tail = text3(), tail.position = {
start: point2(token.start),
// @ts-expect-error: we’ll add `end` later.
end: void 0
}, siblings.push(tail)), this.stack.push(tail);
}
function onexitdata(token) {
let tail = this.stack.pop();
tail.value += this.sliceSerialize(token), tail.position.end = point2(token.end);
}
function onexitlineending(token) {
let context = this.stack[this.stack.length - 1];
if (this.data.atHardBreak) {
let tail = context.children[context.children.length - 1];
tail.position.end = point2(token.end), this.data.atHardBreak = void 0;
return;
}
!this.data.setextHeadingSlurpLineEnding && config.canContainEols.includes(context.type) && (onenterdata.call(this, token), onexitdata.call(this, token));
}
function onexithardbreak() {
this.data.atHardBreak = !0;
}
function onexithtmlflow() {
let data2 = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.value = data2;
}
function onexithtmltext() {
let data2 = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.value = data2;
}
function onexitcodetext() {
let data2 = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.value = data2;
}
function onexitlink() {
let node2 = this.stack[this.stack.length - 1];
if (this.data.inReference) {
let referenceType = this.data.referenceType || "shortcut";
node2.type += "Reference", node2.referenceType = referenceType, delete node2.url, delete node2.title;
} else
delete node2.identifier, delete node2.label;
this.data.referenceType = void 0;
}
function onexitimage() {
let node2 = this.stack[this.stack.length - 1];
if (this.data.inReference) {
let referenceType = this.data.referenceType || "shortcut";
node2.type += "Reference", node2.referenceType = referenceType, delete node2.url, delete node2.title;
} else
delete node2.identifier, delete node2.label;
this.data.referenceType = void 0;
}
function onexitlabeltext(token) {
let string3 = this.sliceSerialize(token), ancestor = this.stack[this.stack.length - 2];
ancestor.label = decodeString(string3), ancestor.identifier = normalizeIdentifier(string3).toLowerCase();
}
function onexitlabel() {
let fragment = this.stack[this.stack.length - 1], value = this.resume(), node2 = this.stack[this.stack.length - 1];
if (this.data.inReference = !0, node2.type === "link") {
let children = fragment.children;
node2.children = children;
} else
node2.alt = value;
}
function onexitresourcedestinationstring() {
let data2 = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.url = data2;
}
function onexitresourcetitlestring() {
let data2 = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.title = data2;
}
function onexitresource() {
this.data.inReference = void 0;
}
function onenterreference() {
this.data.referenceType = "collapsed";
}
function onexitreferencestring(token) {
let label = this.resume(), node2 = this.stack[this.stack.length - 1];
node2.label = label, node2.identifier = normalizeIdentifier(this.sliceSerialize(token)).toLowerCase(), this.data.referenceType = "full";
}
function onexitcharacterreferencemarker(token) {
this.data.characterReferenceType = token.type;
}
function onexitcharacterreferencevalue(token) {
let data2 = this.sliceSerialize(token), type = this.data.characterReferenceType, value;
type ? (value = decodeNumericCharacterReference(data2, type === "characterReferenceMarkerNumeric" ? 10 : 16), this.data.characterReferenceType = void 0) : value = decodeNamedCharacterReference(data2);
let tail = this.stack[this.stack.length - 1];
tail.value += value;
}
function onexitcharacterreference(token) {
let tail = this.stack.pop();
tail.position.end = point2(token.end);
}
function onexitautolinkprotocol(token) {
onexitdata.call(this, token);
let node2 = this.stack[this.stack.length - 1];
node2.url = this.sliceSerialize(token);
}
function onexitautolinkemail(token) {
onexitdata.call(this, token);
let node2 = this.stack[this.stack.length - 1];
node2.url = "mailto:" + this.sliceSerialize(token);
}
function blockQuote2() {
return {
type: "blockquote",
children: []
};
}
function codeFlow() {
return {
type: "code",
lang: null,
meta: null,
value: ""
};
}
function codeText2() {
return {
type: "inlineCode",
value: ""
};
}
function definition2() {
return {
type: "definition",
identifier: "",
label: null,
title: null,
url: ""
};
}
function emphasis() {
return {
type: "emphasis",
children: []
};
}
function heading() {
return {
type: "heading",
// @ts-expect-error `depth` will be set later.
depth: 0,
children: []
};
}
function hardBreak() {
return {
type: "break"
};
}
function html() {
return {
type: "html",
value: ""
};
}
function image() {
return {
type: "image",
title: null,
url: "",
alt: null
};
}
function link() {
return {
type: "link",
title: null,
url: "",
children: []
};
}
function list2(token) {
return {
type: "list",
ordered: token.type === "listOrdered",
start: null,
spread: token._spread,
children: []
};
}
function listItem(token) {
return {
type: "listItem",
spread: token._spread,
checked: null,
children: []
};
}
function paragraph() {
return {
type: "paragraph",
children: []
};
}
function strong() {
return {
type: "strong",
children: []
};
}
function text3() {
return {
type: "text",
value: ""
};
}
function thematicBreak2() {
return {
type: "thematicBreak"
};
}
}
function point2(d) {
return {
line: d.line,
column: d.column,
offset: d.offset
};
}
function configure(combined, extensions) {
let index2 = -1;
for (; ++index2 < extensions.length; ) {
let value = extensions[index2];
Array.isArray(value) ? configure(combined, value) : extension(combined, value);
}
}
function extension(combined, extension2) {
let key;
for (key in extension2)
if (own.call(extension2, key))
switch (key) {
case "canContainEols": {
let right = extension2[key];
right && combined[key].push(...right);
break;
}
case "transforms": {
let right = extension2[key];
right && combined[key].push(...right);
break;
}
case "enter":
case "exit": {
let right = extension2[key];
right && Object.assign(combined[key], right);
break;
}
}
}
function defaultOnError(left, right) {
throw left ? new Error("Cannot close `" + left.type + "` (" + stringifyPosition({
start: left.start,
end: left.end
}) + "): a different token (`" + right.type + "`, " + stringifyPosition({
start: right.start,
end: right.end
}) + ") is open") : new Error("Cannot close document, a token (`" + right.type + "`, " + stringifyPosition({
start: right.start,
end: right.end
}) + ") is still open");
}
 
// node_modules/remark-parse/lib/index.js
function remarkParse(options) {
let self2 = this;
self2.parser = parser;
function parser(doc) {
return fromMarkdown(doc, {
...self2.data("settings"),
...options,
// Note: these options are not in the readme.
// The goal is for them to be set by plugins on `data` instead of being
// passed by users.
extensions: self2.data("micromarkExtensions") || [],
mdastExtensions: self2.data("fromMarkdownExtensions") || []
});
}
}
 
// src-autolink/core/parser.js
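// parse2: parses markdown into an mdast syntax tree with remark-parse + remark-gfm;
// unified and remark-gfm are loaded at runtime from a CDN via dynamicImportESM.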
async function parse2(markdownText) {
let remarkGfm = await dynamic_import_esm_default("remark-gfm").default, { unified } = await dynamic_import_esm_default("unified");
return await unified().use(remarkParse).use(remarkGfm).parse(markdownText);
}
 
// node_modules/unist-util-is/lib/index.js
var convert = (
// Note: overloads in JSDoc can’t yet use different `@template`s.
/**
* @type {(
* (<Condition extends string>(test: Condition) => (node: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node & {type: Condition}) &
* (<Condition extends Props>(test: Condition) => (node: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node & Condition) &
* (<Condition extends TestFunction>(test: Condition) => (node: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node & Predicate<Condition, Node>) &
* ((test?: null | undefined) => (node?: unknown, index?: number | null | undefined, parent?: Parent | null | undefined, context?: unknown) => node is Node) &
* ((test?: Test) => Check)
* )}
*/
/**
* @param {Test} [test]
* @returns {Check}
*/
function(test) {
if (test == null)
return ok;
if (typeof test == "function")
return castFactory(test);
if (typeof test == "object")
return Array.isArray(test) ? anyFactory(test) : propsFactory(test);
if (typeof test == "string")
return typeFactory(test);
throw new Error("Expected function, string, or object as test");
}
);
function anyFactory(tests) {
let checks = [], index2 = -1;
for (; ++index2 < tests.length; )
checks[index2] = convert(tests[index2]);
return castFactory(any);
function any(...parameters) {
let index3 = -1;
for (; ++index3 < checks.length; )
if (checks[index3].apply(this, parameters)) return !0;
return !1;
}
}
function propsFactory(check) {
let checkAsRecord = (
/** @type {Record<string, unknown>} */
check
);
return castFactory(all2);
function all2(node2) {
let nodeAsRecord = (
/** @type {Record<string, unknown>} */
/** @type {unknown} */
node2
), key;
for (key in check)
if (nodeAsRecord[key] !== checkAsRecord[key]) return !1;
return !0;
}
}
function typeFactory(check) {
return castFactory(type);
function type(node2) {
return node2 && node2.type === check;
}
}
function castFactory(testFunction) {
return check;
function check(value, index2, parent) {
return !!(looksLikeANode(value) && testFunction.call(
this,
value,
typeof index2 == "number" ? index2 : void 0,
parent || void 0
));
}
}
function ok() {
return !0;
}
function looksLikeANode(value) {
return value !== null && typeof value == "object" && "type" in value;
}
 
// node_modules/unist-util-visit-parents/lib/index.js
var empty = [], CONTINUE = !0, EXIT = !1, SKIP = "skip";
function visitParents(tree, test, visitor, reverse) {
let check;
typeof test == "function" && typeof visitor != "function" ? (reverse = visitor, visitor = test) : check = test;
let is2 = convert(check), step = reverse ? -1 : 1;
factory(tree, void 0, [])();
function factory(node2, index2, parents) {
let value = (
/** @type {Record<string, unknown>} */
node2 && typeof node2 == "object" ? node2 : {}
);
if (typeof value.type == "string") {
let name = (
// `hast`
typeof value.tagName == "string" ? value.tagName : (
// `xast`
typeof value.name == "string" ? value.name : void 0
)
);
Object.defineProperty(visit2, "name", {
value: "node (" + (node2.type + (name ? "<" + name + ">" : "")) + ")"
});
}
return visit2;
function visit2() {
let result = empty, subresult, offset, grandparents;
if ((!test || is2(node2, index2, parents[parents.length - 1] || void 0)) && (result = toResult(visitor(node2, parents)), result[0] === EXIT))
return result;
if ("children" in node2 && node2.children) {
let nodeAsParent = (
/** @type {UnistParent} */
node2
);
if (nodeAsParent.children && result[0] !== SKIP)
for (offset = (reverse ? nodeAsParent.children.length : -1) + step, grandparents = parents.concat(nodeAsParent); offset > -1 && offset < nodeAsParent.children.length; ) {
let child = nodeAsParent.children[offset];
if (subresult = factory(child, offset, grandparents)(), subresult[0] === EXIT)
return subresult;
offset = typeof subresult[1] == "number" ? subresult[1] : offset + step;
}
}
return result;
}
}
}
function toResult(value) {
return Array.isArray(value) ? value : typeof value == "number" ? [CONTINUE, value] : value == null ? empty : [value];
}
 
// src-autolink/core/getNoteLinksUUIDFromMarkdown.js
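// Returns the UUIDs of every amplenote.com/notes/<uuid> link found in the given markdown.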
async function getNoteLinksUUIDFromMarkdown(markdownText) {
let ast = await parse2(markdownText), linkedNoteUUIDs = [];
return visitParents(ast, "link", (node2, ancestors) => {
let matches = node2.url.match(/(https?:\/\/)?(www\.)?amplenote\.com\/notes\/([a-f0-9-]+)(\??.*)(#?.*)?/);
matches && linkedNoteUUIDs.push(matches[3]);
}), linkedNoteUUIDs;
}
 
// node_modules/lodash-es/_freeGlobal.js
var freeGlobal = typeof globalThis == "object" && globalThis && globalThis.Object === Object && globalThis, freeGlobal_default = freeGlobal;
 
// node_modules/lodash-es/_root.js
var freeSelf = typeof self == "object" && self && self.Object === Object && self, root = freeGlobal_default || freeSelf || Function("return this")(), root_default = root;
 
// node_modules/lodash-es/_Symbol.js
var Symbol2 = root_default.Symbol, Symbol_default = Symbol2;
 
// node_modules/lodash-es/_getRawTag.js
var objectProto = Object.prototype, hasOwnProperty2 = objectProto.hasOwnProperty, nativeObjectToString = objectProto.toString, symToStringTag = Symbol_default ? Symbol_default.toStringTag : void 0;
function getRawTag(value) {
var isOwn = hasOwnProperty2.call(value, symToStringTag), tag = value[symToStringTag];
try {
value[symToStringTag] = void 0;
var unmasked = !0;
} catch {
}
var result = nativeObjectToString.call(value);
return unmasked && (isOwn ? value[symToStringTag] = tag : delete value[symToStringTag]), result;
}
var getRawTag_default = getRawTag;
 
// node_modules/lodash-es/_objectToString.js
var objectProto2 = Object.prototype, nativeObjectToString2 = objectProto2.toString;
function objectToString(value) {
return nativeObjectToString2.call(value);
}
var objectToString_default = objectToString;
 
// node_modules/lodash-es/_baseGetTag.js
var nullTag = "[object Null]", undefinedTag = "[object Undefined]", symToStringTag2 = Symbol_default ? Symbol_default.toStringTag : void 0;
function baseGetTag(value) {
return value == null ? value === void 0 ? undefinedTag : nullTag : symToStringTag2 && symToStringTag2 in Object(value) ? getRawTag_default(value) : objectToString_default(value);
}
var baseGetTag_default = baseGetTag;
 
// node_modules/lodash-es/isObjectLike.js
function isObjectLike(value) {
return value != null && typeof value == "object";
}
var isObjectLike_default = isObjectLike;
 
// node_modules/lodash-es/isSymbol.js
var symbolTag = "[object Symbol]";
function isSymbol(value) {
return typeof value == "symbol" || isObjectLike_default(value) && baseGetTag_default(value) == symbolTag;
}
var isSymbol_default = isSymbol;
 
// node_modules/lodash-es/_arrayMap.js
function arrayMap(array, iteratee) {
for (var index2 = -1, length = array == null ? 0 : array.length, result = Array(length); ++index2 < length; )
result[index2] = iteratee(array[index2], index2, array);
return result;
}
var arrayMap_default = arrayMap;
 
// node_modules/lodash-es/isArray.js
var isArray = Array.isArray, isArray_default = isArray;
 
// node_modules/lodash-es/_baseToString.js
var INFINITY = 1 / 0, symbolProto = Symbol_default ? Symbol_default.prototype : void 0, symbolToString = symbolProto ? symbolProto.toString : void 0;
function baseToString(value) {
if (typeof value == "string")
return value;
if (isArray_default(value))
return arrayMap_default(value, baseToString) + "";
if (isSymbol_default(value))
return symbolToString ? symbolToString.call(value) : "";
var result = value + "";
return result == "0" && 1 / value == -INFINITY ? "-0" : result;
}
var baseToString_default = baseToString;
 
// node_modules/lodash-es/isObject.js
function isObject(value) {
var type = typeof value;
return value != null && (type == "object" || type == "function");
}
var isObject_default = isObject;
 
// node_modules/lodash-es/isFunction.js
var asyncTag = "[object AsyncFunction]", funcTag = "[object Function]", genTag = "[object GeneratorFunction]", proxyTag = "[object Proxy]";
function isFunction(value) {
if (!isObject_default(value))
return !1;
var tag = baseGetTag_default(value);
return tag == funcTag || tag == genTag || tag == asyncTag || tag == proxyTag;
}
var isFunction_default = isFunction;
 
// node_modules/lodash-es/_coreJsData.js
var coreJsData = root_default["__core-js_shared__"], coreJsData_default = coreJsData;
 
// node_modules/lodash-es/_isMasked.js
var maskSrcKey = function() {
var uid = /[^.]+$/.exec(coreJsData_default && coreJsData_default.keys && coreJsData_default.keys.IE_PROTO || "");
return uid ? "Symbol(src)_1." + uid : "";
}();
function isMasked(func) {
return !!maskSrcKey && maskSrcKey in func;
}
var isMasked_default = isMasked;
 
// node_modules/lodash-es/_toSource.js
var funcProto = Function.prototype, funcToString = funcProto.toString;
function toSource(func) {
if (func != null) {
try {
return funcToString.call(func);
} catch {
}
try {
return func + "";
} catch {
}
}
return "";
}
var toSource_default = toSource;
 
// node_modules/lodash-es/_baseIsNative.js
var reRegExpChar = /[\\^$.*+?()[\]{}|]/g, reIsHostCtor = /^\[object .+?Constructor\]$/, funcProto2 = Function.prototype, objectProto3 = Object.prototype, funcToString2 = funcProto2.toString, hasOwnProperty3 = objectProto3.hasOwnProperty, reIsNative = RegExp(
"^" + funcToString2.call(hasOwnProperty3).replace(reRegExpChar, "\\$&").replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, "$1.*?") + "$"
);
function baseIsNative(value) {
if (!isObject_default(value) || isMasked_default(value))
return !1;
var pattern = isFunction_default(value) ? reIsNative : reIsHostCtor;
return pattern.test(toSource_default(value));
}
var baseIsNative_default = baseIsNative;
 
// node_modules/lodash-es/_getValue.js
function getValue(object, key) {
return object == null ? void 0 : object[key];
}
var getValue_default = getValue;
 
// node_modules/lodash-es/_getNative.js
function getNative(object, key) {
var value = getValue_default(object, key);
return baseIsNative_default(value) ? value : void 0;
}
var getNative_default = getNative;
 
// node_modules/lodash-es/_WeakMap.js
var WeakMap = getNative_default(root_default, "WeakMap"), WeakMap_default = WeakMap;
 
// node_modules/lodash-es/_baseCreate.js
var objectCreate = Object.create, baseCreate = /* @__PURE__ */ function() {
function object() {
}
return function(proto) {
if (!isObject_default(proto))
return {};
if (objectCreate)
return objectCreate(proto);
object.prototype = proto;
var result = new object();
return object.prototype = void 0, result;
};
}(), baseCreate_default = baseCreate;
 
// node_modules/lodash-es/_copyArray.js
function copyArray(source, array) {
var index2 = -1, length = source.length;
for (array || (array = Array(length)); ++index2 < length; )
array[index2] = source[index2];
return array;
}
var copyArray_default = copyArray;
 
// node_modules/lodash-es/_defineProperty.js
var defineProperty = function() {
try {
var func = getNative_default(Object, "defineProperty");
return func({}, "", {}), func;
} catch {
}
}(), defineProperty_default = defineProperty;
 
// node_modules/lodash-es/_arrayEach.js
function arrayEach(array, iteratee) {
for (var index2 = -1, length = array == null ? 0 : array.length; ++index2 < length && iteratee(array[index2], index2, array) !== !1; )
;
return array;
}
var arrayEach_default = arrayEach;
 
// node_modules/lodash-es/_isIndex.js
var MAX_SAFE_INTEGER = 9007199254740991, reIsUint = /^(?:0|[1-9]\d*)$/;
function isIndex(value, length) {
var type = typeof value;
return length = length == null ? MAX_SAFE_INTEGER : length, !!length && (type == "number" || type != "symbol" && reIsUint.test(value)) && value > -1 && value % 1 == 0 && value < length;
}
var isIndex_default = isIndex;
 
// node_modules/lodash-es/_baseAssignValue.js
function baseAssignValue(object, key, value) {
key == "__proto__" && defineProperty_default ? defineProperty_default(object, key, {
configurable: !0,
enumerable: !0,
value,
writable: !0
}) : object[key] = value;
}
var baseAssignValue_default = baseAssignValue;
 
// node_modules/lodash-es/eq.js
function eq(value, other) {
return value === other || value !== value && other !== other;
}
var eq_default = eq;
 
// node_modules/lodash-es/_assignValue.js
var objectProto4 = Object.prototype, hasOwnProperty4 = objectProto4.hasOwnProperty;
function assignValue(object, key, value) {
var objValue = object[key];
(!(hasOwnProperty4.call(object, key) && eq_default(objValue, value)) || value === void 0 && !(key in object)) && baseAssignValue_default(object, key, value);
}
var assignValue_default = assignValue;
 
// node_modules/lodash-es/_copyObject.js
function copyObject(source, props, object, customizer) {
var isNew = !object;
object || (object = {});
for (var index2 = -1, length = props.length; ++index2 < length; ) {
var key = props[index2], newValue = customizer ? customizer(object[key], source[key], key, object, source) : void 0;
newValue === void 0 && (newValue = source[key]), isNew ? baseAssignValue_default(object, key, newValue) : assignValue_default(object, key, newValue);
}
return object;
}
var copyObject_default = copyObject;
 
// node_modules/lodash-es/isLength.js
var MAX_SAFE_INTEGER2 = 9007199254740991;
function isLength(value) {
return typeof value == "number" && value > -1 && value % 1 == 0 && value <= MAX_SAFE_INTEGER2;
}
var isLength_default = isLength;
 
// node_modules/lodash-es/isArrayLike.js
function isArrayLike(value) {
return value != null && isLength_default(value.length) && !isFunction_default(value);
}
var isArrayLike_default = isArrayLike;
 
// node_modules/lodash-es/_isPrototype.js
var objectProto5 = Object.prototype;
function isPrototype(value) {
var Ctor = value && value.constructor, proto = typeof Ctor == "function" && Ctor.prototype || objectProto5;
return value === proto;
}
var isPrototype_default = isPrototype;
 
// node_modules/lodash-es/_baseTimes.js
function baseTimes(n, iteratee) {
for (var index2 = -1, result = Array(n); ++index2 < n; )
result[index2] = iteratee(index2);
return result;
}
var baseTimes_default = baseTimes;
 
// node_modules/lodash-es/_baseIsArguments.js
var argsTag = "[object Arguments]";
function baseIsArguments(value) {
return isObjectLike_default(value) && baseGetTag_default(value) == argsTag;
}
var baseIsArguments_default = baseIsArguments;
 
// node_modules/lodash-es/isArguments.js
var objectProto6 = Object.prototype, hasOwnProperty5 = objectProto6.hasOwnProperty, propertyIsEnumerable = objectProto6.propertyIsEnumerable, isArguments = baseIsArguments_default(/* @__PURE__ */ function() {
return arguments;
}()) ? baseIsArguments_default : function(value) {
return isObjectLike_default(value) && hasOwnProperty5.call(value, "callee") && !propertyIsEnumerable.call(value, "callee");
}, isArguments_default = isArguments;
 
// node_modules/lodash-es/stubFalse.js
function stubFalse() {
return !1;
}
var stubFalse_default = stubFalse;
 
// node_modules/lodash-es/isBuffer.js
var freeExports = typeof exports == "object" && exports && !exports.nodeType && exports, freeModule = freeExports && typeof module == "object" && module && !module.nodeType && module, moduleExports = freeModule && freeModule.exports === freeExports, Buffer2 = moduleExports ? root_default.Buffer : void 0, nativeIsBuffer = Buffer2 ? Buffer2.isBuffer : void 0, isBuffer = nativeIsBuffer || stubFalse_default, isBuffer_default = isBuffer;
 
// node_modules/lodash-es/_baseIsTypedArray.js
var argsTag2 = "[object Arguments]", arrayTag = "[object Array]", boolTag = "[object Boolean]", dateTag = "[object Date]", errorTag = "[object Error]", funcTag2 = "[object Function]", mapTag = "[object Map]", numberTag = "[object Number]", objectTag = "[object Object]", regexpTag = "[object RegExp]", setTag = "[object Set]", stringTag = "[object String]", weakMapTag = "[object WeakMap]", arrayBufferTag = "[object ArrayBuffer]", dataViewTag = "[object DataView]", float32Tag = "[object Float32Array]", float64Tag = "[object Float64Array]", int8Tag = "[object Int8Array]", int16Tag = "[object Int16Array]", int32Tag = "[object Int32Array]", uint8Tag = "[object Uint8Array]", uint8ClampedTag = "[object Uint8ClampedArray]", uint16Tag = "[object Uint16Array]", uint32Tag = "[object Uint32Array]", typedArrayTags = {};
typedArrayTags[float32Tag] = typedArrayTags[float64Tag] = typedArrayTags[int8Tag] = typedArrayTags[int16Tag] = typedArrayTags[int32Tag] = typedArrayTags[uint8Tag] = typedArrayTags[uint8ClampedTag] = typedArrayTags[uint16Tag] = typedArrayTags[uint32Tag] = !0;
typedArrayTags[argsTag2] = typedArrayTags[arrayTag] = typedArrayTags[arrayBufferTag] = typedArrayTags[boolTag] = typedArrayTags[dataViewTag] = typedArrayTags[dateTag] = typedArrayTags[errorTag] = typedArrayTags[funcTag2] = typedArrayTags[mapTag] = typedArrayTags[numberTag] = typedArrayTags[objectTag] = typedArrayTags[regexpTag] = typedArrayTags[setTag] = typedArrayTags[stringTag] = typedArrayTags[weakMapTag] = !1;
function baseIsTypedArray(value) {
return isObjectLike_default(value) && isLength_default(value.length) && !!typedArrayTags[baseGetTag_default(value)];
}
var baseIsTypedArray_default = baseIsTypedArray;
 
// node_modules/lodash-es/_baseUnary.js
function baseUnary(func) {
return function(value) {
return func(value);
};
}
var baseUnary_default = baseUnary;
 
// node_modules/lodash-es/_nodeUtil.js
var freeExports2 = typeof exports == "object" && exports && !exports.nodeType && exports, freeModule2 = freeExports2 && typeof module == "object" && module && !module.nodeType && module, moduleExports2 = freeModule2 && freeModule2.exports === freeExports2, freeProcess = moduleExports2 && freeGlobal_default.process, nodeUtil = function() {
try {
var types = freeModule2 && freeModule2.require && freeModule2.require("util").types;
return types || freeProcess && freeProcess.binding && freeProcess.binding("util");
} catch {
}
}(), nodeUtil_default = nodeUtil;
 
// node_modules/lodash-es/isTypedArray.js
var nodeIsTypedArray = nodeUtil_default && nodeUtil_default.isTypedArray, isTypedArray = nodeIsTypedArray ? baseUnary_default(nodeIsTypedArray) : baseIsTypedArray_default, isTypedArray_default = isTypedArray;
 
// node_modules/lodash-es/_arrayLikeKeys.js
var objectProto7 = Object.prototype, hasOwnProperty6 = objectProto7.hasOwnProperty;
function arrayLikeKeys(value, inherited) {
var isArr = isArray_default(value), isArg = !isArr && isArguments_default(value), isBuff = !isArr && !isArg && isBuffer_default(value), isType = !isArr && !isArg && !isBuff && isTypedArray_default(value), skipIndexes = isArr || isArg || isBuff || isType, result = skipIndexes ? baseTimes_default(value.length, String) : [], length = result.length;
for (var key in value)
(inherited || hasOwnProperty6.call(value, key)) && !(skipIndexes && // Safari 9 has enumerable `arguments.length` in strict mode.
(key == "length" || // Node.js 0.10 has enumerable non-index properties on buffers.
isBuff && (key == "offset" || key == "parent") || // PhantomJS 2 has enumerable non-index properties on typed arrays.
isType && (key == "buffer" || key == "byteLength" || key == "byteOffset") || // Skip index properties.
isIndex_default(key, length))) && result.push(key);
return result;
}
var arrayLikeKeys_default = arrayLikeKeys;
 
// node_modules/lodash-es/_overArg.js
function overArg(func, transform) {
return function(arg) {
return func(transform(arg));
};
}
var overArg_default = overArg;
 
// node_modules/lodash-es/_nativeKeys.js
var nativeKeys = overArg_default(Object.keys, Object), nativeKeys_default = nativeKeys;
 
// node_modules/lodash-es/_baseKeys.js
var objectProto8 = Object.prototype, hasOwnProperty7 = objectProto8.hasOwnProperty;
function baseKeys(object) {
if (!isPrototype_default(object))
return nativeKeys_default(object);
var result = [];
for (var key in Object(object))
hasOwnProperty7.call(object, key) && key != "constructor" && result.push(key);
return result;
}
var baseKeys_default = baseKeys;
 
// node_modules/lodash-es/keys.js
function keys(object) {
return isArrayLike_default(object) ? arrayLikeKeys_default(object) : baseKeys_default(object);
}
var keys_default = keys;
 
// node_modules/lodash-es/_nativeKeysIn.js
function nativeKeysIn(object) {
var result = [];
if (object != null)
for (var key in Object(object))
result.push(key);
return result;
}
var nativeKeysIn_default = nativeKeysIn;
 
// node_modules/lodash-es/_baseKeysIn.js
var objectProto9 = Object.prototype, hasOwnProperty8 = objectProto9.hasOwnProperty;
function baseKeysIn(object) {
if (!isObject_default(object))
return nativeKeysIn_default(object);
var isProto = isPrototype_default(object), result = [];
for (var key in object)
key == "constructor" && (isProto || !hasOwnProperty8.call(object, key)) || result.push(key);
return result;
}
var baseKeysIn_default = baseKeysIn;
 
// node_modules/lodash-es/keysIn.js
function keysIn(object) {
return isArrayLike_default(object) ? arrayLikeKeys_default(object, !0) : baseKeysIn_default(object);
}
var keysIn_default = keysIn;
 
// node_modules/lodash-es/_isKey.js
var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/, reIsPlainProp = /^\w*$/;
function isKey(value, object) {
if (isArray_default(value))
return !1;
var type = typeof value;
return type == "number" || type == "symbol" || type == "boolean" || value == null || isSymbol_default(value) ? !0 : reIsPlainProp.test(value) || !reIsDeepProp.test(value) || object != null && value in Object(object);
}
var isKey_default = isKey;
 
// node_modules/lodash-es/_nativeCreate.js
var nativeCreate = getNative_default(Object, "create"), nativeCreate_default = nativeCreate;
 
// node_modules/lodash-es/_hashClear.js
function hashClear() {
this.__data__ = nativeCreate_default ? nativeCreate_default(null) : {}, this.size = 0;
}
var hashClear_default = hashClear;
 
// node_modules/lodash-es/_hashDelete.js
function hashDelete(key) {
var result = this.has(key) && delete this.__data__[key];
return this.size -= result ? 1 : 0, result;
}
var hashDelete_default = hashDelete;
 
// node_modules/lodash-es/_hashGet.js
var HASH_UNDEFINED = "__lodash_hash_undefined__", objectProto10 = Object.prototype, hasOwnProperty9 = objectProto10.hasOwnProperty;
function hashGet(key) {
var data = this.__data__;
if (nativeCreate_default) {
var result = data[key];
return result === HASH_UNDEFINED ? void 0 : result;
}
return hasOwnProperty9.call(data, key) ? data[key] : void 0;
}
var hashGet_default = hashGet;
 
// node_modules/lodash-es/_hashHas.js
var objectProto11 = Object.prototype, hasOwnProperty10 = objectProto11.hasOwnProperty;
function hashHas(key) {
var data = this.__data__;
return nativeCreate_default ? data[key] !== void 0 : hasOwnProperty10.call(data, key);
}
var hashHas_default = hashHas;
 
// node_modules/lodash-es/_hashSet.js
var HASH_UNDEFINED2 = "__lodash_hash_undefined__";
function hashSet(key, value) {
var data = this.__data__;
return this.size += this.has(key) ? 0 : 1, data[key] = nativeCreate_default && value === void 0 ? HASH_UNDEFINED2 : value, this;
}
var hashSet_default = hashSet;
 
// node_modules/lodash-es/_Hash.js
function Hash(entries) {
var index2 = -1, length = entries == null ? 0 : entries.length;
for (this.clear(); ++index2 < length; ) {
var entry = entries[index2];
this.set(entry[0], entry[1]);
}
}
Hash.prototype.clear = hashClear_default;
Hash.prototype.delete = hashDelete_default;
Hash.prototype.get = hashGet_default;
Hash.prototype.has = hashHas_default;
Hash.prototype.set = hashSet_default;
var Hash_default = Hash;
 
// node_modules/lodash-es/_listCacheClear.js
function listCacheClear() {
this.__data__ = [], this.size = 0;
}
var listCacheClear_default = listCacheClear;
 
// node_modules/lodash-es/_assocIndexOf.js
function assocIndexOf(array, key) {
for (var length = array.length; length--; )
if (eq_default(array[length][0], key))
return length;
return -1;
}
var assocIndexOf_default = assocIndexOf;
 
// node_modules/lodash-es/_listCacheDelete.js
var arrayProto = Array.prototype, splice2 = arrayProto.splice;
function listCacheDelete(key) {
var data = this.__data__, index2 = assocIndexOf_default(data, key);
if (index2 < 0)
return !1;
var lastIndex = data.length - 1;
return index2 == lastIndex ? data.pop() : splice2.call(data, index2, 1), --this.size, !0;
}
var listCacheDelete_default = listCacheDelete;
 
// node_modules/lodash-es/_listCacheGet.js
function listCacheGet(key) {
var data = this.__data__, index2 = assocIndexOf_default(data, key);
return index2 < 0 ? void 0 : data[index2][1];
}
var listCacheGet_default = listCacheGet;
 
// node_modules/lodash-es/_listCacheHas.js
function listCacheHas(key) {
return assocIndexOf_default(this.__data__, key) > -1;
}
var listCacheHas_default = listCacheHas;
 
// node_modules/lodash-es/_listCacheSet.js
function listCacheSet(key, value) {
var data = this.__data__, index2 = assocIndexOf_default(data, key);
return index2 < 0 ? (++this.size, data.push([key, value])) : data[index2][1] = value, this;
}
var listCacheSet_default = listCacheSet;
 
// node_modules/lodash-es/_ListCache.js
function ListCache(entries) {
var index2 = -1, length = entries == null ? 0 : entries.length;
for (this.clear(); ++index2 < length; ) {
var entry = entries[index2];
this.set(entry[0], entry[1]);
}
}
ListCache.prototype.clear = listCacheClear_default;
ListCache.prototype.delete = listCacheDelete_default;
ListCache.prototype.get = listCacheGet_default;
ListCache.prototype.has = listCacheHas_default;
ListCache.prototype.set = listCacheSet_default;
var ListCache_default = ListCache;
 
// node_modules/lodash-es/_Map.js
var Map2 = getNative_default(root_default, "Map"), Map_default = Map2;
 
// node_modules/lodash-es/_mapCacheClear.js
function mapCacheClear() {
this.size = 0, this.__data__ = {
hash: new Hash_default(),
map: new (Map_default || ListCache_default)(),
string: new Hash_default()
};
}
var mapCacheClear_default = mapCacheClear;
 
// node_modules/lodash-es/_isKeyable.js
function isKeyable(value) {
var type = typeof value;
return type == "string" || type == "number" || type == "symbol" || type == "boolean" ? value !== "__proto__" : value === null;
}
var isKeyable_default = isKeyable;
 
// node_modules/lodash-es/_getMapData.js
function getMapData(map, key) {
var data = map.__data__;
return isKeyable_default(key) ? data[typeof key == "string" ? "string" : "hash"] : data.map;
}
var getMapData_default = getMapData;
 
// node_modules/lodash-es/_mapCacheDelete.js
function mapCacheDelete(key) {
var result = getMapData_default(this, key).delete(key);
return this.size -= result ? 1 : 0, result;
}
var mapCacheDelete_default = mapCacheDelete;
 
// node_modules/lodash-es/_mapCacheGet.js
function mapCacheGet(key) {
return getMapData_default(this, key).get(key);
}
var mapCacheGet_default = mapCacheGet;
 
// node_modules/lodash-es/_mapCacheHas.js
function mapCacheHas(key) {
return getMapData_default(this, key).has(key);
}
var mapCacheHas_default = mapCacheHas;
 
// node_modules/lodash-es/_mapCacheSet.js
function mapCacheSet(key, value) {
var data = getMapData_default(this, key), size = data.size;
return data.set(key, value), this.size += data.size == size ? 0 : 1, this;
}
var mapCacheSet_default = mapCacheSet;
 
// node_modules/lodash-es/_MapCache.js
function MapCache(entries) {
var index2 = -1, length = entries == null ? 0 : entries.length;
for (this.clear(); ++index2 < length; ) {
var entry = entries[index2];
this.set(entry[0], entry[1]);
}
}
MapCache.prototype.clear = mapCacheClear_default;
MapCache.prototype.delete = mapCacheDelete_default;
MapCache.prototype.get = mapCacheGet_default;
MapCache.prototype.has = mapCacheHas_default;
MapCache.prototype.set = mapCacheSet_default;
var MapCache_default = MapCache;
 
// node_modules/lodash-es/memoize.js
var FUNC_ERROR_TEXT = "Expected a function";
function memoize(func, resolver2) {
if (typeof func != "function" || resolver2 != null && typeof resolver2 != "function")
throw new TypeError(FUNC_ERROR_TEXT);
var memoized = function() {
var args = arguments, key = resolver2 ? resolver2.apply(this, args) : args[0], cache = memoized.cache;
if (cache.has(key))
return cache.get(key);
var result = func.apply(this, args);
return memoized.cache = cache.set(key, result) || cache, result;
};
return memoized.cache = new (memoize.Cache || MapCache_default)(), memoized;
}
memoize.Cache = MapCache_default;
var memoize_default = memoize;
 
// node_modules/lodash-es/_memoizeCapped.js
var MAX_MEMOIZE_SIZE = 500;
function memoizeCapped(func) {
var result = memoize_default(func, function(key) {
return cache.size === MAX_MEMOIZE_SIZE && cache.clear(), key;
}), cache = result.cache;
return result;
}
var memoizeCapped_default = memoizeCapped;
 
// node_modules/lodash-es/_stringToPath.js
var rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g, reEscapeChar = /\\(\\)?/g, stringToPath = memoizeCapped_default(function(string3) {
var result = [];
return string3.charCodeAt(0) === 46 && result.push(""), string3.replace(rePropName, function(match, number, quote, subString) {
result.push(quote ? subString.replace(reEscapeChar, "$1") : number || match);
}), result;
}), stringToPath_default = stringToPath;
 
// node_modules/lodash-es/toString.js
function toString2(value) {
return value == null ? "" : baseToString_default(value);
}
var toString_default = toString2;
 
// node_modules/lodash-es/_castPath.js
function castPath(value, object) {
return isArray_default(value) ? value : isKey_default(value, object) ? [value] : stringToPath_default(toString_default(value));
}
var castPath_default = castPath;
 
// node_modules/lodash-es/_toKey.js
var INFINITY2 = 1 / 0;
function toKey(value) {
if (typeof value == "string" || isSymbol_default(value))
return value;
var result = value + "";
return result == "0" && 1 / value == -INFINITY2 ? "-0" : result;
}
var toKey_default = toKey;
 
// node_modules/lodash-es/_baseGet.js
function baseGet(object, path) {
path = castPath_default(path, object);
for (var index2 = 0, length = path.length; object != null && index2 < length; )
object = object[toKey_default(path[index2++])];
return index2 && index2 == length ? object : void 0;
}
var baseGet_default = baseGet;
 
// node_modules/lodash-es/get.js
function get(object, path, defaultValue) {
var result = object == null ? void 0 : baseGet_default(object, path);
return result === void 0 ? defaultValue : result;
}
var get_default = get;
 
// node_modules/lodash-es/_arrayPush.js
function arrayPush(array, values) {
for (var index2 = -1, length = values.length, offset = array.length; ++index2 < length; )
array[offset + index2] = values[index2];
return array;
}
var arrayPush_default = arrayPush;
 
// node_modules/lodash-es/_getPrototype.js
var getPrototype = overArg_default(Object.getPrototypeOf, Object), getPrototype_default = getPrototype;
 
// node_modules/lodash-es/_stackClear.js
function stackClear() {
this.__data__ = new ListCache_default(), this.size = 0;
}
var stackClear_default = stackClear;
 
// node_modules/lodash-es/_stackDelete.js
function stackDelete(key) {
var data = this.__data__, result = data.delete(key);
return this.size = data.size, result;
}
var stackDelete_default = stackDelete;
 
// node_modules/lodash-es/_stackGet.js
function stackGet(key) {
return this.__data__.get(key);
}
var stackGet_default = stackGet;
 
// node_modules/lodash-es/_stackHas.js
function stackHas(key) {
return this.__data__.has(key);
}
var stackHas_default = stackHas;
 
// node_modules/lodash-es/_stackSet.js
var LARGE_ARRAY_SIZE = 200;
function stackSet(key, value) {
var data = this.__data__;
if (data instanceof ListCache_default) {
var pairs = data.__data__;
if (!Map_default || pairs.length < LARGE_ARRAY_SIZE - 1)
return pairs.push([key, value]), this.size = ++data.size, this;
data = this.__data__ = new MapCache_default(pairs);
}
return data.set(key, value), this.size = data.size, this;
}
var stackSet_default = stackSet;
 
// node_modules/lodash-es/_Stack.js
function Stack(entries) {
var data = this.__data__ = new ListCache_default(entries);
this.size = data.size;
}
Stack.prototype.clear = stackClear_default;
Stack.prototype.delete = stackDelete_default;
Stack.prototype.get = stackGet_default;
Stack.prototype.has = stackHas_default;
Stack.prototype.set = stackSet_default;
var Stack_default = Stack;
 
// node_modules/lodash-es/_baseAssign.js
function baseAssign(object, source) {
return object && copyObject_default(source, keys_default(source), object);
}
var baseAssign_default = baseAssign;
 
// node_modules/lodash-es/_baseAssignIn.js
function baseAssignIn(object, source) {
return object && copyObject_default(source, keysIn_default(source), object);
}
var baseAssignIn_default = baseAssignIn;
 
// node_modules/lodash-es/_cloneBuffer.js
var freeExports3 = typeof exports == "object" && exports && !exports.nodeType && exports, freeModule3 = freeExports3 && typeof module == "object" && module && !module.nodeType && module, moduleExports3 = freeModule3 && freeModule3.exports === freeExports3, Buffer3 = moduleExports3 ? root_default.Buffer : void 0, allocUnsafe = Buffer3 ? Buffer3.allocUnsafe : void 0;
function cloneBuffer(buffer, isDeep) {
if (isDeep)
return buffer.slice();
var length = buffer.length, result = allocUnsafe ? allocUnsafe(length) : new buffer.constructor(length);
return buffer.copy(result), result;
}
var cloneBuffer_default = cloneBuffer;
 
// node_modules/lodash-es/_arrayFilter.js
function arrayFilter(array, predicate) {
for (var index2 = -1, length = array == null ? 0 : array.length, resIndex = 0, result = []; ++index2 < length; ) {
var value = array[index2];
predicate(value, index2, array) && (result[resIndex++] = value);
}
return result;
}
var arrayFilter_default = arrayFilter;
 
// node_modules/lodash-es/stubArray.js
function stubArray() {
return [];
}
var stubArray_default = stubArray;
 
// node_modules/lodash-es/_getSymbols.js
var objectProto12 = Object.prototype, propertyIsEnumerable2 = objectProto12.propertyIsEnumerable, nativeGetSymbols = Object.getOwnPropertySymbols, getSymbols = nativeGetSymbols ? function(object) {
return object == null ? [] : (object = Object(object), arrayFilter_default(nativeGetSymbols(object), function(symbol) {
return propertyIsEnumerable2.call(object, symbol);
}));
} : stubArray_default, getSymbols_default = getSymbols;
 
// node_modules/lodash-es/_copySymbols.js
function copySymbols(source, object) {
return copyObject_default(source, getSymbols_default(source), object);
}
var copySymbols_default = copySymbols;
 
// node_modules/lodash-es/_getSymbolsIn.js
var nativeGetSymbols2 = Object.getOwnPropertySymbols, getSymbolsIn = nativeGetSymbols2 ? function(object) {
for (var result = []; object; )
arrayPush_default(result, getSymbols_default(object)), object = getPrototype_default(object);
return result;
} : stubArray_default, getSymbolsIn_default = getSymbolsIn;
 
// node_modules/lodash-es/_copySymbolsIn.js
function copySymbolsIn(source, object) {
return copyObject_default(source, getSymbolsIn_default(source), object);
}
var copySymbolsIn_default = copySymbolsIn;
 
// node_modules/lodash-es/_baseGetAllKeys.js
function baseGetAllKeys(object, keysFunc, symbolsFunc) {
var result = keysFunc(object);
return isArray_default(object) ? result : arrayPush_default(result, symbolsFunc(object));
}
var baseGetAllKeys_default = baseGetAllKeys;
 
// node_modules/lodash-es/_getAllKeys.js
function getAllKeys(object) {
return baseGetAllKeys_default(object, keys_default, getSymbols_default);
}
var getAllKeys_default = getAllKeys;
 
// node_modules/lodash-es/_getAllKeysIn.js
function getAllKeysIn(object) {
return baseGetAllKeys_default(object, keysIn_default, getSymbolsIn_default);
}
var getAllKeysIn_default = getAllKeysIn;
 
// node_modules/lodash-es/_DataView.js
var DataView = getNative_default(root_default, "DataView"), DataView_default = DataView;
 
// node_modules/lodash-es/_Promise.js
var Promise2 = getNative_default(root_default, "Promise"), Promise_default = Promise2;
 
// node_modules/lodash-es/_Set.js
var Set = getNative_default(root_default, "Set"), Set_default = Set;
 
// node_modules/lodash-es/_getTag.js
var mapTag2 = "[object Map]", objectTag2 = "[object Object]", promiseTag = "[object Promise]", setTag2 = "[object Set]", weakMapTag2 = "[object WeakMap]", dataViewTag2 = "[object DataView]", dataViewCtorString = toSource_default(DataView_default), mapCtorString = toSource_default(Map_default), promiseCtorString = toSource_default(Promise_default), setCtorString = toSource_default(Set_default), weakMapCtorString = toSource_default(WeakMap_default), getTag = baseGetTag_default;
(DataView_default && getTag(new DataView_default(new ArrayBuffer(1))) != dataViewTag2 || Map_default && getTag(new Map_default()) != mapTag2 || Promise_default && getTag(Promise_default.resolve()) != promiseTag || Set_default && getTag(new Set_default()) != setTag2 || WeakMap_default && getTag(new WeakMap_default()) != weakMapTag2) && (getTag = function(value) {
var result = baseGetTag_default(value), Ctor = result == objectTag2 ? value.constructor : void 0, ctorString = Ctor ? toSource_default(Ctor) : "";
if (ctorString)
switch (ctorString) {
case dataViewCtorString:
return dataViewTag2;
case mapCtorString:
return mapTag2;
case promiseCtorString:
return promiseTag;
case setCtorString:
return setTag2;
case weakMapCtorString:
return weakMapTag2;
}
return result;
});
var getTag_default = getTag;
 
// node_modules/lodash-es/_initCloneArray.js
var objectProto13 = Object.prototype, hasOwnProperty11 = objectProto13.hasOwnProperty;
function initCloneArray(array) {
var length = array.length, result = new array.constructor(length);
return length && typeof array[0] == "string" && hasOwnProperty11.call(array, "index") && (result.index = array.index, result.input = array.input), result;
}
var initCloneArray_default = initCloneArray;
 
// node_modules/lodash-es/_Uint8Array.js
var Uint8Array2 = root_default.Uint8Array, Uint8Array_default = Uint8Array2;
 
// node_modules/lodash-es/_cloneArrayBuffer.js
function cloneArrayBuffer(arrayBuffer) {
var result = new arrayBuffer.constructor(arrayBuffer.byteLength);
return new Uint8Array_default(result).set(new Uint8Array_default(arrayBuffer)), result;
}
var cloneArrayBuffer_default = cloneArrayBuffer;
 
// node_modules/lodash-es/_cloneDataView.js
function cloneDataView(dataView, isDeep) {
var buffer = isDeep ? cloneArrayBuffer_default(dataView.buffer) : dataView.buffer;
return new dataView.constructor(buffer, dataView.byteOffset, dataView.byteLength);
}
var cloneDataView_default = cloneDataView;
 
// node_modules/lodash-es/_cloneRegExp.js
var reFlags = /\w*$/;
function cloneRegExp(regexp) {
var result = new regexp.constructor(regexp.source, reFlags.exec(regexp));
return result.lastIndex = regexp.lastIndex, result;
}
var cloneRegExp_default = cloneRegExp;
 
// node_modules/lodash-es/_cloneSymbol.js
var symbolProto2 = Symbol_default ? Symbol_default.prototype : void 0, symbolValueOf = symbolProto2 ? symbolProto2.valueOf : void 0;
function cloneSymbol(symbol) {
return symbolValueOf ? Object(symbolValueOf.call(symbol)) : {};
}
var cloneSymbol_default = cloneSymbol;
 
// node_modules/lodash-es/_cloneTypedArray.js
function cloneTypedArray(typedArray, isDeep) {
var buffer = isDeep ? cloneArrayBuffer_default(typedArray.buffer) : typedArray.buffer;
return new typedArray.constructor(buffer, typedArray.byteOffset, typedArray.length);
}
var cloneTypedArray_default = cloneTypedArray;
 
// node_modules/lodash-es/_initCloneByTag.js
var boolTag2 = "[object Boolean]", dateTag2 = "[object Date]", mapTag3 = "[object Map]", numberTag2 = "[object Number]", regexpTag2 = "[object RegExp]", setTag3 = "[object Set]", stringTag2 = "[object String]", symbolTag2 = "[object Symbol]", arrayBufferTag2 = "[object ArrayBuffer]", dataViewTag3 = "[object DataView]", float32Tag2 = "[object Float32Array]", float64Tag2 = "[object Float64Array]", int8Tag2 = "[object Int8Array]", int16Tag2 = "[object Int16Array]", int32Tag2 = "[object Int32Array]", uint8Tag2 = "[object Uint8Array]", uint8ClampedTag2 = "[object Uint8ClampedArray]", uint16Tag2 = "[object Uint16Array]", uint32Tag2 = "[object Uint32Array]";
function initCloneByTag(object, tag, isDeep) {
var Ctor = object.constructor;
switch (tag) {
case arrayBufferTag2:
return cloneArrayBuffer_default(object);
case boolTag2:
case dateTag2:
return new Ctor(+object);
case dataViewTag3:
return cloneDataView_default(object, isDeep);
case float32Tag2:
case float64Tag2:
case int8Tag2:
case int16Tag2:
case int32Tag2:
case uint8Tag2:
case uint8ClampedTag2:
case uint16Tag2:
case uint32Tag2:
return cloneTypedArray_default(object, isDeep);
case mapTag3:
return new Ctor();
case numberTag2:
case stringTag2:
return new Ctor(object);
case regexpTag2:
return cloneRegExp_default(object);
case setTag3:
return new Ctor();
case symbolTag2:
return cloneSymbol_default(object);
}
}
var initCloneByTag_default = initCloneByTag;
 
// node_modules/lodash-es/_initCloneObject.js
function initCloneObject(object) {
return typeof object.constructor == "function" && !isPrototype_default(object) ? baseCreate_default(getPrototype_default(object)) : {};
}
var initCloneObject_default = initCloneObject;
 
// node_modules/lodash-es/_baseIsMap.js
var mapTag4 = "[object Map]";
function baseIsMap(value) {
return isObjectLike_default(value) && getTag_default(value) == mapTag4;
}
var baseIsMap_default = baseIsMap;
 
// node_modules/lodash-es/isMap.js
var nodeIsMap = nodeUtil_default && nodeUtil_default.isMap, isMap = nodeIsMap ? baseUnary_default(nodeIsMap) : baseIsMap_default, isMap_default = isMap;
 
// node_modules/lodash-es/_baseIsSet.js
var setTag4 = "[object Set]";
function baseIsSet(value) {
return isObjectLike_default(value) && getTag_default(value) == setTag4;
}
var baseIsSet_default = baseIsSet;
 
// node_modules/lodash-es/isSet.js
var nodeIsSet = nodeUtil_default && nodeUtil_default.isSet, isSet = nodeIsSet ? baseUnary_default(nodeIsSet) : baseIsSet_default, isSet_default = isSet;
 
// node_modules/lodash-es/_baseClone.js
var CLONE_DEEP_FLAG = 1, CLONE_FLAT_FLAG = 2, CLONE_SYMBOLS_FLAG = 4, argsTag3 = "[object Arguments]", arrayTag2 = "[object Array]", boolTag3 = "[object Boolean]", dateTag3 = "[object Date]", errorTag2 = "[object Error]", funcTag3 = "[object Function]", genTag2 = "[object GeneratorFunction]", mapTag5 = "[object Map]", numberTag3 = "[object Number]", objectTag3 = "[object Object]", regexpTag3 = "[object RegExp]", setTag5 = "[object Set]", stringTag3 = "[object String]", symbolTag3 = "[object Symbol]", weakMapTag3 = "[object WeakMap]", arrayBufferTag3 = "[object ArrayBuffer]", dataViewTag4 = "[object DataView]", float32Tag3 = "[object Float32Array]", float64Tag3 = "[object Float64Array]", int8Tag3 = "[object Int8Array]", int16Tag3 = "[object Int16Array]", int32Tag3 = "[object Int32Array]", uint8Tag3 = "[object Uint8Array]", uint8ClampedTag3 = "[object Uint8ClampedArray]", uint16Tag3 = "[object Uint16Array]", uint32Tag3 = "[object Uint32Array]", cloneableTags = {};
cloneableTags[argsTag3] = cloneableTags[arrayTag2] = cloneableTags[arrayBufferTag3] = cloneableTags[dataViewTag4] = cloneableTags[boolTag3] = cloneableTags[dateTag3] = cloneableTags[float32Tag3] = cloneableTags[float64Tag3] = cloneableTags[int8Tag3] = cloneableTags[int16Tag3] = cloneableTags[int32Tag3] = cloneableTags[mapTag5] = cloneableTags[numberTag3] = cloneableTags[objectTag3] = cloneableTags[regexpTag3] = cloneableTags[setTag5] = cloneableTags[stringTag3] = cloneableTags[symbolTag3] = cloneableTags[uint8Tag3] = cloneableTags[uint8ClampedTag3] = cloneableTags[uint16Tag3] = cloneableTags[uint32Tag3] = !0;
cloneableTags[errorTag2] = cloneableTags[funcTag3] = cloneableTags[weakMapTag3] = !1;
function baseClone(value, bitmask, customizer, key, object, stack) {
var result, isDeep = bitmask & CLONE_DEEP_FLAG, isFlat = bitmask & CLONE_FLAT_FLAG, isFull = bitmask & CLONE_SYMBOLS_FLAG;
if (customizer && (result = object ? customizer(value, key, object, stack) : customizer(value)), result !== void 0)
return result;
if (!isObject_default(value))
return value;
var isArr = isArray_default(value);
if (isArr) {
if (result = initCloneArray_default(value), !isDeep)
return copyArray_default(value, result);
} else {
var tag = getTag_default(value), isFunc = tag == funcTag3 || tag == genTag2;
if (isBuffer_default(value))
return cloneBuffer_default(value, isDeep);
if (tag == objectTag3 || tag == argsTag3 || isFunc && !object) {
if (result = isFlat || isFunc ? {} : initCloneObject_default(value), !isDeep)
return isFlat ? copySymbolsIn_default(value, baseAssignIn_default(result, value)) : copySymbols_default(value, baseAssign_default(result, value));
} else {
if (!cloneableTags[tag])
return object ? value : {};
result = initCloneByTag_default(value, tag, isDeep);
}
}
stack || (stack = new Stack_default());
var stacked = stack.get(value);
if (stacked)
return stacked;
stack.set(value, result), isSet_default(value) ? value.forEach(function(subValue) {
result.add(baseClone(subValue, bitmask, customizer, subValue, value, stack));
}) : isMap_default(value) && value.forEach(function(subValue, key2) {
result.set(key2, baseClone(subValue, bitmask, customizer, key2, value, stack));
});
var keysFunc = isFull ? isFlat ? getAllKeysIn_default : getAllKeys_default : isFlat ? keysIn_default : keys_default, props = isArr ? void 0 : keysFunc(value);
return arrayEach_default(props || value, function(subValue, key2) {
props && (key2 = subValue, subValue = value[key2]), assignValue_default(result, key2, baseClone(subValue, bitmask, customizer, key2, value, stack));
}), result;
}
var baseClone_default = baseClone;
 
// node_modules/lodash-es/cloneDeep.js
var CLONE_DEEP_FLAG2 = 1, CLONE_SYMBOLS_FLAG2 = 4;
function cloneDeep(value) {
return baseClone_default(value, CLONE_DEEP_FLAG2 | CLONE_SYMBOLS_FLAG2);
}
var cloneDeep_default = cloneDeep;
 
// node_modules/lodash-es/escapeRegExp.js
var reRegExpChar2 = /[\\^$.*+?()[\]{}|]/g, reHasRegExpChar = RegExp(reRegExpChar2.source);
function escapeRegExp(string3) {
return string3 = toString_default(string3), string3 && reHasRegExpChar.test(string3) ? string3.replace(reRegExpChar2, "\\$&") : string3;
}
var escapeRegExp_default = escapeRegExp;
 
// node_modules/nanoid/url-alphabet/index.js
var urlAlphabet = "useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict";
 
// node_modules/nanoid/index.browser.js
var nanoid = (size = 21) => {
let id = "", bytes = crypto.getRandomValues(new Uint8Array(size));
for (; size--; )
id += urlAlphabet[bytes[size] & 63];
return id;
};
 
// src-autolink/core/linker.js
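// processTextNodes: applies `callback` to every plain-text node of the markdown AST
// (skipping text inside links and headings by default) and splices the results back into
// the source string, processing nodes from last to first so earlier offsets stay valid.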
async function processTextNodes(markdownText, callback, {
avoidLinks = !0,
avoidHeaders = !0
} = {}) {
let ast = await parse2(markdownText);
console.log("ast", ast, markdownText);
let flattenedTextNodes = [];
visitParents(ast, "text", (node2, ancestors) => {
let isInsideLink = ancestors.some((ancestor) => ancestor.type === "link");
if (avoidLinks && isInsideLink) return;
let isInsideHeader = ancestors.some((ancestor) => ancestor.type === "heading");
avoidHeaders && isInsideHeader || flattenedTextNodes.push(node2);
}), flattenedTextNodes.sort((a, b) => b.position.start.offset - a.position.start.offset);
let resultMarkdownText = markdownText;
for (let node2 of flattenedTextNodes) {
let linkedText = callback(node2.value);
if (linkedText !== node2.value) {
let start = node2.position.start.offset, end = node2.position.end.offset;
resultMarkdownText = resultMarkdownText.slice(0, start) + linkedText + resultMarkdownText.slice(end);
}
}
return resultMarkdownText;
}
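// addPageLinksToMarkdown: matches each page name as a whole word inside text nodes and swaps
// the match for a nanoid placeholder; originalMap remembers the matched text and replacementMap
// holds the corresponding [name](https://www.amplenote.com/notes/<uuid>) link.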
async function addPageLinksToMarkdown(markdownText, pages) {
let replacementMap = /* @__PURE__ */ new Map(), originalMap = /* @__PURE__ */ new Map(), sortedPages = cloneDeep_default(pages).sort((a, b) => -a.name.localeCompare(b.name)), preReplacementMarkdown = await processTextNodes(markdownText, (text3) => (sortedPages.forEach((page) => {
let pageNameEscaped = escapeRegExp_default(page.name), regex = new RegExp(`((?<=^|\\s|,))(${pageNameEscaped})((,|!|\\.)*?)($|\\s|\\n|,)`, "gi");
text3 = text3.replace(regex, (match, g1, g2, g3, g4, g5) => {
let uuid = g1 + nanoid() + g3 + g4 + g5;
return originalMap.set(uuid, match), replacementMap.set(uuid, `${g1}[${g2}](https://www.amplenote.com/notes/${page.uuid})${g3}${g5}`), uuid;
});
}), text3));
return { originalMap, replacementMap, preReplacementMarkdown };
}
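// addSectionLinksToMarkdown: same placeholder approach as addPageLinksToMarkdown, but for section
// headings (longest heading first); replacements link to the note URL plus #<anchor>.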
async function addSectionLinksToMarkdown(markdownText, sectionsMap) {
let replacementMap = /* @__PURE__ */ new Map(), originalMap = /* @__PURE__ */ new Map(), sortedSections = Object.keys(sectionsMap).sort((a, b) => b.length - a.length), preReplacementMarkdown = await processTextNodes(markdownText, (text3) => (sortedSections.forEach((section) => {
let sectionNameEscaped = escapeRegExp_default(section), regex = new RegExp(`((?<=^|\\s|,))(${sectionNameEscaped})((,|!|\\.)*?)($|\\s|\\n|,)`, "gi");
text3 = text3.replace(regex, (match, g1, g2, g3, g4, g5) => {
let uuid = g1 + nanoid() + g3 + g4 + g5;
return originalMap.set(uuid, match), replacementMap.set(uuid, `${g1}[${section}](https://www.amplenote.com/notes/${sectionsMap[section].noteUUID}#${sectionsMap[section].anchor})${g3}${g5}`), uuid;
});
}), text3));
return { originalMap, replacementMap, preReplacementMarkdown };
}
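// processReplacementMap: substitutes each placeholder in the markdown with its mapped replacement text.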
function processReplacementMap(markdownText, replacementMap) {
let result = markdownText;
return replacementMap.forEach((replacement, uuid) => {
result = result.replace(uuid.trim(), replacement.trim());
}), result;
}
 
// node_modules/unist-util-visit/lib/index.js
function visit(tree, testOrVisitor, visitorOrReverse, maybeReverse) {
let reverse, test, visitor;
typeof testOrVisitor == "function" && typeof visitorOrReverse != "function" ? (test = void 0, visitor = testOrVisitor, reverse = visitorOrReverse) : (test = testOrVisitor, visitor = visitorOrReverse, reverse = maybeReverse), visitParents(tree, test, overload, reverse);
function overload(node2, parents) {
let parent = parents[parents.length - 1], index2 = parent ? parent.children.indexOf(node2) : void 0;
return visitor(node2, index2, parent);
}
}
 
// src-autolink/core/removeLinksFromMarkdown.js
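// removeLinksFromMarkdown: strips markdown links, keeping only their display text; used below to
// verify that autolinking did not alter the note's plain text.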
async function removeLinksFromMarkdown(markdownText) {
let ast = await parse2(markdownText), resultMarkdownText = markdownText, flattenedLinkNodes = [];
visit(ast, "link", (node2, index2, parent) => {
flattenedLinkNodes.push(node2);
}), flattenedLinkNodes.sort((a, b) => b.position.start.offset - a.position.start.offset);
for (let node2 of flattenedLinkNodes) {
let start = node2.position.start.offset, end = node2.position.end.offset;
resultMarkdownText = resultMarkdownText.slice(0, start) + get_default(node2, "children[0].value", "") + resultMarkdownText.slice(end);
}
return resultMarkdownText;
}
 
// src-autolink/plugin.js
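// Plugin entry points: replaceText autolinks the current selection in place; noteOption autolinks
// a whole note after letting the user pick which replacements to apply.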
var plugin = {
replaceText: async function(app, text3) {
try {
let textWithFormatting = app.context.selectionContent;
await plugin._autoLink(app, textWithFormatting, async ({ preReplacementMarkdown, replacementMap, originalMap }) => {
let autoLinkedMarkdown = processReplacementMap(preReplacementMarkdown, replacementMap);
await app.context.replaceSelection(autoLinkedMarkdown);
});
} catch (e) {
await app.alert(e);
}
},
noteOption: async function(app, noteUUID) {
try {
let noteContent = await app.getNoteContent({ uuid: noteUUID });
await plugin._autoLink(app, noteContent, async ({ preReplacementMarkdown, replacementMap, originalMap }) => {
if (replacementMap.size === 0)
return;
let confirmedReplacements = await app.prompt("Select replacements to apply:", {
inputs: Array.from(replacementMap).map(([key, value]) => ({
label: `${originalMap.get(key)} \u279B ${value}`,
type: "checkbox",
value: !0
}))
});
if (!confirmedReplacements) return;
typeof confirmedReplacements == "boolean" && (confirmedReplacements = [confirmedReplacements]);
let finalReplacementMap = /* @__PURE__ */ new Map();
Array.from(replacementMap).forEach(([key, value], index2) => {
confirmedReplacements[index2] ? finalReplacementMap.set(key, value) : finalReplacementMap.set(key, originalMap.get(key));
});
let autoLinkedText = processReplacementMap(preReplacementMarkdown, finalReplacementMap);
await app.replaceNoteContent({ uuid: noteUUID }, autoLinkedText);
let newNoteContent = await app.getNoteContent({ uuid: noteUUID });
(await removeLinksFromMarkdown(newNoteContent)).trim() !== (await removeLinksFromMarkdown(autoLinkedText)).trim() && console.log("Autolinked note content is different from original note content");
});
} catch (e) {
await app.alert(e);
}
},
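// _autoLink: builds page-link replacements and, when the "Autolink Related Notes Section" setting is "true", section-link replacements too, then passes the combined maps to replaceTextFn.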
async _autoLink(app, text3, replaceTextFn) {
try {
let pages = await plugin._getPages(app), { preReplacementMarkdown, replacementMap, originalMap } = await addPageLinksToMarkdown(text3, pages), isAutoLinkSectionsEnabled = (app.settings[AUTOLINK_RELATED_NOTES_SECTION_SETTING] || AUTOLINK_RELATED_NOTES_SECTION_SETTING_DEFAULT) === "true";
if (preReplacementMarkdown !== text3 && !isAutoLinkSectionsEnabled)
await replaceTextFn({ preReplacementMarkdown, replacementMap, originalMap });
else if (isAutoLinkSectionsEnabled) {
let sectionMap = await plugin._getSections(app), { preReplacementMarkdown: preReplacementMarkdown2, replacementMap: replacementMap2, originalMap: originalMap2 } = await addSectionLinksToMarkdown(preReplacementMarkdown, sectionMap);
if (preReplacementMarkdown2 !== text3) {
let preReplacementMarkdownCombined = preReplacementMarkdown2, replacementMapCombined = new Map([...replacementMap, ...replacementMap2]), originalMapCombined = new Map([...originalMap, ...originalMap2]);
await replaceTextFn({ preReplacementMarkdown: preReplacementMarkdownCombined, replacementMap: replacementMapCombined, originalMap: originalMapCombined });
}
}
} catch (e) {
throw e;
}
},
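// _getPages: returns notes whose names are non-empty and at least as long as the "Min Page Name Length" setting.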
async _getPages(app) {
try {
let nonEmptyPages = (await app.filterNotes({})).filter((page) => page.name != null && typeof page.name == "string" && page.name.trim() !== "");
return app.settings[MIN_PAGE_LENGTH_SETTING] = app.settings[MIN_PAGE_LENGTH_SETTING] || MIN_PAGE_LENGTH_SETTING_DEFAULT, nonEmptyPages.filter((page) => page.name.length >= app.settings[MIN_PAGE_LENGTH_SETTING]);
} catch (e) {
throw "Failed _getSortedPages - " + e;
}
},
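// _getSections: collects headings (text + anchor) from the current note, its backlinks and its forward links, keyed by heading text.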
async _getSections(app) {
try {
let currentNoteBacklinks = await app.getNoteBacklinks({ uuid: app.context.noteUUID }), currentNoteForwardLinks = [], currentPageContent = await app.getNoteContent({ uuid: app.context.noteUUID });
for (let uuid of await getNoteLinksUUIDFromMarkdown(currentPageContent)) {
let page = await app.findNote({ uuid });
page && currentNoteForwardLinks.push(page);
}
let currentPage = await app.findNote({ uuid: app.context.noteUUID });
app.settings[MIN_PAGE_LENGTH_SETTING] = app.settings[MIN_PAGE_LENGTH_SETTING] || MIN_PAGE_LENGTH_SETTING_DEFAULT;
let sectionMap = {};
for (let note of [...currentNoteBacklinks, ...currentNoteForwardLinks, currentPage])
note.uuid && (await app.getNoteSections({ uuid: note.uuid }) || []).forEach((section) => {
section && section.heading && section.heading.text && section.heading.text.length > app.settings[MIN_PAGE_LENGTH_SETTING] && section.heading.text.trim() !== "" && section.heading.anchor && section.heading.anchor.length > app.settings[MIN_PAGE_LENGTH_SETTING] && section.heading.anchor.trim() !== "" && (sectionMap[section.heading.text] || (sectionMap[section.heading.text] = {
anchor: section.heading.anchor,
noteUUID: note.uuid
}));
});
return console.log(sectionMap), sectionMap;
} catch (e) {
throw "Failed getSortedSections - " + e;
}
}
}, plugin_default = plugin;
return plugin;
})()
//# sourceMappingURL=plugin.js.map
 


linkChangelog

15/07/2023: First version

05/04/2024: Added support for retaining markdown formatting

28/07/2024: Introduced full-page noteOption. Greatly improved markdown support. Also added experimental support for section linking.

13/09/2024: Reduced bundle size and fixed bugs. Also added replacement selection support when triggering the plugin from noteOption.

28/09/2024: Reduced bundle size.