codigo0/node_modules/.vite/deps/react-markdown.js
planetazuzu 5d7a6500fe refactor: Fase 1 - Clean Architecture, refactorización modular y eliminación de duplicidades
-  Ticket 1.1: Estructura Clean Architecture en backend
-  Ticket 1.2: Schemas Zod compartidos
-  Ticket 1.3: Refactorización drugs.ts (1362 → 8 archivos modulares)
-  Ticket 1.4: Refactorización procedures.ts (3583 → 6 archivos modulares)
-  Ticket 1.5: Eliminación de duplicidades (~50 líneas)

Cambios principales:
- Creada estructura Clean Architecture en backend/src/
- Schemas Zod compartidos en backend/src/shared/schemas/
- Refactorización modular de drugs y procedures
- Utilidades genéricas en src/utils/ (filter, validation)
- Eliminados scripts obsoletos y documentación antigua
- Corregidos errores: QueryClient, import test-error-handling
- Build verificado y funcionando correctamente
2026-01-25 21:09:47 +01:00

7914 lines
228 KiB
JavaScript
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

import {
VFileMessage,
ok,
pointEnd,
pointStart,
position,
stringifyPosition,
toJsxRuntime,
unreachable
} from "./chunk-3UIXA6L7.js";
import {
require_jsx_runtime
} from "./chunk-4UTF2CDO.js";
import {
require_react
} from "./chunk-BXEBRY3I.js";
import {
__commonJS,
__export,
__toESM
} from "./chunk-V4OQ3NZ2.js";
// node_modules/extend/index.js
// Lazily-registered CommonJS module: the npm `extend` package (jQuery-style
// deep/shallow object merge). `__commonJS` wraps the factory so it runs once
// on first require.
var require_extend = __commonJS({
"node_modules/extend/index.js"(exports, module) {
"use strict";
var hasOwn = Object.prototype.hasOwnProperty;
var toStr = Object.prototype.toString;
var defineProperty = Object.defineProperty;
var gOPD = Object.getOwnPropertyDescriptor;
// Array check with a fallback for environments lacking Array.isArray.
var isArray = function isArray2(arr) {
if (typeof Array.isArray === "function") {
return Array.isArray(arr);
}
return toStr.call(arr) === "[object Array]";
};
// True only for plain objects (literals / near-literals); rejects class
// instances whose constructor prototype lacks `isPrototypeOf`.
var isPlainObject2 = function isPlainObject3(obj) {
if (!obj || toStr.call(obj) !== "[object Object]") {
return false;
}
var hasOwnConstructor = hasOwn.call(obj, "constructor");
var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, "isPrototypeOf");
if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) {
return false;
}
// Own properties enumerate first, so if the last enumerated key is an own
// property (or there are no keys), all keys are own properties.
var key;
for (key in obj) {
}
return typeof key === "undefined" || hasOwn.call(obj, key);
};
// Assign via defineProperty when the key is "__proto__" so the assignment
// creates a data property instead of mutating the prototype chain.
var setProperty = function setProperty2(target, options) {
if (defineProperty && options.name === "__proto__") {
defineProperty(target, options.name, {
enumerable: true,
configurable: true,
value: options.newValue,
writable: true
});
} else {
target[options.name] = options.newValue;
}
};
// Read "__proto__" only as an own property (guards against walking up the
// prototype chain / prototype pollution).
var getProperty = function getProperty2(obj, name) {
if (name === "__proto__") {
if (!hasOwn.call(obj, name)) {
return void 0;
} else if (gOPD) {
return gOPD(obj, name).value;
}
}
return obj[name];
};
// extend([deep], target, ...sources): copy enumerable properties of each
// source onto target. A leading boolean `true` enables recursive merging of
// plain objects and arrays. Returns the (possibly replaced) target.
module.exports = function extend2() {
var options, name, src, copy, copyIsArray, clone;
var target = arguments[0];
var i = 1;
var length = arguments.length;
var deep = false;
if (typeof target === "boolean") {
deep = target;
target = arguments[1] || {};
i = 2;
}
if (target == null || typeof target !== "object" && typeof target !== "function") {
target = {};
}
for (; i < length; ++i) {
options = arguments[i];
if (options != null) {
for (name in options) {
src = getProperty(target, name);
copy = getProperty(options, name);
// Skip self-assignment to avoid infinite recursion.
if (target !== copy) {
if (deep && copy && (isPlainObject2(copy) || (copyIsArray = isArray(copy)))) {
if (copyIsArray) {
copyIsArray = false;
clone = src && isArray(src) ? src : [];
} else {
clone = src && isPlainObject2(src) ? src : {};
}
setProperty(target, { name, newValue: extend2(deep, clone, copy) });
} else if (typeof copy !== "undefined") {
setProperty(target, { name, newValue: copy });
}
}
}
}
}
return target;
};
}
});
// node_modules/html-url-attributes/lib/index.js
// html-url-attributes: map of HTML attribute name -> tag names on which that
// attribute holds a URL (null = URL-valued on any element). Presumably used
// by react-markdown to decide which props need URL sanitizing — the consumer
// is outside this chunk.
var urlAttributes = {
action: ["form"],
cite: ["blockquote", "del", "ins", "q"],
data: ["object"],
formAction: ["button", "input"],
href: ["a", "area", "base", "link"],
icon: ["menuitem"],
itemId: null,
manifest: ["html"],
ping: ["a", "area"],
poster: ["video"],
src: [
"audio",
"embed",
"iframe",
"img",
"input",
"script",
"source",
"track",
"video"
]
};
// node_modules/react-markdown/lib/index.js
// Interop wrappers: load the CommonJS jsx-runtime and react chunks as
// ESM-style namespace objects.
var import_jsx_runtime = __toESM(require_jsx_runtime(), 1);
var import_react = __toESM(require_react(), 1);
// node_modules/mdast-util-to-string/lib/index.js
// mdast-util-to-string: serialize an mdast node (or array of nodes) to the
// plain text it contains. Options: `includeImageAlt` and `includeHtml`
// (both default true).
var emptyOptions = {};
function toString(value, options) {
  const config = options || emptyOptions;
  const withImageAlt = typeof config.includeImageAlt === "boolean" ? config.includeImageAlt : true;
  const withHtml = typeof config.includeHtml === "boolean" ? config.includeHtml : true;
  return one(value, withImageAlt, withHtml);
}
// Serialize a single node: literal value, image alt, or children, in that
// order of preference; arrays are flattened; anything else yields "".
function one(value, includeImageAlt, includeHtml) {
  if (node(value)) {
    if ("value" in value) {
      return value.type === "html" && !includeHtml ? "" : value.value;
    }
    if (includeImageAlt && "alt" in value && value.alt) {
      return value.alt;
    }
    if ("children" in value) {
      return all(value.children, includeImageAlt, includeHtml);
    }
  }
  return Array.isArray(value) ? all(value, includeImageAlt, includeHtml) : "";
}
// Serialize a list of nodes and concatenate the results.
function all(values, includeImageAlt, includeHtml) {
  return values.map((child) => one(child, includeImageAlt, includeHtml)).join("");
}
// Loose node check: any non-null object counts.
function node(value) {
  return Boolean(value && typeof value === "object");
}
// node_modules/decode-named-character-reference/index.dom.js
// decode-named-character-reference (DOM build): decode a named HTML character
// reference (e.g. "amp" -> "&") by letting the browser parse "&name;" inside
// a throwaway <i> element. Returns the decoded string, or `false` when the
// name is not a known reference.
var element = document.createElement("i");
function decodeNamedCharacterReference(value) {
const characterReference2 = "&" + value + ";";
element.innerHTML = characterReference2;
const character = element.textContent;
if (
// @ts-expect-error: TypeScript is wrong that `textContent` on elements can
// yield `null`.
// A trailing ";" (code 59) means the browser did not decode the reference —
// except for "semi", whose decoded value legitimately ends in ";".
character.charCodeAt(character.length - 1) === 59 && value !== "semi"
) {
return false;
}
// Output identical to input also means "unknown reference".
return character === characterReference2 ? false : character;
}
// node_modules/micromark-util-chunked/index.js
// micromark-util-chunked: `Array#splice`/append helpers that sidestep the
// engine's argument-count limit by inserting `items` in chunks of at most
// 10 000 elements.
function splice(list3, start, remove, items) {
  const end = list3.length;
  let chunkStart = 0;
  // Normalize `start` the way Array#splice does: negative counts from the
  // end, and both directions clamp to the array bounds.
  if (start < 0) {
    start = -start > end ? 0 : end + start;
  } else if (start > end) {
    start = end;
  }
  if (!(remove > 0)) {
    remove = 0;
  }
  if (items.length < 1e4) {
    // Small enough to pass as spread arguments in one call.
    list3.splice(start, remove, ...Array.from(items));
  } else {
    // Delete first, then insert chunk by chunk to stay under the limit.
    if (remove) list3.splice(start, remove);
    while (chunkStart < items.length) {
      const chunk = items.slice(chunkStart, chunkStart + 1e4);
      list3.splice(start, 0, ...chunk);
      chunkStart += 1e4;
      start += 1e4;
    }
  }
}
// Append `items` to `list3` in place; when `list3` is empty, return `items`
// itself (callers treat the return value as the new list).
function push(list3, items) {
  if (list3.length === 0) {
    return items;
  }
  splice(list3, list3.length, 0, items);
  return list3;
}
// node_modules/micromark-util-combine-extensions/index.js
// micromark-util-combine-extensions: merge several syntax extensions into a
// single extension object.
var hasOwnProperty = {}.hasOwnProperty;
function combineExtensions(extensions) {
const all2 = {};
let index = -1;
while (++index < extensions.length) {
syntaxExtension(all2, extensions[index]);
}
return all2;
}
// Merge one extension into the accumulator: for every hook (e.g. `flow`,
// `text`), merge its constructs per character code.
function syntaxExtension(all2, extension2) {
let hook;
for (hook in extension2) {
const maybe = hasOwnProperty.call(all2, hook) ? all2[hook] : void 0;
const left = maybe || (all2[hook] = {});
const right = extension2[hook];
let code2;
if (right) {
for (code2 in right) {
if (!hasOwnProperty.call(left, code2)) left[code2] = [];
const value = right[code2];
// Normalize a single construct to a one-element list.
constructs(
// @ts-expect-error Looks like a list.
left[code2],
Array.isArray(value) ? value : value ? [value] : []
);
}
}
}
}
// Add constructs to `existing`: entries marked `add: "after"` are appended
// directly; all others are collected and prepended in order.
function constructs(existing, list3) {
let index = -1;
const before = [];
while (++index < list3.length) {
;
(list3[index].add === "after" ? existing : before).push(list3[index]);
}
splice(existing, 0, 0, before);
}
// node_modules/micromark-util-decode-numeric-character-reference/index.js
// micromark-util-decode-numeric-character-reference: decode the digits of a
// numeric character reference (e.g. "123" with base 10, "7B" with base 16)
// into the corresponding character. Prohibited or out-of-range code points
// yield U+FFFD REPLACEMENT CHARACTER, per the HTML spec and upstream
// micromark. (The bundled copy had the "\uFFFD" literal mojibake-corrupted
// into the four characters `<22>`; restored here.)
function decodeNumericCharacterReference(value, base) {
  const code2 = Number.parseInt(value, base);
  if (
    // C0 except for HT, LF, FF, CR, space.
    code2 < 9 || code2 === 11 || code2 > 13 && code2 < 32 || // Control character (DEL) of C0, and C1 controls.
    code2 > 126 && code2 < 160 || // Lone high surrogates and low surrogates.
    code2 > 55295 && code2 < 57344 || // Noncharacters.
    code2 > 64975 && code2 < 65008 || /* eslint-disable no-bitwise */
    (code2 & 65535) === 65535 || (code2 & 65535) === 65534 || /* eslint-enable no-bitwise */
    // Out of range
    code2 > 1114111
  ) {
    return "\uFFFD";
  }
  return String.fromCodePoint(code2);
}
// node_modules/micromark-util-normalize-identifier/index.js
// micromark-util-normalize-identifier: normalize a link/definition label for
// matching — collapse runs of markdown whitespace into one space, strip a
// single leading/trailing space, then case-fold by lowercasing followed by
// uppercasing (the round-trip handles characters like ß consistently).
function normalizeIdentifier(value) {
  const collapsed = value.replace(/[\t\n\r ]+/g, " ");
  const trimmed = collapsed.replace(/^ | $/g, "");
  return trimmed.toLowerCase().toUpperCase();
}
// node_modules/micromark-util-character/index.js
// micromark-util-character: predicates over micromark character codes.
// Codes below 0 are virtual markers (line endings / tab expansions) and
// `null` is EOF; real characters carry their char code.
// Build a predicate that matches a single real character against `regex`.
function regexCheck(regex) {
  return function check(code2) {
    return code2 !== null && code2 > -1 && regex.test(String.fromCharCode(code2));
  };
}
var asciiAlpha = regexCheck(/[A-Za-z]/);
var asciiAlphanumeric = regexCheck(/[\dA-Za-z]/);
var asciiAtext = regexCheck(/[#-'*+\--9=?A-Z^-~]/);
var asciiDigit = regexCheck(/\d/);
var asciiHexDigit = regexCheck(/[\dA-Fa-f]/);
var asciiPunctuation = regexCheck(/[!-/:-@[-`{-~]/);
var unicodePunctuation = regexCheck(/\p{P}|\p{S}/u);
var unicodeWhitespace = regexCheck(/\s/);
// C0 controls (including the negative virtual codes) and DEL.
function asciiControl(code2) {
  if (code2 === null) {
    return false;
  }
  return code2 < 32 || code2 === 127;
}
// Virtual line-ending codes sit below -2.
function markdownLineEnding(code2) {
  return code2 !== null && code2 < -2;
}
function markdownLineEndingOrSpace(code2) {
  return code2 !== null && (code2 < 0 || code2 === 32);
}
// -2/-1 are virtual tab/space codes; 32 is a literal space.
function markdownSpace(code2) {
  return code2 === -2 || code2 === -1 || code2 === 32;
}
// node_modules/micromark-util-sanitize-uri/index.js
// micromark-util-sanitize-uri (normalizeUri): percent-encode characters that
// are unsafe in a URL while leaving existing well-formed `%XX` escapes
// intact. Lone surrogate halves are replaced with U+FFFD REPLACEMENT
// CHARACTER, matching upstream micromark. (The bundled copy had the
// "\uFFFD" literal mojibake-corrupted into the four characters `<22>`;
// restored here.)
function normalizeUri(value) {
  const result = [];
  let index = -1;
  let start = 0;
  let skip = 0;
  while (++index < value.length) {
    const code2 = value.charCodeAt(index);
    let replace = "";
    // `%` (37) followed by two alphanumerics: keep the escape as-is.
    if (code2 === 37 && asciiAlphanumeric(value.charCodeAt(index + 1)) && asciiAlphanumeric(value.charCodeAt(index + 2))) {
      skip = 2;
    } else if (code2 < 128) {
      // ASCII outside the URL-safe set must be encoded.
      if (!/[!#$&-;=?-Z_a-z~]/.test(String.fromCharCode(code2))) {
        replace = String.fromCharCode(code2);
      }
    } else if (code2 > 55295 && code2 < 57344) {
      const next = value.charCodeAt(index + 1);
      if (code2 < 56320 && next > 56319 && next < 57344) {
        // Valid surrogate pair: encode both halves together.
        replace = String.fromCharCode(code2, next);
        skip = 1;
      } else {
        // Lone surrogate: substitute the replacement character.
        replace = "\uFFFD";
      }
    } else {
      // Other non-ASCII: encode.
      replace = String.fromCharCode(code2);
    }
    if (replace) {
      result.push(value.slice(start, index), encodeURIComponent(replace));
      start = index + skip + 1;
      replace = "";
    }
    if (skip) {
      index += skip;
      skip = 0;
    }
  }
  return result.join("") + value.slice(start);
}
// node_modules/micromark/lib/compile.js
// Cached own-property check used by the micromark compile step.
var hasOwnProperty2 = {}.hasOwnProperty;
// node_modules/micromark-factory-space/index.js
// micromark-factory-space: state-machine factory that consumes a run of
// markdown spaces/tabs into a token of `type` (at most `max - 1` characters;
// unbounded when `max` is absent), then continues with `ok3`.
function factorySpace(effects, ok3, type, max) {
const limit = max ? max - 1 : Number.POSITIVE_INFINITY;
let size = 0;
return start;
function start(code2) {
if (markdownSpace(code2)) {
effects.enter(type);
return prefix(code2);
}
return ok3(code2);
}
function prefix(code2) {
if (markdownSpace(code2) && size++ < limit) {
effects.consume(code2);
return prefix;
}
effects.exit(type);
return ok3(code2);
}
}
// node_modules/micromark/lib/initialize/content.js
// micromark content initializer: parses top-level content as a sequence of
// `contentInitial` constructs (e.g. definitions) followed by paragraph text,
// emitted line by line as linked `chunkText` tokens.
var content = {
tokenize: initializeContent
};
function initializeContent(effects) {
const contentStart = effects.attempt(this.parser.constructs.contentInitial, afterContentStartConstruct, paragraphInitial);
let previous2;
return contentStart;
function afterContentStartConstruct(code2) {
if (code2 === null) {
effects.consume(code2);
return;
}
effects.enter("lineEnding");
effects.consume(code2);
effects.exit("lineEnding");
// After a construct + line ending, try more constructs on the next line.
return factorySpace(effects, contentStart, "linePrefix");
}
function paragraphInitial(code2) {
effects.enter("paragraph");
return lineStart(code2);
}
// Open a `chunkText` token for one line and link it to the previous chunk.
function lineStart(code2) {
const token = effects.enter("chunkText", {
contentType: "text",
previous: previous2
});
if (previous2) {
previous2.next = token;
}
previous2 = token;
return data(code2);
}
function data(code2) {
if (code2 === null) {
effects.exit("chunkText");
effects.exit("paragraph");
effects.consume(code2);
return;
}
if (markdownLineEnding(code2)) {
effects.consume(code2);
effects.exit("chunkText");
return lineStart;
}
effects.consume(code2);
return data;
}
}
// node_modules/micromark/lib/initialize/document.js
// micromark document initializer: top-level tokenizer driving container
// blocks (block quotes, lists).
var document2 = {
tokenize: initializeDocument
};
// Construct used to probe whether a new container starts at this position.
var containerConstruct = {
tokenize: tokenizeContainer
};
// Document tokenizer: each line, try to continue the open containers, then
// probe for new ones, and feed the rest of the line to a child "flow"
// tokenizer. `stack` holds open containers as [construct, containerState]
// pairs; `continued` counts how many continued on the current line.
function initializeDocument(effects) {
const self2 = this;
const stack = [];
let continued = 0;
let childFlow;
let childToken;
let lineStartOffset;
return start;
// Try to continue each already-open container, in order.
function start(code2) {
if (continued < stack.length) {
const item = stack[continued];
self2.containerState = item[1];
return effects.attempt(item[0].continuation, documentContinue, checkNewContainers)(code2);
}
return checkNewContainers(code2);
}
function documentContinue(code2) {
continued++;
// A container asked to be closed (`_closeFlow`): close the child flow and
// rewrite the event list so container exits line up with the flow's end.
if (self2.containerState._closeFlow) {
self2.containerState._closeFlow = void 0;
if (childFlow) {
closeFlow();
}
const indexBeforeExits = self2.events.length;
let indexBeforeFlow = indexBeforeExits;
let point2;
// Find the last `chunkFlow` exit to learn where the flow actually ended.
while (indexBeforeFlow--) {
if (self2.events[indexBeforeFlow][0] === "exit" && self2.events[indexBeforeFlow][1].type === "chunkFlow") {
point2 = self2.events[indexBeforeFlow][1].end;
break;
}
}
exitContainers(continued);
// Patch the end point of the container-exit events just produced.
let index = indexBeforeExits;
while (index < self2.events.length) {
self2.events[index][1].end = {
...point2
};
index++;
}
// Move those exits to just after the `chunkFlow` exit.
splice(self2.events, indexBeforeFlow + 1, 0, self2.events.slice(indexBeforeExits));
self2.events.length = index;
return checkNewContainers(code2);
}
return start(code2);
}
function checkNewContainers(code2) {
// All open containers continued on this line.
if (continued === stack.length) {
if (!childFlow) {
return documentContinued(code2);
}
// Concrete constructs (e.g. fenced code) cannot be interrupted.
if (childFlow.currentConstruct && childFlow.currentConstruct.concrete) {
return flowStart(code2);
}
self2.interrupt = Boolean(childFlow.currentConstruct && !childFlow._gfmTableDynamicInterruptHack);
}
self2.containerState = {};
return effects.check(containerConstruct, thereIsANewContainer, thereIsNoNewContainer)(code2);
}
function thereIsANewContainer(code2) {
if (childFlow) closeFlow();
exitContainers(continued);
return documentContinued(code2);
}
function thereIsNoNewContainer(code2) {
// A line is "lazy" when fewer containers continued than are open.
self2.parser.lazy[self2.now().line] = continued !== stack.length;
lineStartOffset = self2.now().offset;
return flowStart(code2);
}
function documentContinued(code2) {
self2.containerState = {};
return effects.attempt(containerConstruct, containerContinue, flowStart)(code2);
}
function containerContinue(code2) {
continued++;
stack.push([self2.currentConstruct, self2.containerState]);
return documentContinued(code2);
}
function flowStart(code2) {
if (code2 === null) {
if (childFlow) closeFlow();
exitContainers(0);
effects.consume(code2);
return;
}
childFlow = childFlow || self2.parser.flow(self2.now());
effects.enter("chunkFlow", {
_tokenizer: childFlow,
contentType: "flow",
previous: childToken
});
return flowContinue(code2);
}
function flowContinue(code2) {
if (code2 === null) {
writeToChild(effects.exit("chunkFlow"), true);
exitContainers(0);
effects.consume(code2);
return;
}
if (markdownLineEnding(code2)) {
effects.consume(code2);
writeToChild(effects.exit("chunkFlow"));
continued = 0;
self2.interrupt = void 0;
return start;
}
effects.consume(code2);
return flowContinue;
}
// Feed the chunk into the child flow tokenizer; on lazy lines, perform the
// same event surgery as `documentContinue` to keep exits consistent.
function writeToChild(token, endOfFile) {
const stream = self2.sliceStream(token);
if (endOfFile) stream.push(null);
token.previous = childToken;
if (childToken) childToken.next = token;
childToken = token;
childFlow.defineSkip(token.start);
childFlow.write(stream);
if (self2.parser.lazy[token.start.line]) {
let index = childFlow.events.length;
while (index--) {
if (
// The token starts before the line ending…
childFlow.events[index][1].start.offset < lineStartOffset && // …and either is not ended yet…
(!childFlow.events[index][1].end || // …or ends after it.
childFlow.events[index][1].end.offset > lineStartOffset)
) {
return;
}
}
const indexBeforeExits = self2.events.length;
let indexBeforeFlow = indexBeforeExits;
let seen;
let point2;
// Find the second-to-last `chunkFlow` exit (the one before the lazy line).
while (indexBeforeFlow--) {
if (self2.events[indexBeforeFlow][0] === "exit" && self2.events[indexBeforeFlow][1].type === "chunkFlow") {
if (seen) {
point2 = self2.events[indexBeforeFlow][1].end;
break;
}
seen = true;
}
}
exitContainers(continued);
index = indexBeforeExits;
while (index < self2.events.length) {
self2.events[index][1].end = {
...point2
};
index++;
}
splice(self2.events, indexBeforeFlow + 1, 0, self2.events.slice(indexBeforeExits));
self2.events.length = index;
}
}
// Close the topmost containers until only `size` remain open.
function exitContainers(size) {
let index = stack.length;
while (index-- > size) {
const entry = stack[index];
self2.containerState = entry[1];
entry[0].exit.call(self2, effects);
}
stack.length = size;
}
// Flush and discard the child flow tokenizer.
function closeFlow() {
childFlow.write([null]);
childToken = void 0;
childFlow = void 0;
self2.containerState._closeFlow = void 0;
}
}
// Probe for a container start after an optional line prefix (limited to
// under 4 spaces unless indented code is disabled).
function tokenizeContainer(effects, ok3, nok) {
return factorySpace(effects, effects.attempt(this.parser.constructs.document, ok3, nok), "linePrefix", this.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4);
}
// node_modules/micromark-util-classify-character/index.js
// micromark-util-classify-character: 1 = whitespace (or EOF), 2 =
// punctuation/symbol, undefined = everything else. Used by the attention
// (emphasis/strong) flanking rules.
function classifyCharacter(code2) {
if (code2 === null || markdownLineEndingOrSpace(code2) || unicodeWhitespace(code2)) {
return 1;
}
if (unicodePunctuation(code2)) {
return 2;
}
}
// node_modules/micromark-util-resolve-all/index.js
// micromark-util-resolve-all: run each distinct `resolveAll` handler found in
// `constructs2` exactly once over `events`, threading the (possibly
// replaced) event list through every call.
function resolveAll(constructs2, events, context) {
  const seen = [];
  for (const construct of constructs2) {
    const resolve = construct.resolveAll;
    // Skip constructs without a handler and handlers already invoked.
    if (resolve && !seen.includes(resolve)) {
      events = resolve(events, context);
      seen.push(resolve);
    }
  }
  return events;
}
// node_modules/micromark-core-commonmark/lib/attention.js
// micromark attention construct: emphasis (`*a*`) and strong (`**a**`).
var attention = {
name: "attention",
resolveAll: resolveAllAttention,
tokenize: tokenizeAttention
};
// Pair each closing `attentionSequence` with the nearest compatible opener
// (same marker, CommonMark "multiple of 3" rule), splitting the marker runs
// and rewriting the events into emphasis/strong groups. Unpaired sequences
// become plain data at the end.
function resolveAllAttention(events, context) {
let index = -1;
let open;
let group;
let text4;
let openingSequence;
let closingSequence;
let use;
let nextEvents;
let offset;
while (++index < events.length) {
if (events[index][0] === "enter" && events[index][1].type === "attentionSequence" && events[index][1]._close) {
open = index;
// Walk backwards for an opener with the same marker character.
while (open--) {
if (events[open][0] === "exit" && events[open][1].type === "attentionSequence" && events[open][1]._open && // If the markers are the same:
context.sliceSerialize(events[open][1]).charCodeAt(0) === context.sliceSerialize(events[index][1]).charCodeAt(0)) {
// CommonMark "rule of 3": sequences that can both open and close must
// not pair when their combined length is a multiple of 3, unless both
// lengths are themselves multiples of 3.
if ((events[open][1]._close || events[index][1]._open) && (events[index][1].end.offset - events[index][1].start.offset) % 3 && !((events[open][1].end.offset - events[open][1].start.offset + events[index][1].end.offset - events[index][1].start.offset) % 3)) {
continue;
}
// Consume 2 markers from each side (strong) when both runs have 2+,
// otherwise 1 (emphasis).
use = events[open][1].end.offset - events[open][1].start.offset > 1 && events[index][1].end.offset - events[index][1].start.offset > 1 ? 2 : 1;
const start = {
...events[open][1].end
};
const end = {
...events[index][1].start
};
movePoint(start, -use);
movePoint(end, use);
openingSequence = {
type: use > 1 ? "strongSequence" : "emphasisSequence",
start,
end: {
...events[open][1].end
}
};
closingSequence = {
type: use > 1 ? "strongSequence" : "emphasisSequence",
start: {
...events[index][1].start
},
end
};
text4 = {
type: use > 1 ? "strongText" : "emphasisText",
start: {
...events[open][1].end
},
end: {
...events[index][1].start
}
};
group = {
type: use > 1 ? "strong" : "emphasis",
start: {
...openingSequence.start
},
end: {
...closingSequence.end
}
};
// Shrink the original sequences by the markers consumed above.
events[open][1].end = {
...openingSequence.start
};
events[index][1].start = {
...closingSequence.end
};
nextEvents = [];
// Leftover opening markers stay as a (shorter) attention sequence.
if (events[open][1].end.offset - events[open][1].start.offset) {
nextEvents = push(nextEvents, [["enter", events[open][1], context], ["exit", events[open][1], context]]);
}
nextEvents = push(nextEvents, [["enter", group, context], ["enter", openingSequence, context], ["exit", openingSequence, context], ["enter", text4, context]]);
// Resolve the span between the sequences (e.g. nested attention).
nextEvents = push(nextEvents, resolveAll(context.parser.constructs.insideSpan.null, events.slice(open + 1, index), context));
nextEvents = push(nextEvents, [["exit", text4, context], ["enter", closingSequence, context], ["exit", closingSequence, context], ["exit", group, context]]);
// Leftover closing markers likewise survive as a shorter sequence.
if (events[index][1].end.offset - events[index][1].start.offset) {
offset = 2;
nextEvents = push(nextEvents, [["enter", events[index][1], context], ["exit", events[index][1], context]]);
} else {
offset = 0;
}
splice(events, open - 1, index - open + 3, nextEvents);
index = open + nextEvents.length - offset - 2;
break;
}
}
}
}
// Anything still marked as an attention sequence did not pair: plain data.
index = -1;
while (++index < events.length) {
if (events[index][1].type === "attentionSequence") {
events[index][1].type = "data";
}
}
return events;
}
// Tokenize a run of identical attention markers and record — based on the
// character classes before and after the run — whether it can open and/or
// close emphasis/strong (CommonMark left-/right-flanking rules).
function tokenizeAttention(effects, ok3) {
const attentionMarkers2 = this.parser.constructs.attentionMarkers.null;
const previous2 = this.previous;
const before = classifyCharacter(previous2);
let marker;
return start;
function start(code2) {
marker = code2;
effects.enter("attentionSequence");
return inside(code2);
}
function inside(code2) {
if (code2 === marker) {
effects.consume(code2);
return inside;
}
const token = effects.exit("attentionSequence");
const after = classifyCharacter(code2);
const open = !after || after === 2 && before || attentionMarkers2.includes(code2);
const close = !before || before === 2 && after || attentionMarkers2.includes(previous2);
// `*` (42) opens/closes freely; `_` additionally requires punctuation or
// whitespace on the outer side.
token._open = Boolean(marker === 42 ? open : open && (before || !close));
token._close = Boolean(marker === 42 ? close : close && (after || !open));
return ok3(code2);
}
}
// Shift a parse point by `offset` characters within its current chunk,
// mutating the point in place.
function movePoint(point2, offset) {
  for (const key of ["column", "offset", "_bufferIndex"]) {
    point2[key] += offset;
  }
}
// node_modules/micromark-core-commonmark/lib/autolink.js
// micromark autolink construct: `<https://example.com>` and
// `<user@example.com>`.
var autolink = {
name: "autolink",
tokenize: tokenizeAutolink
};
function tokenizeAutolink(effects, ok3, nok) {
let size = 0;
return start;
// At the opening `<`.
function start(code2) {
effects.enter("autolink");
effects.enter("autolinkMarker");
effects.consume(code2);
effects.exit("autolinkMarker");
effects.enter("autolinkProtocol");
return open;
}
function open(code2) {
if (asciiAlpha(code2)) {
effects.consume(code2);
return schemeOrEmailAtext;
}
// `@` (64) cannot start an email address.
if (code2 === 64) {
return nok(code2);
}
return emailAtext(code2);
}
function schemeOrEmailAtext(code2) {
// `+` (43), `-` (45), `.` (46) or alphanumeric: could still be a scheme.
if (code2 === 43 || code2 === 45 || code2 === 46 || asciiAlphanumeric(code2)) {
size = 1;
return schemeInsideOrEmailAtext(code2);
}
return emailAtext(code2);
}
function schemeInsideOrEmailAtext(code2) {
// `:` (58) ends the scheme; switch to URL mode.
if (code2 === 58) {
effects.consume(code2);
size = 0;
return urlInside;
}
// Schemes are limited to 32 characters.
if ((code2 === 43 || code2 === 45 || code2 === 46 || asciiAlphanumeric(code2)) && size++ < 32) {
effects.consume(code2);
return schemeInsideOrEmailAtext;
}
size = 0;
return emailAtext(code2);
}
function urlInside(code2) {
// `>` (62) closes the autolink.
if (code2 === 62) {
effects.exit("autolinkProtocol");
effects.enter("autolinkMarker");
effects.consume(code2);
effects.exit("autolinkMarker");
effects.exit("autolink");
return ok3;
}
// EOF, space (32), `<` (60), and controls are not allowed in the URL.
if (code2 === null || code2 === 32 || code2 === 60 || asciiControl(code2)) {
return nok(code2);
}
effects.consume(code2);
return urlInside;
}
function emailAtext(code2) {
// `@` (64) separates local part from domain.
if (code2 === 64) {
effects.consume(code2);
return emailAtSignOrDot;
}
if (asciiAtext(code2)) {
effects.consume(code2);
return emailAtext;
}
return nok(code2);
}
function emailAtSignOrDot(code2) {
// A domain label must start with an alphanumeric.
return asciiAlphanumeric(code2) ? emailLabel(code2) : nok(code2);
}
function emailLabel(code2) {
// `.` (46) starts a new domain label.
if (code2 === 46) {
effects.consume(code2);
size = 0;
return emailAtSignOrDot;
}
// `>` (62): relabel the protocol token as an email autolink and close.
if (code2 === 62) {
effects.exit("autolinkProtocol").type = "autolinkEmail";
effects.enter("autolinkMarker");
effects.consume(code2);
effects.exit("autolinkMarker");
effects.exit("autolink");
return ok3;
}
return emailValue(code2);
}
function emailValue(code2) {
// Labels: max 63 characters; a label cannot end with `-` (45).
if ((code2 === 45 || asciiAlphanumeric(code2)) && size++ < 63) {
const next = code2 === 45 ? emailValue : emailLabel;
effects.consume(code2);
return next;
}
return nok(code2);
}
}
// node_modules/micromark-core-commonmark/lib/blank-line.js
// micromark blank-line construct: optional whitespace followed by a line
// ending or EOF.
var blankLine = {
partial: true,
tokenize: tokenizeBlankLine
};
function tokenizeBlankLine(effects, ok3, nok) {
return start;
function start(code2) {
return markdownSpace(code2) ? factorySpace(effects, after, "linePrefix")(code2) : after(code2);
}
function after(code2) {
return code2 === null || markdownLineEnding(code2) ? ok3(code2) : nok(code2);
}
}
// node_modules/micromark-core-commonmark/lib/block-quote.js
// micromark block-quote container construct (`>` prefix).
var blockQuote = {
continuation: {
tokenize: tokenizeBlockQuoteContinuation
},
exit,
name: "blockQuote",
tokenize: tokenizeBlockQuoteStart
};
function tokenizeBlockQuoteStart(effects, ok3, nok) {
const self2 = this;
return start;
function start(code2) {
// `>` (62) opens or continues a block quote.
if (code2 === 62) {
const state = self2.containerState;
// Enter the container token only once per block quote.
if (!state.open) {
effects.enter("blockQuote", {
_container: true
});
state.open = true;
}
effects.enter("blockQuotePrefix");
effects.enter("blockQuoteMarker");
effects.consume(code2);
effects.exit("blockQuoteMarker");
return after;
}
return nok(code2);
}
function after(code2) {
// One optional space/tab after `>` belongs to the prefix.
if (markdownSpace(code2)) {
effects.enter("blockQuotePrefixWhitespace");
effects.consume(code2);
effects.exit("blockQuotePrefixWhitespace");
effects.exit("blockQuotePrefix");
return ok3;
}
effects.exit("blockQuotePrefix");
return ok3(code2);
}
}
// Continuation: allow an indent (under 4 spaces unless indented code is
// disabled), then require another `>` prefix.
function tokenizeBlockQuoteContinuation(effects, ok3, nok) {
const self2 = this;
return contStart;
function contStart(code2) {
if (markdownSpace(code2)) {
return factorySpace(effects, contBefore, "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code2);
}
return contBefore(code2);
}
function contBefore(code2) {
return effects.attempt(blockQuote, ok3, nok)(code2);
}
}
// Container exit: close the open `blockQuote` token.
function exit(effects) {
effects.exit("blockQuote");
}
// node_modules/micromark-core-commonmark/lib/character-escape.js
// micromark character-escape construct: a backslash followed by ASCII
// punctuation.
var characterEscape = {
name: "characterEscape",
tokenize: tokenizeCharacterEscape
};
function tokenizeCharacterEscape(effects, ok3, nok) {
return start;
// At `\`.
function start(code2) {
effects.enter("characterEscape");
effects.enter("escapeMarker");
effects.consume(code2);
effects.exit("escapeMarker");
return inside;
}
function inside(code2) {
// Only ASCII punctuation can be escaped.
if (asciiPunctuation(code2)) {
effects.enter("characterEscapeValue");
effects.consume(code2);
effects.exit("characterEscapeValue");
effects.exit("characterEscape");
return ok3;
}
return nok(code2);
}
}
// node_modules/micromark-core-commonmark/lib/character-reference.js
// micromark character-reference construct: `&name;`, `&#123;`, `&#x1F;`.
var characterReference = {
name: "characterReference",
tokenize: tokenizeCharacterReference
};
function tokenizeCharacterReference(effects, ok3, nok) {
const self2 = this;
let size = 0;
// Max value length and per-character test for the active reference form.
let max;
let test;
return start;
// At `&`.
function start(code2) {
effects.enter("characterReference");
effects.enter("characterReferenceMarker");
effects.consume(code2);
effects.exit("characterReferenceMarker");
return open;
}
function open(code2) {
// `#` (35) starts a numeric reference.
if (code2 === 35) {
effects.enter("characterReferenceMarkerNumeric");
effects.consume(code2);
effects.exit("characterReferenceMarkerNumeric");
return numeric;
}
// Named reference: up to 31 alphanumerics.
effects.enter("characterReferenceValue");
max = 31;
test = asciiAlphanumeric;
return value(code2);
}
function numeric(code2) {
// `X` (88) or `x` (120): hexadecimal, up to 6 digits.
if (code2 === 88 || code2 === 120) {
effects.enter("characterReferenceMarkerHexadecimal");
effects.consume(code2);
effects.exit("characterReferenceMarkerHexadecimal");
effects.enter("characterReferenceValue");
max = 6;
test = asciiHexDigit;
return value;
}
// Decimal: up to 7 digits.
effects.enter("characterReferenceValue");
max = 7;
test = asciiDigit;
return value(code2);
}
function value(code2) {
// `;` (59) terminates a non-empty reference.
if (code2 === 59 && size) {
const token = effects.exit("characterReferenceValue");
// Named references must actually exist.
if (test === asciiAlphanumeric && !decodeNamedCharacterReference(self2.sliceSerialize(token))) {
return nok(code2);
}
effects.enter("characterReferenceMarker");
effects.consume(code2);
effects.exit("characterReferenceMarker");
effects.exit("characterReference");
return ok3;
}
if (test(code2) && size++ < max) {
effects.consume(code2);
return value;
}
return nok(code2);
}
}
// node_modules/micromark-core-commonmark/lib/code-fenced.js
var nonLazyContinuation = {
partial: true,
tokenize: tokenizeNonLazyContinuation
};
var codeFenced = {
concrete: true,
name: "codeFenced",
tokenize: tokenizeCodeFenced
};
function tokenizeCodeFenced(effects, ok3, nok) {
const self2 = this;
const closeStart = {
partial: true,
tokenize: tokenizeCloseStart
};
let initialPrefix = 0;
let sizeOpen = 0;
let marker;
return start;
function start(code2) {
return beforeSequenceOpen(code2);
}
function beforeSequenceOpen(code2) {
const tail = self2.events[self2.events.length - 1];
initialPrefix = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], true).length : 0;
marker = code2;
effects.enter("codeFenced");
effects.enter("codeFencedFence");
effects.enter("codeFencedFenceSequence");
return sequenceOpen(code2);
}
function sequenceOpen(code2) {
if (code2 === marker) {
sizeOpen++;
effects.consume(code2);
return sequenceOpen;
}
if (sizeOpen < 3) {
return nok(code2);
}
effects.exit("codeFencedFenceSequence");
return markdownSpace(code2) ? factorySpace(effects, infoBefore, "whitespace")(code2) : infoBefore(code2);
}
function infoBefore(code2) {
if (code2 === null || markdownLineEnding(code2)) {
effects.exit("codeFencedFence");
return self2.interrupt ? ok3(code2) : effects.check(nonLazyContinuation, atNonLazyBreak, after)(code2);
}
effects.enter("codeFencedFenceInfo");
effects.enter("chunkString", {
contentType: "string"
});
return info(code2);
}
function info(code2) {
if (code2 === null || markdownLineEnding(code2)) {
effects.exit("chunkString");
effects.exit("codeFencedFenceInfo");
return infoBefore(code2);
}
if (markdownSpace(code2)) {
effects.exit("chunkString");
effects.exit("codeFencedFenceInfo");
return factorySpace(effects, metaBefore, "whitespace")(code2);
}
if (code2 === 96 && code2 === marker) {
return nok(code2);
}
effects.consume(code2);
return info;
}
function metaBefore(code2) {
if (code2 === null || markdownLineEnding(code2)) {
return infoBefore(code2);
}
effects.enter("codeFencedFenceMeta");
effects.enter("chunkString", {
contentType: "string"
});
return meta(code2);
}
function meta(code2) {
if (code2 === null || markdownLineEnding(code2)) {
effects.exit("chunkString");
effects.exit("codeFencedFenceMeta");
return infoBefore(code2);
}
if (code2 === 96 && code2 === marker) {
return nok(code2);
}
effects.consume(code2);
return meta;
}
function atNonLazyBreak(code2) {
return effects.attempt(closeStart, after, contentBefore)(code2);
}
function contentBefore(code2) {
effects.enter("lineEnding");
effects.consume(code2);
effects.exit("lineEnding");
return contentStart;
}
function contentStart(code2) {
return initialPrefix > 0 && markdownSpace(code2) ? factorySpace(effects, beforeContentChunk, "linePrefix", initialPrefix + 1)(code2) : beforeContentChunk(code2);
}
function beforeContentChunk(code2) {
if (code2 === null || markdownLineEnding(code2)) {
return effects.check(nonLazyContinuation, atNonLazyBreak, after)(code2);
}
effects.enter("codeFlowValue");
return contentChunk(code2);
}
function contentChunk(code2) {
if (code2 === null || markdownLineEnding(code2)) {
effects.exit("codeFlowValue");
return beforeContentChunk(code2);
}
effects.consume(code2);
return contentChunk;
}
function after(code2) {
effects.exit("codeFenced");
return ok3(code2);
}
function tokenizeCloseStart(effects2, ok4, nok2) {
let size = 0;
return startBefore;
function startBefore(code2) {
effects2.enter("lineEnding");
effects2.consume(code2);
effects2.exit("lineEnding");
return start2;
}
function start2(code2) {
effects2.enter("codeFencedFence");
return markdownSpace(code2) ? factorySpace(effects2, beforeSequenceClose, "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code2) : beforeSequenceClose(code2);
}
function beforeSequenceClose(code2) {
if (code2 === marker) {
effects2.enter("codeFencedFenceSequence");
return sequenceClose(code2);
}
return nok2(code2);
}
function sequenceClose(code2) {
if (code2 === marker) {
size++;
effects2.consume(code2);
return sequenceClose;
}
if (size >= sizeOpen) {
effects2.exit("codeFencedFenceSequence");
return markdownSpace(code2) ? factorySpace(effects2, sequenceCloseAfter, "whitespace")(code2) : sequenceCloseAfter(code2);
}
return nok2(code2);
}
function sequenceCloseAfter(code2) {
if (code2 === null || markdownLineEnding(code2)) {
effects2.exit("codeFencedFence");
return ok4(code2);
}
return nok2(code2);
}
}
}
function tokenizeNonLazyContinuation(effects, ok3, nok) {
  // Check that the current line ending is followed by a line that is not a
  // lazy continuation: consume the ending, then accept (`ok3`) only when the
  // parser does not flag the new line as lazy.
  const context = this;
  return start;
  function start(code2) {
    // EOF: there is no next line to continue onto.
    if (code2 === null) return nok(code2);
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    return lineStart;
  }
  function lineStart(code2) {
    const isLazy = context.parser.lazy[context.now().line];
    if (isLazy) {
      return nok(code2);
    }
    return ok3(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/code-indented.js
// Construct for indented code blocks (lines indented by at least 4 columns).
var codeIndented = {
  name: "codeIndented",
  tokenize: tokenizeCodeIndented
};
// Partial construct used to check whether the indented code block continues
// after one or more line endings.
var furtherStart = {
  partial: true,
  tokenize: tokenizeFurtherStart
};
function tokenizeCodeIndented(effects, ok3, nok) {
  // Tokenizer for an indented code block: lines indented by at least four
  // columns, possibly interleaved with blank-ish lines (see `furtherStart`).
  const self2 = this;
  return start;
  function start(code2) {
    effects.enter("codeIndented");
    // Eat the leading indentation, then verify it is wide enough.
    return factorySpace(effects, afterPrefix, "linePrefix", 4 + 1)(code2);
  }
  function afterPrefix(code2) {
    const tail = self2.events[self2.events.length - 1];
    // Only a `linePrefix` of at least 4 columns makes this indented code.
    return tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4 ? atBreak(code2) : nok(code2);
  }
  function atBreak(code2) {
    if (code2 === null) {
      return after(code2);
    }
    if (markdownLineEnding(code2)) {
      // Line ending: check whether another indented line follows.
      return effects.attempt(furtherStart, atBreak, after)(code2);
    }
    effects.enter("codeFlowValue");
    return inside(code2);
  }
  function inside(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("codeFlowValue");
      return atBreak(code2);
    }
    effects.consume(code2);
    return inside;
  }
  function after(code2) {
    effects.exit("codeIndented");
    return ok3(code2);
  }
}
function tokenizeFurtherStart(effects, ok3, nok) {
  // Partial tokenizer: succeeds when, after one or more line endings, another
  // line indented by at least four columns follows, so the indented code
  // block continues.
  const self2 = this;
  return furtherStart2;
  function furtherStart2(code2) {
    // A lazy continuation line can never continue indented code.
    if (self2.parser.lazy[self2.now().line]) {
      return nok(code2);
    }
    if (markdownLineEnding(code2)) {
      effects.enter("lineEnding");
      effects.consume(code2);
      effects.exit("lineEnding");
      return furtherStart2;
    }
    return factorySpace(effects, afterPrefix, "linePrefix", 4 + 1)(code2);
  }
  function afterPrefix(code2) {
    const tail = self2.events[self2.events.length - 1];
    // Accept with a >= 4 column prefix; a bare line ending loops back (blank
    // line); anything else ends the code block.
    return tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4 ? ok3(code2) : markdownLineEnding(code2) ? furtherStart2(code2) : nok(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/code-text.js
// Construct for code (text), i.e. inline code spans delimited by backticks.
var codeText = {
  name: "codeText",
  previous,
  resolve: resolveCodeText,
  tokenize: tokenizeCodeText
};
function resolveCodeText(events) {
  // Post-process the events of a code span:
  // 1. when the content both starts and ends with a space/line ending and
  //    contains actual data, relabel that outer pair as `codeTextPadding`
  //    (one such padding pair is stripped from the rendered value);
  // 2. merge runs of `space`/`codeTextData` between line endings into single
  //    `codeTextData` events.
  let tailExitIndex = events.length - 4;
  let headEnterIndex = 3;
  let index;
  let enter;
  if ((events[headEnterIndex][1].type === "lineEnding" || events[headEnterIndex][1].type === "space") && (events[tailExitIndex][1].type === "lineEnding" || events[tailExitIndex][1].type === "space")) {
    index = headEnterIndex;
    while (++index < tailExitIndex) {
      if (events[index][1].type === "codeTextData") {
        // Found real data: the first and last tokens become padding.
        events[headEnterIndex][1].type = "codeTextPadding";
        events[tailExitIndex][1].type = "codeTextPadding";
        headEnterIndex += 2;
        tailExitIndex -= 2;
        break;
      }
    }
  }
  // Merge adjacent non-line-ending tokens into one `codeTextData` token.
  index = headEnterIndex - 1;
  tailExitIndex++;
  while (++index <= tailExitIndex) {
    if (enter === void 0) {
      if (index !== tailExitIndex && events[index][1].type !== "lineEnding") {
        enter = index;
      }
    } else if (index === tailExitIndex || events[index][1].type === "lineEnding") {
      events[enter][1].type = "codeTextData";
      if (index !== enter + 2) {
        // Extend the first token over the merged run; drop the extra events.
        events[enter][1].end = events[index - 1][1].end;
        events.splice(enter + 2, index - enter - 2);
        tailExitIndex -= index - enter - 2;
        index = enter + 2;
      }
      enter = void 0;
    }
  }
  return events;
}
function previous(code2) {
  // Code (text) can only start at a backtick (`` ` ``) that does not follow a
  // character escape; any non-backtick previous character is fine.
  if (code2 !== 96) {
    return true;
  }
  const tail = this.events[this.events.length - 1];
  return tail[1].type === "characterEscape";
}
function tokenizeCodeText(effects, ok3, nok) {
  // Tokenizer for code (text): `` `code` ``.  The closing backtick sequence
  // must have exactly as many backticks as the opening one; a run with the
  // wrong size is demoted to data and scanning continues.
  const self2 = this;
  let sizeOpen = 0;
  let size;
  let token;
  return start;
  function start(code2) {
    effects.enter("codeText");
    effects.enter("codeTextSequence");
    return sequenceOpen(code2);
  }
  function sequenceOpen(code2) {
    // Count the opening backticks.
    if (code2 === 96) {
      effects.consume(code2);
      sizeOpen++;
      return sequenceOpen;
    }
    effects.exit("codeTextSequence");
    return between(code2);
  }
  function between(code2) {
    // EOF before a matching closing sequence: not code text after all.
    if (code2 === null) {
      return nok(code2);
    }
    if (code2 === 32) {
      effects.enter("space");
      effects.consume(code2);
      effects.exit("space");
      return between;
    }
    if (code2 === 96) {
      // Possible closing sequence; keep the token so it can be demoted to
      // data if the size does not match.
      token = effects.enter("codeTextSequence");
      size = 0;
      return sequenceClose(code2);
    }
    if (markdownLineEnding(code2)) {
      effects.enter("lineEnding");
      effects.consume(code2);
      effects.exit("lineEnding");
      return between;
    }
    effects.enter("codeTextData");
    return data(code2);
  }
  function data(code2) {
    if (code2 === null || code2 === 32 || code2 === 96 || markdownLineEnding(code2)) {
      effects.exit("codeTextData");
      return between(code2);
    }
    effects.consume(code2);
    return data;
  }
  function sequenceClose(code2) {
    if (code2 === 96) {
      effects.consume(code2);
      size++;
      return sequenceClose;
    }
    if (size === sizeOpen) {
      // Exact match with the opening sequence: the span is done.
      effects.exit("codeTextSequence");
      effects.exit("codeText");
      return ok3(code2);
    }
    // Wrong size: this backtick run is plain data.
    token.type = "codeTextData";
    return data(code2);
  }
}
// node_modules/micromark-util-subtokenize/lib/splice-buffer.js
// Gap buffer over two stacks: `left` holds the items before the cursor in
// order, `right` holds the items after the cursor in *reverse* order.
// Moving the cursor shuffles items between the two stacks, which makes
// repeated splices near the same position cheap even for huge event lists.
var SpliceBuffer = class {
  /**
   * @param {ReadonlyArray<T> | null | undefined} [initial]
   *   Initial items (optional).
   * @returns
   *   Splice buffer.
   */
  constructor(initial) {
    this.left = initial ? [...initial] : [];
    this.right = [];
  }
  /**
   * Array access;
   * does not move the cursor.
   *
   * @param {number} index
   *   Index.
   * @return {T}
   *   Item.
   */
  get(index) {
    if (index < 0 || index >= this.left.length + this.right.length) {
      throw new RangeError("Cannot access index `" + index + "` in a splice buffer of size `" + (this.left.length + this.right.length) + "`");
    }
    if (index < this.left.length) return this.left[index];
    // `right` is stored reversed, so index it from its far end.
    return this.right[this.right.length - index + this.left.length - 1];
  }
  /**
   * The length of the splice buffer, one greater than the largest index in the
   * array.
   */
  get length() {
    return this.left.length + this.right.length;
  }
  /**
   * Remove and return `list[0]`;
   * moves the cursor to `0`.
   *
   * @returns {T | undefined}
   *   Item, optional.
   */
  shift() {
    // After `setCursor(0)` every item sits (reversed) on `right`, so the
    // first item is `right`'s top.
    this.setCursor(0);
    return this.right.pop();
  }
  /**
   * Slice the buffer to get an array;
   * does not move the cursor.
   *
   * @param {number} start
   *   Start.
   * @param {number | null | undefined} [end]
   *   End (optional).
   * @returns {Array<T>}
   *   Array of items.
   */
  slice(start, end) {
    const stop = end === null || end === void 0 ? Number.POSITIVE_INFINITY : end;
    // Three cases: the slice lies entirely in `left`, entirely in `right`,
    // or straddles the cursor.
    if (stop < this.left.length) {
      return this.left.slice(start, stop);
    }
    if (start > this.left.length) {
      return this.right.slice(this.right.length - stop + this.left.length, this.right.length - start + this.left.length).reverse();
    }
    return this.left.slice(start).concat(this.right.slice(this.right.length - stop + this.left.length).reverse());
  }
  /**
   * Mimics the behavior of Array.prototype.splice() except for the change of
   * interface necessary to avoid segfaults when patching in very large arrays.
   *
   * This operation moves the cursor to `start` and leaves the cursor placed
   * after any inserted items.
   *
   * @param {number} start
   *   Start;
   *   zero-based index at which to start changing the array;
   *   negative numbers count backwards from the end of the array and values
   *   that are out-of bounds are clamped to the appropriate end of the array.
   * @param {number | null | undefined} [deleteCount=0]
   *   Delete count (default: `0`);
   *   maximum number of elements to delete, starting from start.
   * @param {Array<T> | null | undefined} [items=[]]
   *   Items to include in place of the deleted items (default: `[]`).
   * @return {Array<T>}
   *   Any removed items.
   */
  splice(start, deleteCount, items) {
    const count = deleteCount || 0;
    this.setCursor(Math.trunc(start));
    // With the cursor at `start`, the items to delete are on top of `right`.
    const removed = this.right.splice(this.right.length - count, Number.POSITIVE_INFINITY);
    if (items) chunkedPush(this.left, items);
    return removed.reverse();
  }
  /**
   * Remove and return the highest-numbered item in the array, so
   * `list[list.length - 1]`;
   * Moves the cursor to `length`.
   *
   * @returns {T | undefined}
   *   Item, optional.
   */
  pop() {
    this.setCursor(Number.POSITIVE_INFINITY);
    return this.left.pop();
  }
  /**
   * Inserts a single item to the high-numbered side of the array;
   * moves the cursor to `length`.
   *
   * @param {T} item
   *   Item.
   * @returns {undefined}
   *   Nothing.
   */
  push(item) {
    this.setCursor(Number.POSITIVE_INFINITY);
    this.left.push(item);
  }
  /**
   * Inserts many items to the high-numbered side of the array.
   * Moves the cursor to `length`.
   *
   * @param {Array<T>} items
   *   Items.
   * @returns {undefined}
   *   Nothing.
   */
  pushMany(items) {
    this.setCursor(Number.POSITIVE_INFINITY);
    chunkedPush(this.left, items);
  }
  /**
   * Inserts a single item to the low-numbered side of the array;
   * Moves the cursor to `0`.
   *
   * @param {T} item
   *   Item.
   * @returns {undefined}
   *   Nothing.
   */
  unshift(item) {
    this.setCursor(0);
    this.right.push(item);
  }
  /**
   * Inserts many items to the low-numbered side of the array;
   * moves the cursor to `0`.
   *
   * @param {Array<T>} items
   *   Items.
   * @returns {undefined}
   *   Nothing.
   */
  unshiftMany(items) {
    this.setCursor(0);
    chunkedPush(this.right, items.reverse());
  }
  /**
   * Move the cursor to a specific position in the array. Requires
   * time proportional to the distance moved.
   *
   * If `n < 0`, the cursor will end up at the beginning.
   * If `n > length`, the cursor will end up at the end.
   *
   * @param {number} n
   *   Position.
   * @return {undefined}
   *   Nothing.
   */
  setCursor(n) {
    // Fast path: already there (or clamped there).
    if (n === this.left.length || n > this.left.length && this.right.length === 0 || n < 0 && this.left.length === 0) return;
    if (n < this.left.length) {
      // Move items after position `n` from `left` onto `right` (reversed).
      const removed = this.left.splice(n, Number.POSITIVE_INFINITY);
      chunkedPush(this.right, removed.reverse());
    } else {
      // Move items before position `n` from `right` onto `left` (reversed).
      const removed = this.right.splice(this.left.length + this.right.length - n, Number.POSITIVE_INFINITY);
      chunkedPush(this.left, removed.reverse());
    }
  }
};
function chunkedPush(list3, right) {
  // `push(...spread)` can overflow the call stack on very large arrays, so
  // append in slices of at most 10000 items.
  const CHUNK_SIZE = 1e4;
  if (right.length < CHUNK_SIZE) {
    list3.push(...right);
    return;
  }
  for (let offset = 0; offset < right.length; offset += CHUNK_SIZE) {
    list3.push(...right.slice(offset, offset + CHUNK_SIZE));
  }
}
// node_modules/micromark-util-subtokenize/index.js
function subtokenize(eventsArray) {
  // Replace `chunk*` tokens (which carry a `contentType`) in `eventsArray`
  // with the events produced by tokenizing their contents, and move container
  // exits back before trailing (blank) line endings.
  // Returns `true` when nothing was expanded (done), `false` otherwise.
  const jumps = {};
  let index = -1;
  let event;
  let lineIndex;
  let otherIndex;
  let otherEvent;
  let parameters;
  let subevents;
  let more;
  const events = new SpliceBuffer(eventsArray);
  while (++index < events.length) {
    // Skip over regions already rewritten by `subcontent`.
    while (index in jumps) {
      index = jumps[index];
    }
    event = events.get(index);
    // Flag the first content chunk of a list item
    // (`_isInFirstContentOfListItem` is consumed by `subcontent` to set the
    // GFM task-list flag on the tokenizer).
    if (index && event[1].type === "chunkFlow" && events.get(index - 1)[1].type === "listItemPrefix") {
      subevents = event[1]._tokenizer.events;
      otherIndex = 0;
      if (otherIndex < subevents.length && subevents[otherIndex][1].type === "lineEndingBlank") {
        otherIndex += 2;
      }
      if (otherIndex < subevents.length && subevents[otherIndex][1].type === "content") {
        while (++otherIndex < subevents.length) {
          if (subevents[otherIndex][1].type === "content") {
            break;
          }
          if (subevents[otherIndex][1].type === "chunkText") {
            subevents[otherIndex][1]._isInFirstContentOfListItem = true;
            otherIndex++;
          }
        }
      }
    }
    if (event[0] === "enter") {
      if (event[1].contentType) {
        // A chunk with content: expand it in place and jump past the result.
        Object.assign(jumps, subcontent(events, index));
        index = jumps[index];
        more = true;
      }
    } else if (event[1]._container) {
      // Exit of a container: walk back over trailing line endings (and
      // prefixes/indents), relabelling them, and move the exit before them.
      otherIndex = index;
      lineIndex = void 0;
      while (otherIndex--) {
        otherEvent = events.get(otherIndex);
        if (otherEvent[1].type === "lineEnding" || otherEvent[1].type === "lineEndingBlank") {
          if (otherEvent[0] === "enter") {
            if (lineIndex) {
              events.get(lineIndex)[1].type = "lineEndingBlank";
            }
            otherEvent[1].type = "lineEnding";
            lineIndex = otherIndex;
          }
        } else if (otherEvent[1].type === "linePrefix" || otherEvent[1].type === "listItemIndent") {
        } else {
          break;
        }
      }
      if (lineIndex) {
        // End the container at the start of the earliest such line ending
        // and reorder the events so its exit comes first.
        event[1].end = {
          ...events.get(lineIndex)[1].start
        };
        parameters = events.slice(lineIndex, index);
        parameters.unshift(event);
        events.splice(lineIndex, index - lineIndex + 1, parameters);
      }
    }
  }
  // Write the (possibly rewritten) events back into the original array.
  splice(eventsArray, 0, Number.POSITIVE_INFINITY, events.slice(0));
  return !more;
}
function subcontent(events, eventIndex) {
  // Tokenize the linked chunks starting at `events[eventIndex]` with a
  // content-type-specific tokenizer and splice the resulting child events
  // over the chunk enter/exit pairs.  Returns a map of index jumps
  // (`from -> to`) so `subtokenize` can skip the rewritten regions.
  const token = events.get(eventIndex)[1];
  const context = events.get(eventIndex)[2];
  let startPosition = eventIndex - 1;
  const startPositions = [];
  let tokenizer = token._tokenizer;
  if (!tokenizer) {
    tokenizer = context.parser[token.contentType](token.start);
    if (token._contentTypeTextTrailing) {
      tokenizer._contentTypeTextTrailing = true;
    }
  }
  const childEvents = tokenizer.events;
  const jumps = [];
  const gaps = {};
  let stream;
  let previous2;
  let index = -1;
  let current = token;
  let adjust = 0;
  let start = 0;
  const breaks = [start];
  // Feed every chunk in the linked list (`current.next`) to the tokenizer.
  while (current) {
    // Find the enter event of this chunk to remember where to splice later.
    while (events.get(++startPosition)[1] !== current) {
    }
    startPositions.push(startPosition);
    if (!current._tokenizer) {
      stream = context.sliceStream(current);
      if (!current.next) {
        // Last chunk: terminate the stream.
        stream.push(null);
      }
      if (previous2) {
        tokenizer.defineSkip(current.start);
      }
      if (current._isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = true;
      }
      tokenizer.write(stream);
      if (current._isInFirstContentOfListItem) {
        tokenizer._gfmTasklistFirstContentOfListItem = void 0;
      }
    }
    previous2 = current;
    current = current.next;
  }
  // Find break points in the child events (a void token spanning a line
  // break) and unlink the chunks consumed up to each break.
  current = token;
  while (++index < childEvents.length) {
    if (
      // Find a void token that includes a break.
      childEvents[index][0] === "exit" && childEvents[index - 1][0] === "enter" && childEvents[index][1].type === childEvents[index - 1][1].type && childEvents[index][1].start.line !== childEvents[index][1].end.line
    ) {
      start = index + 1;
      breaks.push(start);
      current._tokenizer = void 0;
      current.previous = void 0;
      current = current.next;
    }
  }
  tokenizer.events = [];
  if (current) {
    // There is a leftover chunk: unlink it too.
    current._tokenizer = void 0;
    current.previous = void 0;
  } else {
    breaks.pop();
  }
  // Splice the child-event slices over the chunk enter/exit pairs, last
  // slice first so earlier indices stay valid.
  index = breaks.length;
  while (index--) {
    const slice = childEvents.slice(breaks[index], breaks[index + 1]);
    const start2 = startPositions.pop();
    jumps.push([start2, start2 + slice.length - 1]);
    events.splice(start2, 2, slice);
  }
  jumps.reverse();
  index = -1;
  // Accumulate the jump table, shifting by the net growth of earlier slices.
  while (++index < jumps.length) {
    gaps[adjust + jumps[index][0]] = adjust + jumps[index][1];
    adjust += jumps[index][1] - jumps[index][0] - 1;
  }
  return gaps;
}
// node_modules/micromark-core-commonmark/lib/content.js
// Construct for "content": a run of lines whose chunks are later
// subtokenized (see `resolveContent`/`subtokenize`).
var content2 = {
  resolve: resolveContent,
  tokenize: tokenizeContent
};
// Partial construct used to check whether content continues after a line
// ending or is interrupted by another flow construct.
var continuationConstruct = {
  partial: true,
  tokenize: tokenizeContinuation
};
function resolveContent(allEvents) {
  // Expand the captured `chunkContent` chunks into their actual events by
  // running the subtokenizer over them, then hand the list back.
  subtokenize(allEvents);
  return allEvents;
}
function tokenizeContent(effects, ok3) {
  // Tokenizer for content: captures raw `chunkContent` chunks, one per line,
  // linked together via `previous`/`next` for later subtokenizing.
  let previous2;
  return chunkStart;
  function chunkStart(code2) {
    effects.enter("content");
    previous2 = effects.enter("chunkContent", {
      contentType: "content"
    });
    return chunkInside(code2);
  }
  function chunkInside(code2) {
    if (code2 === null) {
      return contentEnd(code2);
    }
    if (markdownLineEnding(code2)) {
      // Only continue when the next line keeps the content going.
      return effects.check(continuationConstruct, contentContinue, contentEnd)(code2);
    }
    effects.consume(code2);
    return chunkInside;
  }
  function contentEnd(code2) {
    effects.exit("chunkContent");
    effects.exit("content");
    return ok3(code2);
  }
  function contentContinue(code2) {
    effects.consume(code2);
    effects.exit("chunkContent");
    // Start a new chunk, linked to the previous one.
    previous2.next = effects.enter("chunkContent", {
      contentType: "content",
      previous: previous2
    });
    previous2 = previous2.next;
    return chunkInside;
  }
}
function tokenizeContinuation(effects, ok3, nok) {
  // Partial tokenizer deciding whether, after a line ending, content
  // continues (`ok3`) or is interrupted by another flow construct (`nok`).
  const self2 = this;
  return startLookahead;
  function startLookahead(code2) {
    effects.exit("chunkContent");
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    return factorySpace(effects, prefixed, "linePrefix");
  }
  function prefixed(code2) {
    // A blank line ends content.
    if (code2 === null || markdownLineEnding(code2)) {
      return nok(code2);
    }
    const tail = self2.events[self2.events.length - 1];
    // With indented code enabled, a 4+ column prefix cannot interrupt
    // content, so it continues.
    if (!self2.parser.constructs.disable.null.includes("codeIndented") && tail && tail[1].type === "linePrefix" && tail[2].sliceSerialize(tail[1], true).length >= 4) {
      return ok3(code2);
    }
    // Otherwise ask the flow constructs whether one of them interrupts.
    return effects.interrupt(self2.parser.constructs.flow, nok, ok3)(code2);
  }
}
// node_modules/micromark-factory-destination/index.js
function factoryDestination(effects, ok3, nok, type, literalType, literalMarkerType, rawType, stringType, max) {
  // Parse a destination, either enclosed (`<...>`) or raw (with balanced
  // parens, at most `max` deep).  Token type names are passed in so the
  // factory can be shared between constructs (links, definitions, ...).
  const limit = max || Number.POSITIVE_INFINITY;
  let balance = 0;
  return start;
  function start(code2) {
    if (code2 === 60) {
      // `<`: enclosed destination.
      effects.enter(type);
      effects.enter(literalType);
      effects.enter(literalMarkerType);
      effects.consume(code2);
      effects.exit(literalMarkerType);
      return enclosedBefore;
    }
    // EOF, space, `)`, or a control character cannot start a raw destination.
    if (code2 === null || code2 === 32 || code2 === 41 || asciiControl(code2)) {
      return nok(code2);
    }
    effects.enter(type);
    effects.enter(rawType);
    effects.enter(stringType);
    effects.enter("chunkString", {
      contentType: "string"
    });
    return raw(code2);
  }
  function enclosedBefore(code2) {
    if (code2 === 62) {
      // `>` directly after `<`: empty enclosed destination.
      effects.enter(literalMarkerType);
      effects.consume(code2);
      effects.exit(literalMarkerType);
      effects.exit(literalType);
      effects.exit(type);
      return ok3;
    }
    effects.enter(stringType);
    effects.enter("chunkString", {
      contentType: "string"
    });
    return enclosed(code2);
  }
  function enclosed(code2) {
    if (code2 === 62) {
      effects.exit("chunkString");
      effects.exit(stringType);
      return enclosedBefore(code2);
    }
    // `<` and line endings are not allowed inside an enclosed destination.
    if (code2 === null || code2 === 60 || markdownLineEnding(code2)) {
      return nok(code2);
    }
    effects.consume(code2);
    return code2 === 92 ? enclosedEscape : enclosed;
  }
  function enclosedEscape(code2) {
    // After `\`: consume an escapable marker, otherwise re-handle the code.
    if (code2 === 60 || code2 === 62 || code2 === 92) {
      effects.consume(code2);
      return enclosed;
    }
    return enclosed(code2);
  }
  function raw(code2) {
    // At balance 0, EOF / `)` / whitespace ends the raw destination.
    if (!balance && (code2 === null || code2 === 41 || markdownLineEndingOrSpace(code2))) {
      effects.exit("chunkString");
      effects.exit(stringType);
      effects.exit(rawType);
      effects.exit(type);
      return ok3(code2);
    }
    if (balance < limit && code2 === 40) {
      effects.consume(code2);
      balance++;
      return raw;
    }
    if (code2 === 41) {
      effects.consume(code2);
      balance--;
      return raw;
    }
    // EOF, space, `(` past the limit, or a control character: invalid.
    if (code2 === null || code2 === 32 || code2 === 40 || asciiControl(code2)) {
      return nok(code2);
    }
    effects.consume(code2);
    return code2 === 92 ? rawEscape : raw;
  }
  function rawEscape(code2) {
    if (code2 === 40 || code2 === 41 || code2 === 92) {
      effects.consume(code2);
      return raw;
    }
    return raw(code2);
  }
}
// node_modules/micromark-factory-label/index.js
function factoryLabel(effects, ok3, nok, type, markerType, stringType) {
  // Parse a label (`[...]`) of at most 999 characters that contains at least
  // one non-whitespace character and no unescaped `[`.  Token type names are
  // passed in so the factory can be shared between constructs.
  const self2 = this;
  let size = 0;
  let seen;
  return start;
  function start(code2) {
    effects.enter(type);
    effects.enter(markerType);
    effects.consume(code2);
    effects.exit(markerType);
    effects.enter(stringType);
    return atBreak;
  }
  function atBreak(code2) {
    if (size > 999 || code2 === null || code2 === 91 || code2 === 93 && !seen || // To do: remove in the future once we've switched from
    // `micromark-extension-footnote` to `micromark-extension-gfm-footnote`,
    // which doesn't need this.
    // Hidden footnotes hook.
    /* c8 ignore next 3 */
    code2 === 94 && !size && "_hiddenFootnoteSupport" in self2.parser.constructs) {
      return nok(code2);
    }
    if (code2 === 93) {
      effects.exit(stringType);
      effects.enter(markerType);
      effects.consume(code2);
      effects.exit(markerType);
      effects.exit(type);
      return ok3;
    }
    if (markdownLineEnding(code2)) {
      effects.enter("lineEnding");
      effects.consume(code2);
      effects.exit("lineEnding");
      return atBreak;
    }
    effects.enter("chunkString", {
      contentType: "string"
    });
    return labelInside(code2);
  }
  function labelInside(code2) {
    if (code2 === null || code2 === 91 || code2 === 93 || markdownLineEnding(code2) || size++ > 999) {
      effects.exit("chunkString");
      return atBreak(code2);
    }
    effects.consume(code2);
    // The label must contain at least one non-whitespace character.
    if (!seen) seen = !markdownSpace(code2);
    return code2 === 92 ? labelEscape : labelInside;
  }
  function labelEscape(code2) {
    // After `\`: `[`, `\`, and `]` are escapable inside a label.
    if (code2 === 91 || code2 === 92 || code2 === 93) {
      effects.consume(code2);
      size++;
      return labelInside;
    }
    return labelInside(code2);
  }
}
// node_modules/micromark-factory-title/index.js
function factoryTitle(effects, ok3, nok, type, markerType, stringType) {
  // Parse a title in `"..."`, `'...'`, or `(...)`; the closing marker is
  // derived from the opening one.  Token type names are passed in so the
  // factory can be shared between constructs.
  let marker;
  return start;
  function start(code2) {
    if (code2 === 34 || code2 === 39 || code2 === 40) {
      effects.enter(type);
      effects.enter(markerType);
      effects.consume(code2);
      effects.exit(markerType);
      // `(` closes with `)`; quotes close with themselves.
      marker = code2 === 40 ? 41 : code2;
      return begin;
    }
    return nok(code2);
  }
  function begin(code2) {
    if (code2 === marker) {
      effects.enter(markerType);
      effects.consume(code2);
      effects.exit(markerType);
      effects.exit(type);
      return ok3;
    }
    effects.enter(stringType);
    return atBreak(code2);
  }
  function atBreak(code2) {
    if (code2 === marker) {
      effects.exit(stringType);
      return begin(marker);
    }
    if (code2 === null) {
      return nok(code2);
    }
    if (markdownLineEnding(code2)) {
      // Titles may span lines; eat the ending and any indentation.
      effects.enter("lineEnding");
      effects.consume(code2);
      effects.exit("lineEnding");
      return factorySpace(effects, atBreak, "linePrefix");
    }
    effects.enter("chunkString", {
      contentType: "string"
    });
    return inside(code2);
  }
  function inside(code2) {
    if (code2 === marker || code2 === null || markdownLineEnding(code2)) {
      effects.exit("chunkString");
      return atBreak(code2);
    }
    effects.consume(code2);
    return code2 === 92 ? escape : inside;
  }
  function escape(code2) {
    // After `\`: the closing marker and `\` itself are escapable.
    if (code2 === marker || code2 === 92) {
      effects.consume(code2);
      return inside;
    }
    return inside(code2);
  }
}
// node_modules/micromark-factory-whitespace/index.js
function factoryWhitespace(effects, ok3) {
  // Consume an arbitrary run of markdown whitespace (line endings plus
  // spaces/tabs), then continue with `ok3`.  Spaces before the first line
  // ending are tokenized as `lineSuffix`, spaces after one as `linePrefix`.
  let sawLineEnding;
  return start;
  function start(code2) {
    if (markdownLineEnding(code2)) {
      effects.enter("lineEnding");
      effects.consume(code2);
      effects.exit("lineEnding");
      sawLineEnding = true;
      return start;
    }
    if (!markdownSpace(code2)) {
      return ok3(code2);
    }
    const tokenType = sawLineEnding ? "linePrefix" : "lineSuffix";
    return factorySpace(effects, start, tokenType)(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/definition.js
// Construct for definitions: `[label]: destination "title"`.
var definition = {
  name: "definition",
  tokenize: tokenizeDefinition
};
// Partial construct for the optional title part of a definition.
var titleBefore = {
  partial: true,
  tokenize: tokenizeTitleBefore
};
function tokenizeDefinition(effects, ok3, nok) {
  // Tokenizer for a definition: label, `:`, destination, and an optional
  // title, followed only by whitespace until the end of the line.  The
  // normalized label is recorded in `parser.defined` on success.
  const self2 = this;
  let identifier;
  return start;
  function start(code2) {
    effects.enter("definition");
    return before(code2);
  }
  function before(code2) {
    return factoryLabel.call(
      self2,
      effects,
      labelAfter,
      // Note: we don't need to reset the way `markdown-rs` does.
      nok,
      "definitionLabel",
      "definitionLabelMarker",
      "definitionLabelString"
    )(code2);
  }
  function labelAfter(code2) {
    // Slice the label text (without brackets) and normalize it for matching.
    identifier = normalizeIdentifier(self2.sliceSerialize(self2.events[self2.events.length - 1][1]).slice(1, -1));
    if (code2 === 58) {
      effects.enter("definitionMarker");
      effects.consume(code2);
      effects.exit("definitionMarker");
      return markerAfter;
    }
    return nok(code2);
  }
  function markerAfter(code2) {
    return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, destinationBefore)(code2) : destinationBefore(code2);
  }
  function destinationBefore(code2) {
    return factoryDestination(
      effects,
      destinationAfter,
      // Note: we don't need to reset the way `markdown-rs` does.
      nok,
      "definitionDestination",
      "definitionDestinationLiteral",
      "definitionDestinationLiteralMarker",
      "definitionDestinationRaw",
      "definitionDestinationString"
    )(code2);
  }
  function destinationAfter(code2) {
    // The title is optional: try it, continue with `after` either way.
    return effects.attempt(titleBefore, after, after)(code2);
  }
  function after(code2) {
    return markdownSpace(code2) ? factorySpace(effects, afterWhitespace, "whitespace")(code2) : afterWhitespace(code2);
  }
  function afterWhitespace(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("definition");
      self2.parser.defined.push(identifier);
      return ok3(code2);
    }
    return nok(code2);
  }
}
function tokenizeTitleBefore(effects, ok3, nok) {
  // Try to parse the whitespace + title (+ optional trailing whitespace)
  // that may follow a definition destination; fail (`nok`) unless the title
  // is followed only by the end of the line.
  return titleBefore2;
  function titleBefore2(code2) {
    // The title must be separated from the destination by whitespace.
    if (markdownLineEndingOrSpace(code2)) {
      return factoryWhitespace(effects, beforeMarker)(code2);
    }
    return nok(code2);
  }
  function beforeMarker(code2) {
    return factoryTitle(effects, titleAfter, nok, "definitionTitle", "definitionTitleMarker", "definitionTitleString")(code2);
  }
  function titleAfter(code2) {
    // Optional trailing spaces/tabs after the closing title marker.
    if (markdownSpace(code2)) {
      return factorySpace(effects, titleAfterOptionalWhitespace, "whitespace")(code2);
    }
    return titleAfterOptionalWhitespace(code2);
  }
  function titleAfterOptionalWhitespace(code2) {
    // Only EOF or a line ending may follow the title.
    if (code2 === null || markdownLineEnding(code2)) {
      return ok3(code2);
    }
    return nok(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/hard-break-escape.js
// Construct for a hard break created by a backslash at the end of a line.
var hardBreakEscape = {
  name: "hardBreakEscape",
  tokenize: tokenizeHardBreakEscape
};
function tokenizeHardBreakEscape(effects, ok3, nok) {
  // A hard break (escape) is a backslash followed directly by a line ending.
  return start;
  function start(code2) {
    // Consume the backslash; whether this really is a hard break is decided
    // by the character after it.
    effects.enter("hardBreakEscape");
    effects.consume(code2);
    return after;
  }
  function after(code2) {
    if (!markdownLineEnding(code2)) {
      return nok(code2);
    }
    effects.exit("hardBreakEscape");
    return ok3(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/heading-atx.js
// Construct for ATX headings (`# Heading`).
var headingAtx = {
  name: "headingAtx",
  resolve: resolveHeadingAtx,
  tokenize: tokenizeHeadingAtx
};
function resolveHeadingAtx(events, context) {
  // Rework the heading's events: skip surrounding whitespace and a trailing
  // `#` sequence, then wrap everything in between in a single
  // `atxHeadingText` token whose contents are tokenized as text.
  let contentEnd = events.length - 2;
  let contentStart = 3;
  let content3;
  let text4;
  if (events[contentStart][1].type === "whitespace") {
    contentStart += 2;
  }
  if (contentEnd - 2 > contentStart && events[contentEnd][1].type === "whitespace") {
    contentEnd -= 2;
  }
  if (events[contentEnd][1].type === "atxHeadingSequence" && (contentStart === contentEnd - 1 || contentEnd - 4 > contentStart && events[contentEnd - 2][1].type === "whitespace")) {
    // A closing sequence (and the whitespace before it) is not content.
    contentEnd -= contentStart + 1 === contentEnd ? 2 : 4;
  }
  if (contentEnd > contentStart) {
    content3 = {
      type: "atxHeadingText",
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end
    };
    text4 = {
      type: "chunkText",
      start: events[contentStart][1].start,
      end: events[contentEnd][1].end,
      contentType: "text"
    };
    splice(events, contentStart, contentEnd - contentStart + 1, [["enter", content3, context], ["enter", text4, context], ["exit", text4, context], ["exit", content3, context]]);
  }
  return events;
}
function tokenizeHeadingAtx(effects, ok3, nok) {
  // Tokenizer for an ATX heading: 1-6 `#`s, then optional content and an
  // optional closing `#` sequence (cleaned up later by `resolveHeadingAtx`).
  let size = 0;
  return start;
  function start(code2) {
    effects.enter("atxHeading");
    return before(code2);
  }
  function before(code2) {
    effects.enter("atxHeadingSequence");
    return sequenceOpen(code2);
  }
  function sequenceOpen(code2) {
    // At most six `#`s can open a heading.
    if (code2 === 35 && size++ < 6) {
      effects.consume(code2);
      return sequenceOpen;
    }
    // The opening sequence must be followed by EOF, a line ending, or space.
    if (code2 === null || markdownLineEndingOrSpace(code2)) {
      effects.exit("atxHeadingSequence");
      return atBreak(code2);
    }
    return nok(code2);
  }
  function atBreak(code2) {
    if (code2 === 35) {
      effects.enter("atxHeadingSequence");
      return sequenceFurther(code2);
    }
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("atxHeading");
      return ok3(code2);
    }
    if (markdownSpace(code2)) {
      return factorySpace(effects, atBreak, "whitespace")(code2);
    }
    effects.enter("atxHeadingText");
    return data(code2);
  }
  function sequenceFurther(code2) {
    if (code2 === 35) {
      effects.consume(code2);
      return sequenceFurther;
    }
    effects.exit("atxHeadingSequence");
    return atBreak(code2);
  }
  function data(code2) {
    if (code2 === null || code2 === 35 || markdownLineEndingOrSpace(code2)) {
      effects.exit("atxHeadingText");
      return atBreak(code2);
    }
    effects.consume(code2);
    return data;
  }
}
// node_modules/micromark-util-html-tag-name/index.js
// Tag names that open a "basic" HTML (flow) block; membership is checked
// with `includes`, so only the contents matter, not the order.
var htmlBlockNames = ("address article aside base basefont blockquote body caption center col colgroup dd details dialog dir div dl dt fieldset figcaption figure footer form frame frameset h1 h2 h3 h4 h5 h6 head header hr html iframe legend li link main menu menuitem nav noframes ol optgroup option p param search section summary table tbody td tfoot th thead title tr track ul").split(" ");
// Tag names whose contents are raw text (no markdown parsing inside).
var htmlRawNames = "pre script style textarea".split(" ");
// node_modules/micromark-core-commonmark/lib/html-flow.js
// Construct for HTML (flow); marked `concrete` so its contents are taken
// as-is rather than parsed for other constructs.
var htmlFlow = {
  concrete: true,
  name: "htmlFlow",
  resolveTo: resolveToHtmlFlow,
  tokenize: tokenizeHtmlFlow
};
// Partial construct: checks for a blank line (tokenizer defined later).
var blankLineBefore = {
  partial: true,
  tokenize: tokenizeBlankLineBefore
};
// Partial construct: checks that the next line is not a lazy continuation
// (tokenizer defined later).
var nonLazyContinuationStart = {
  partial: true,
  tokenize: tokenizeNonLazyContinuationStart
};
function resolveToHtmlFlow(events) {
  // Find the `enter` event of the current `htmlFlow` token by walking
  // backwards through the events.
  let i = events.length;
  while (i--) {
    const [kind, token] = events[i];
    if (kind === "enter" && token.type === "htmlFlow") {
      break;
    }
  }
  // When a `linePrefix` (indentation) directly precedes the HTML, absorb it:
  // move the start of the HTML tokens back and drop the prefix events.
  if (i > 1 && events[i - 2][1].type === "linePrefix") {
    const prefixStart = events[i - 2][1].start;
    events[i][1].start = prefixStart;
    events[i + 1][1].start = prefixStart;
    events.splice(i - 2, 2);
  }
  return events;
}
function tokenizeHtmlFlow(effects, ok3, nok) {
const self2 = this;
let marker;
let closingTag;
let buffer;
let index;
let markerB;
return start;
function start(code2) {
return before(code2);
}
function before(code2) {
effects.enter("htmlFlow");
effects.enter("htmlFlowData");
effects.consume(code2);
return open;
}
function open(code2) {
if (code2 === 33) {
effects.consume(code2);
return declarationOpen;
}
if (code2 === 47) {
effects.consume(code2);
closingTag = true;
return tagCloseStart;
}
if (code2 === 63) {
effects.consume(code2);
marker = 3;
return self2.interrupt ? ok3 : continuationDeclarationInside;
}
if (asciiAlpha(code2)) {
effects.consume(code2);
buffer = String.fromCharCode(code2);
return tagName;
}
return nok(code2);
}
function declarationOpen(code2) {
if (code2 === 45) {
effects.consume(code2);
marker = 2;
return commentOpenInside;
}
if (code2 === 91) {
effects.consume(code2);
marker = 5;
index = 0;
return cdataOpenInside;
}
if (asciiAlpha(code2)) {
effects.consume(code2);
marker = 4;
return self2.interrupt ? ok3 : continuationDeclarationInside;
}
return nok(code2);
}
function commentOpenInside(code2) {
if (code2 === 45) {
effects.consume(code2);
return self2.interrupt ? ok3 : continuationDeclarationInside;
}
return nok(code2);
}
function cdataOpenInside(code2) {
const value = "CDATA[";
if (code2 === value.charCodeAt(index++)) {
effects.consume(code2);
if (index === value.length) {
return self2.interrupt ? ok3 : continuation;
}
return cdataOpenInside;
}
return nok(code2);
}
function tagCloseStart(code2) {
if (asciiAlpha(code2)) {
effects.consume(code2);
buffer = String.fromCharCode(code2);
return tagName;
}
return nok(code2);
}
function tagName(code2) {
if (code2 === null || code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
const slash = code2 === 47;
const name = buffer.toLowerCase();
if (!slash && !closingTag && htmlRawNames.includes(name)) {
marker = 1;
return self2.interrupt ? ok3(code2) : continuation(code2);
}
if (htmlBlockNames.includes(buffer.toLowerCase())) {
marker = 6;
if (slash) {
effects.consume(code2);
return basicSelfClosing;
}
return self2.interrupt ? ok3(code2) : continuation(code2);
}
marker = 7;
return self2.interrupt && !self2.parser.lazy[self2.now().line] ? nok(code2) : closingTag ? completeClosingTagAfter(code2) : completeAttributeNameBefore(code2);
}
if (code2 === 45 || asciiAlphanumeric(code2)) {
effects.consume(code2);
buffer += String.fromCharCode(code2);
return tagName;
}
return nok(code2);
}
function basicSelfClosing(code2) {
if (code2 === 62) {
effects.consume(code2);
return self2.interrupt ? ok3 : continuation;
}
return nok(code2);
}
function completeClosingTagAfter(code2) {
if (markdownSpace(code2)) {
effects.consume(code2);
return completeClosingTagAfter;
}
return completeEnd(code2);
}
function completeAttributeNameBefore(code2) {
if (code2 === 47) {
effects.consume(code2);
return completeEnd;
}
if (code2 === 58 || code2 === 95 || asciiAlpha(code2)) {
effects.consume(code2);
return completeAttributeName;
}
if (markdownSpace(code2)) {
effects.consume(code2);
return completeAttributeNameBefore;
}
return completeEnd(code2);
}
function completeAttributeName(code2) {
if (code2 === 45 || code2 === 46 || code2 === 58 || code2 === 95 || asciiAlphanumeric(code2)) {
effects.consume(code2);
return completeAttributeName;
}
return completeAttributeNameAfter(code2);
}
function completeAttributeNameAfter(code2) {
if (code2 === 61) {
effects.consume(code2);
return completeAttributeValueBefore;
}
if (markdownSpace(code2)) {
effects.consume(code2);
return completeAttributeNameAfter;
}
return completeAttributeNameBefore(code2);
}
function completeAttributeValueBefore(code2) {
if (code2 === null || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96) {
return nok(code2);
}
if (code2 === 34 || code2 === 39) {
effects.consume(code2);
markerB = code2;
return completeAttributeValueQuoted;
}
if (markdownSpace(code2)) {
effects.consume(code2);
return completeAttributeValueBefore;
}
return completeAttributeValueUnquoted(code2);
}
function completeAttributeValueQuoted(code2) {
if (code2 === markerB) {
effects.consume(code2);
markerB = null;
return completeAttributeValueQuotedAfter;
}
if (code2 === null || markdownLineEnding(code2)) {
return nok(code2);
}
effects.consume(code2);
return completeAttributeValueQuoted;
}
function completeAttributeValueUnquoted(code2) {
if (code2 === null || code2 === 34 || code2 === 39 || code2 === 47 || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96 || markdownLineEndingOrSpace(code2)) {
return completeAttributeNameAfter(code2);
}
effects.consume(code2);
return completeAttributeValueUnquoted;
}
function completeAttributeValueQuotedAfter(code2) {
if (code2 === 47 || code2 === 62 || markdownSpace(code2)) {
return completeAttributeNameBefore(code2);
}
return nok(code2);
}
function completeEnd(code2) {
if (code2 === 62) {
effects.consume(code2);
return completeAfter;
}
return nok(code2);
}
function completeAfter(code2) {
if (code2 === null || markdownLineEnding(code2)) {
return continuation(code2);
}
if (markdownSpace(code2)) {
effects.consume(code2);
return completeAfter;
}
return nok(code2);
}
function continuation(code2) {
if (code2 === 45 && marker === 2) {
effects.consume(code2);
return continuationCommentInside;
}
if (code2 === 60 && marker === 1) {
effects.consume(code2);
return continuationRawTagOpen;
}
if (code2 === 62 && marker === 4) {
effects.consume(code2);
return continuationClose;
}
if (code2 === 63 && marker === 3) {
effects.consume(code2);
return continuationDeclarationInside;
}
if (code2 === 93 && marker === 5) {
effects.consume(code2);
return continuationCdataInside;
}
if (markdownLineEnding(code2) && (marker === 6 || marker === 7)) {
effects.exit("htmlFlowData");
return effects.check(blankLineBefore, continuationAfter, continuationStart)(code2);
}
if (code2 === null || markdownLineEnding(code2)) {
effects.exit("htmlFlowData");
return continuationStart(code2);
}
effects.consume(code2);
return continuation;
}
function continuationStart(code2) {
return effects.check(nonLazyContinuationStart, continuationStartNonLazy, continuationAfter)(code2);
}
function continuationStartNonLazy(code2) {
effects.enter("lineEnding");
effects.consume(code2);
effects.exit("lineEnding");
return continuationBefore;
}
function continuationBefore(code2) {
if (code2 === null || markdownLineEnding(code2)) {
return continuationStart(code2);
}
effects.enter("htmlFlowData");
return continuation(code2);
}
function continuationCommentInside(code2) {
if (code2 === 45) {
effects.consume(code2);
return continuationDeclarationInside;
}
return continuation(code2);
}
function continuationRawTagOpen(code2) {
if (code2 === 47) {
effects.consume(code2);
buffer = "";
return continuationRawEndTag;
}
return continuation(code2);
}
function continuationRawEndTag(code2) {
if (code2 === 62) {
const name = buffer.toLowerCase();
if (htmlRawNames.includes(name)) {
effects.consume(code2);
return continuationClose;
}
return continuation(code2);
}
if (asciiAlpha(code2) && buffer.length < 8) {
effects.consume(code2);
buffer += String.fromCharCode(code2);
return continuationRawEndTag;
}
return continuation(code2);
}
function continuationCdataInside(code2) {
if (code2 === 93) {
effects.consume(code2);
return continuationDeclarationInside;
}
return continuation(code2);
}
function continuationDeclarationInside(code2) {
if (code2 === 62) {
effects.consume(code2);
return continuationClose;
}
if (code2 === 45 && marker === 2) {
effects.consume(code2);
return continuationDeclarationInside;
}
return continuation(code2);
}
function continuationClose(code2) {
if (code2 === null || markdownLineEnding(code2)) {
effects.exit("htmlFlowData");
return continuationAfter(code2);
}
effects.consume(code2);
return continuationClose;
}
function continuationAfter(code2) {
effects.exit("htmlFlow");
return ok3(code2);
}
}
/**
 * Check that what follows is a line ending and that the next line is not a
 * lazy continuation line. Used by html (flow) to decide whether a block can
 * continue onto the next line.
 */
function tokenizeNonLazyContinuationStart(effects, ok3, nok) {
  const context = this;
  return lineStart;
  /** Only an actual line ending can begin a non-lazy continuation. */
  function lineStart(code2) {
    if (!markdownLineEnding(code2)) {
      return nok(code2);
    }
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    return afterLineEnding;
  }
  /** After the EOL: succeed only when the new line is not marked lazy. */
  function afterLineEnding(code2) {
    if (context.parser.lazy[context.now().line]) {
      return nok(code2);
    }
    return ok3(code2);
  }
}
// Consume one line ending, then check whether the following line is blank
// (used to detect the end of kind 6/7 HTML blocks).
function tokenizeBlankLineBefore(effects, ok3, nok) {
  return start;
  function start(code2) {
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    return effects.attempt(blankLine, ok3, nok);
  }
}
// node_modules/micromark-core-commonmark/lib/html-text.js
// Construct for inline ("text") HTML, e.g. `a <b>c</b> d`.
var htmlText = {
  name: "htmlText",
  tokenize: tokenizeHtmlText
};
// Tokenize inline HTML starting at `<`: a tag, comment (`<!--`),
// instruction (`<?`), declaration (`<!X`), or CDATA section (`<![CDATA[`).
// Unlike HTML (flow), inline HTML may span line endings; `returnState`
// remembers where to resume after one.
function tokenizeHtmlText(effects, ok3, nok) {
  const self2 = this;
  // `marker` holds the active quote character inside attribute values.
  let marker;
  // `index` walks through the literal `CDATA[` while opening a CDATA section.
  let index;
  // State to return to after consuming a line ending inside the construct.
  let returnState;
  return start;
  // At `<`.
  function start(code2) {
    effects.enter("htmlText");
    effects.enter("htmlTextData");
    effects.consume(code2);
    return open;
  }
  // After `<`: `!` declaration-ish, `/` closing tag, `?` instruction,
  // letter opening tag.
  function open(code2) {
    if (code2 === 33) {
      effects.consume(code2);
      return declarationOpen;
    }
    if (code2 === 47) {
      effects.consume(code2);
      return tagCloseStart;
    }
    if (code2 === 63) {
      effects.consume(code2);
      return instruction;
    }
    if (asciiAlpha(code2)) {
      effects.consume(code2);
      return tagOpen;
    }
    return nok(code2);
  }
  // After `<!`: `-` comment, `[` CDATA, letter declaration.
  function declarationOpen(code2) {
    if (code2 === 45) {
      effects.consume(code2);
      return commentOpenInside;
    }
    if (code2 === 91) {
      effects.consume(code2);
      index = 0;
      return cdataOpenInside;
    }
    if (asciiAlpha(code2)) {
      effects.consume(code2);
      return declaration;
    }
    return nok(code2);
  }
  // After `<!-`: the second `-` opens the comment.
  function commentOpenInside(code2) {
    if (code2 === 45) {
      effects.consume(code2);
      return commentEnd;
    }
    return nok(code2);
  }
  // In comment content, looking for `--`.
  function comment(code2) {
    if (code2 === null) {
      return nok(code2);
    }
    if (code2 === 45) {
      effects.consume(code2);
      return commentClose;
    }
    if (markdownLineEnding(code2)) {
      returnState = comment;
      return lineEndingBefore(code2);
    }
    effects.consume(code2);
    return comment;
  }
  // Saw `-` in a comment: a second `-` may end it.
  function commentClose(code2) {
    if (code2 === 45) {
      effects.consume(code2);
      return commentEnd;
    }
    return comment(code2);
  }
  // After `--`: `>` ends the comment, `-` keeps looking, else back to content.
  function commentEnd(code2) {
    return code2 === 62 ? end(code2) : code2 === 45 ? commentClose(code2) : comment(code2);
  }
  // Match the literal `CDATA[` one character at a time via `index`.
  function cdataOpenInside(code2) {
    const value = "CDATA[";
    if (code2 === value.charCodeAt(index++)) {
      effects.consume(code2);
      return index === value.length ? cdata : cdataOpenInside;
    }
    return nok(code2);
  }
  // In CDATA content, looking for `]]>`.
  function cdata(code2) {
    if (code2 === null) {
      return nok(code2);
    }
    if (code2 === 93) {
      effects.consume(code2);
      return cdataClose;
    }
    if (markdownLineEnding(code2)) {
      returnState = cdata;
      return lineEndingBefore(code2);
    }
    effects.consume(code2);
    return cdata;
  }
  // Saw `]`: a second `]` may begin the closer.
  function cdataClose(code2) {
    if (code2 === 93) {
      effects.consume(code2);
      return cdataEnd;
    }
    return cdata(code2);
  }
  // After `]]`: `>` ends, further `]` keeps the possibility open.
  function cdataEnd(code2) {
    if (code2 === 62) {
      return end(code2);
    }
    if (code2 === 93) {
      effects.consume(code2);
      return cdataEnd;
    }
    return cdata(code2);
  }
  // In declaration content: runs to `>` (or fails at EOF).
  function declaration(code2) {
    if (code2 === null || code2 === 62) {
      return end(code2);
    }
    if (markdownLineEnding(code2)) {
      returnState = declaration;
      return lineEndingBefore(code2);
    }
    effects.consume(code2);
    return declaration;
  }
  // In instruction content, looking for `?>`.
  function instruction(code2) {
    if (code2 === null) {
      return nok(code2);
    }
    if (code2 === 63) {
      effects.consume(code2);
      return instructionClose;
    }
    if (markdownLineEnding(code2)) {
      returnState = instruction;
      return lineEndingBefore(code2);
    }
    effects.consume(code2);
    return instruction;
  }
  function instructionClose(code2) {
    return code2 === 62 ? end(code2) : instruction(code2);
  }
  // After `</`: the tag name must start with a letter.
  function tagCloseStart(code2) {
    if (asciiAlpha(code2)) {
      effects.consume(code2);
      return tagClose;
    }
    return nok(code2);
  }
  // In a closing tag name.
  function tagClose(code2) {
    if (code2 === 45 || asciiAlphanumeric(code2)) {
      effects.consume(code2);
      return tagClose;
    }
    return tagCloseBetween(code2);
  }
  // After a closing tag name: only whitespace before `>`.
  function tagCloseBetween(code2) {
    if (markdownLineEnding(code2)) {
      returnState = tagCloseBetween;
      return lineEndingBefore(code2);
    }
    if (markdownSpace(code2)) {
      effects.consume(code2);
      return tagCloseBetween;
    }
    return end(code2);
  }
  // In an opening tag name.
  function tagOpen(code2) {
    if (code2 === 45 || asciiAlphanumeric(code2)) {
      effects.consume(code2);
      return tagOpen;
    }
    if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
      return tagOpenBetween(code2);
    }
    return nok(code2);
  }
  // Between attributes in an opening tag: `/` self-closes, `:`/`_`/letter
  // starts an attribute name, whitespace/EOL repeats.
  function tagOpenBetween(code2) {
    if (code2 === 47) {
      effects.consume(code2);
      return end;
    }
    if (code2 === 58 || code2 === 95 || asciiAlpha(code2)) {
      effects.consume(code2);
      return tagOpenAttributeName;
    }
    if (markdownLineEnding(code2)) {
      returnState = tagOpenBetween;
      return lineEndingBefore(code2);
    }
    if (markdownSpace(code2)) {
      effects.consume(code2);
      return tagOpenBetween;
    }
    return end(code2);
  }
  // In an attribute name.
  function tagOpenAttributeName(code2) {
    if (code2 === 45 || code2 === 46 || code2 === 58 || code2 === 95 || asciiAlphanumeric(code2)) {
      effects.consume(code2);
      return tagOpenAttributeName;
    }
    return tagOpenAttributeNameAfter(code2);
  }
  // After an attribute name: `=` introduces a value.
  function tagOpenAttributeNameAfter(code2) {
    if (code2 === 61) {
      effects.consume(code2);
      return tagOpenAttributeValueBefore;
    }
    if (markdownLineEnding(code2)) {
      returnState = tagOpenAttributeNameAfter;
      return lineEndingBefore(code2);
    }
    if (markdownSpace(code2)) {
      effects.consume(code2);
      return tagOpenAttributeNameAfter;
    }
    return tagOpenBetween(code2);
  }
  // Before an attribute value: `"`/`'` opens a quoted value (quote kept in
  // `marker`); `<`, `=`, `>`, backtick, and EOF are invalid.
  function tagOpenAttributeValueBefore(code2) {
    if (code2 === null || code2 === 60 || code2 === 61 || code2 === 62 || code2 === 96) {
      return nok(code2);
    }
    if (code2 === 34 || code2 === 39) {
      effects.consume(code2);
      marker = code2;
      return tagOpenAttributeValueQuoted;
    }
    if (markdownLineEnding(code2)) {
      returnState = tagOpenAttributeValueBefore;
      return lineEndingBefore(code2);
    }
    if (markdownSpace(code2)) {
      effects.consume(code2);
      return tagOpenAttributeValueBefore;
    }
    effects.consume(code2);
    return tagOpenAttributeValueUnquoted;
  }
  // In a quoted attribute value: runs until the matching quote.
  function tagOpenAttributeValueQuoted(code2) {
    if (code2 === marker) {
      effects.consume(code2);
      marker = void 0;
      return tagOpenAttributeValueQuotedAfter;
    }
    if (code2 === null) {
      return nok(code2);
    }
    if (markdownLineEnding(code2)) {
      returnState = tagOpenAttributeValueQuoted;
      return lineEndingBefore(code2);
    }
    effects.consume(code2);
    return tagOpenAttributeValueQuoted;
  }
  // In an unquoted attribute value.
  function tagOpenAttributeValueUnquoted(code2) {
    if (code2 === null || code2 === 34 || code2 === 39 || code2 === 60 || code2 === 61 || code2 === 96) {
      return nok(code2);
    }
    if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
      return tagOpenBetween(code2);
    }
    effects.consume(code2);
    return tagOpenAttributeValueUnquoted;
  }
  // After a closing quote: a delimiter must follow immediately.
  function tagOpenAttributeValueQuotedAfter(code2) {
    if (code2 === 47 || code2 === 62 || markdownLineEndingOrSpace(code2)) {
      return tagOpenBetween(code2);
    }
    return nok(code2);
  }
  // `>` finishes the whole inline-HTML token.
  function end(code2) {
    if (code2 === 62) {
      effects.consume(code2);
      effects.exit("htmlTextData");
      effects.exit("htmlText");
      return ok3;
    }
    return nok(code2);
  }
  // Consume a line ending inside the construct; `returnState` resumes after.
  function lineEndingBefore(code2) {
    effects.exit("htmlTextData");
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    return lineEndingAfter;
  }
  // Optionally eat an indent prefix (up to 3 spaces when indented code is
  // enabled) on the continued line.
  function lineEndingAfter(code2) {
    return markdownSpace(code2) ? factorySpace(effects, lineEndingAfterPrefix, "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code2) : lineEndingAfterPrefix(code2);
  }
  function lineEndingAfterPrefix(code2) {
    effects.enter("htmlTextData");
    return returnState(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/label-end.js
// Construct for `]`, which closes a link/image label and may be followed by
// a resource `(...)`, a full reference `[...]`, or a collapsed reference `[]`.
var labelEnd = {
  name: "labelEnd",
  resolveAll: resolveAllLabelEnd,
  resolveTo: resolveToLabelEnd,
  tokenize: tokenizeLabelEnd
};
// `(destination "title")` after `]`.
var resourceConstruct = {
  tokenize: tokenizeResource
};
// `[label]` after `]`.
var referenceFullConstruct = {
  tokenize: tokenizeReferenceFull
};
// `[]` after `]`.
var referenceCollapsedConstruct = {
  tokenize: tokenizeReferenceCollapsed
};
/**
 * Turn any label starts/ends that were never matched into plain `data`
 * tokens, skipping over the marker events that belong to them
 * (4 events for `![`, 2 for `[` or `]`).
 */
function resolveAllLabelEnd(events) {
  const kept = [];
  let index = -1;
  while (++index < events.length) {
    const token = events[index][1];
    kept.push(events[index]);
    if (token.type === "labelImage" || token.type === "labelLink" || token.type === "labelEnd") {
      // Skip the marker events of this unmatched label; the decision is
      // based on the type *before* it is downgraded to `data`.
      index += token.type === "labelImage" ? 4 : 2;
      token.type = "data";
    }
  }
  // Only rewrite the list when some events were actually dropped.
  if (events.length !== kept.length) {
    splice(events, 0, events.length, kept);
  }
  return events;
}
// Resolve a matched label: find the open (`[` / `![`) and close (`]`) pair,
// wrap everything between them in `link`/`image` + `label` + `labelText`
// groups, and re-resolve the inner span with the text constructs.
function resolveToLabelEnd(events, context) {
  let index = events.length;
  // `offset` is 2 extra marker events when the opening is `![` (image).
  let offset = 0;
  let token;
  let open;
  let close;
  let media;
  // Walk backwards: first find the `labelEnd`, then the nearest unbalanced
  // label start; label starts inside a link become inactive (no nesting).
  while (index--) {
    token = events[index][1];
    if (open) {
      if (token.type === "link" || token.type === "labelLink" && token._inactive) {
        break;
      }
      if (events[index][0] === "enter" && token.type === "labelLink") {
        token._inactive = true;
      }
    } else if (close) {
      if (events[index][0] === "enter" && (token.type === "labelImage" || token.type === "labelLink") && !token._balanced) {
        open = index;
        if (token.type !== "labelLink") {
          offset = 2;
          break;
        }
      }
    } else if (token.type === "labelEnd") {
      close = index;
    }
  }
  // Whole media (link/image) token, spanning open start to the last event.
  const group = {
    type: events[open][1].type === "labelLink" ? "link" : "image",
    start: {
      ...events[open][1].start
    },
    end: {
      ...events[events.length - 1][1].end
    }
  };
  // The `[...]` part including markers.
  const label = {
    type: "label",
    start: {
      ...events[open][1].start
    },
    end: {
      ...events[close][1].end
    }
  };
  // The text between the markers.
  const text4 = {
    type: "labelText",
    start: {
      ...events[open + offset + 2][1].end
    },
    end: {
      ...events[close - 2][1].start
    }
  };
  media = [["enter", group, context], ["enter", label, context]];
  media = push(media, events.slice(open + 1, open + offset + 3));
  media = push(media, [["enter", text4, context]]);
  // Re-run inline resolution on the label text content.
  media = push(media, resolveAll(context.parser.constructs.insideSpan.null, events.slice(open + offset + 4, close - 3), context));
  media = push(media, [["exit", text4, context], events[close - 2], events[close - 1], ["exit", label, context]]);
  media = push(media, events.slice(close + 1));
  media = push(media, [["exit", group, context]]);
  splice(events, open, events.length, media);
  return events;
}
// Tokenize `]`: only valid when an unbalanced label start exists. What may
// follow decides the media kind: `(` resource, `[` reference, or nothing
// (shortcut reference, which requires the label to be a defined identifier).
function tokenizeLabelEnd(effects, ok3, nok) {
  const self2 = this;
  let index = self2.events.length;
  let labelStart;
  let defined;
  // Find the most recent unbalanced label start.
  while (index--) {
    if ((self2.events[index][1].type === "labelImage" || self2.events[index][1].type === "labelLink") && !self2.events[index][1]._balanced) {
      labelStart = self2.events[index][1];
      break;
    }
  }
  return start;
  function start(code2) {
    if (!labelStart) {
      return nok(code2);
    }
    // Inactive starts (inside an outer link) still need to be balanced so
    // they are not reused, but they cannot form media themselves.
    if (labelStart._inactive) {
      return labelEndNok(code2);
    }
    defined = self2.parser.defined.includes(normalizeIdentifier(self2.sliceSerialize({
      start: labelStart.end,
      end: self2.now()
    })));
    effects.enter("labelEnd");
    effects.enter("labelMarker");
    effects.consume(code2);
    effects.exit("labelMarker");
    effects.exit("labelEnd");
    return after;
  }
  function after(code2) {
    if (code2 === 40) {
      return effects.attempt(resourceConstruct, labelEndOk, defined ? labelEndOk : labelEndNok)(code2);
    }
    if (code2 === 91) {
      return effects.attempt(referenceFullConstruct, labelEndOk, defined ? referenceNotFull : labelEndNok)(code2);
    }
    return defined ? labelEndOk(code2) : labelEndNok(code2);
  }
  // `[` followed but not a full reference: maybe a collapsed one (`[]`).
  function referenceNotFull(code2) {
    return effects.attempt(referenceCollapsedConstruct, labelEndOk, labelEndNok)(code2);
  }
  function labelEndOk(code2) {
    return ok3(code2);
  }
  // Mark the start balanced so it is not matched again, then fail.
  function labelEndNok(code2) {
    labelStart._balanced = true;
    return nok(code2);
  }
}
// Tokenize a resource: `(destination)` optionally followed by whitespace and
// a `"title"`/`'title'`/`(title)`, closed with `)`.
function tokenizeResource(effects, ok3, nok) {
  return resourceStart;
  // At `(`.
  function resourceStart(code2) {
    effects.enter("resource");
    effects.enter("resourceMarker");
    effects.consume(code2);
    effects.exit("resourceMarker");
    return resourceBefore;
  }
  function resourceBefore(code2) {
    return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceOpen)(code2) : resourceOpen(code2);
  }
  // An immediate `)` means an empty resource; otherwise parse a destination
  // (at most 32 levels of balanced parens).
  function resourceOpen(code2) {
    if (code2 === 41) {
      return resourceEnd(code2);
    }
    return factoryDestination(effects, resourceDestinationAfter, resourceDestinationMissing, "resourceDestination", "resourceDestinationLiteral", "resourceDestinationLiteralMarker", "resourceDestinationRaw", "resourceDestinationString", 32)(code2);
  }
  // Whitespace after the destination is required before a title.
  function resourceDestinationAfter(code2) {
    return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceBetween)(code2) : resourceEnd(code2);
  }
  function resourceDestinationMissing(code2) {
    return nok(code2);
  }
  function resourceBetween(code2) {
    if (code2 === 34 || code2 === 39 || code2 === 40) {
      return factoryTitle(effects, resourceTitleAfter, nok, "resourceTitle", "resourceTitleMarker", "resourceTitleString")(code2);
    }
    return resourceEnd(code2);
  }
  function resourceTitleAfter(code2) {
    return markdownLineEndingOrSpace(code2) ? factoryWhitespace(effects, resourceEnd)(code2) : resourceEnd(code2);
  }
  // Expect the closing `)`.
  function resourceEnd(code2) {
    if (code2 === 41) {
      effects.enter("resourceMarker");
      effects.consume(code2);
      effects.exit("resourceMarker");
      effects.exit("resource");
      return ok3;
    }
    return nok(code2);
  }
}
// Tokenize a full reference (`[label]` after `]`); it only succeeds when the
// serialized label (without brackets) matches a defined identifier.
function tokenizeReferenceFull(effects, ok3, nok) {
  const self2 = this;
  return referenceFull;
  function referenceFull(code2) {
    return factoryLabel.call(self2, effects, referenceFullAfter, referenceFullMissing, "reference", "referenceMarker", "referenceString")(code2);
  }
  function referenceFullAfter(code2) {
    // `.slice(1, -1)` drops the surrounding `[` and `]`.
    return self2.parser.defined.includes(normalizeIdentifier(self2.sliceSerialize(self2.events[self2.events.length - 1][1]).slice(1, -1))) ? ok3(code2) : nok(code2);
  }
  function referenceFullMissing(code2) {
    return nok(code2);
  }
}
/**
 * Tokenize a collapsed reference: exactly `[]` right after a label end.
 */
function tokenizeReferenceCollapsed(effects, ok3, nok) {
  return referenceCollapsedStart;
  /** At the opening `[`. */
  function referenceCollapsedStart(code2) {
    effects.enter("reference");
    effects.enter("referenceMarker");
    effects.consume(code2);
    effects.exit("referenceMarker");
    return referenceCollapsedOpen;
  }
  /** Right after `[`: only an immediate `]` makes a collapsed reference. */
  function referenceCollapsedOpen(code2) {
    if (code2 !== 93) {
      return nok(code2);
    }
    effects.enter("referenceMarker");
    effects.consume(code2);
    effects.exit("referenceMarker");
    effects.exit("reference");
    return ok3;
  }
}
// node_modules/micromark-core-commonmark/lib/label-start-image.js
// Construct for `![`, the start of an image label.
var labelStartImage = {
  name: "labelStartImage",
  resolveAll: labelEnd.resolveAll,
  tokenize: tokenizeLabelStartImage
};
/**
 * Tokenize the start of an image label: `![`.
 * `![^` is rejected when hidden footnote support is enabled so the footnote
 * construct can take over.
 */
function tokenizeLabelStartImage(effects, ok3, nok) {
  const context = this;
  return start;
  /** At `!`. */
  function start(code2) {
    effects.enter("labelImage");
    effects.enter("labelImageMarker");
    effects.consume(code2);
    effects.exit("labelImageMarker");
    return open;
  }
  /** After `!`: only `[` completes the image-label start. */
  function open(code2) {
    if (code2 !== 91) {
      return nok(code2);
    }
    effects.enter("labelMarker");
    effects.consume(code2);
    effects.exit("labelMarker");
    effects.exit("labelImage");
    return after;
  }
  /** After `![`: defer `![^` to footnotes when that extension is active. */
  function after(code2) {
    if (code2 === 94 && "_hiddenFootnoteSupport" in context.parser.constructs) {
      return nok(code2);
    }
    return ok3(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/label-start-link.js
// Construct for `[`, the start of a link label.
var labelStartLink = {
  name: "labelStartLink",
  resolveAll: labelEnd.resolveAll,
  tokenize: tokenizeLabelStartLink
};
/**
 * Tokenize the start of a link label: `[`.
 * `[^` is rejected when hidden footnote support is enabled so the footnote
 * construct can take over.
 */
function tokenizeLabelStartLink(effects, ok3, nok) {
  const context = this;
  return start;
  /** At `[`: emit the label-link marker tokens. */
  function start(code2) {
    effects.enter("labelLink");
    effects.enter("labelMarker");
    effects.consume(code2);
    effects.exit("labelMarker");
    effects.exit("labelLink");
    return after;
  }
  /** After `[`: defer `[^` to footnotes when that extension is active. */
  function after(code2) {
    if (code2 === 94 && "_hiddenFootnoteSupport" in context.parser.constructs) {
      return nok(code2);
    }
    return ok3(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/line-ending.js
// Construct for a line ending in text, followed by optional indentation.
var lineEnding = {
  name: "lineEnding",
  tokenize: tokenizeLineEnding
};
function tokenizeLineEnding(effects, ok3) {
  return start;
  function start(code2) {
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    // Eat the next line's leading whitespace as a `linePrefix`.
    return factorySpace(effects, ok3, "linePrefix");
  }
}
// node_modules/micromark-core-commonmark/lib/thematic-break.js
// Construct for thematic breaks: three or more `*`, `-`, or `_`, optionally
// separated by spaces/tabs, alone on a line.
var thematicBreak = {
  name: "thematicBreak",
  tokenize: tokenizeThematicBreak
};
function tokenizeThematicBreak(effects, ok3, nok) {
  // Number of marker characters seen so far (must reach 3).
  let size = 0;
  // The marker character; all markers on the line must match it.
  let marker;
  return start;
  function start(code2) {
    effects.enter("thematicBreak");
    return before(code2);
  }
  function before(code2) {
    marker = code2;
    return atBreak(code2);
  }
  // Between sequences: another marker, the end of the line (success when at
  // least 3 markers were seen), or failure.
  function atBreak(code2) {
    if (code2 === marker) {
      effects.enter("thematicBreakSequence");
      return sequence(code2);
    }
    if (size >= 3 && (code2 === null || markdownLineEnding(code2))) {
      effects.exit("thematicBreak");
      return ok3(code2);
    }
    return nok(code2);
  }
  // In a run of identical markers.
  function sequence(code2) {
    if (code2 === marker) {
      effects.consume(code2);
      size++;
      return sequence;
    }
    effects.exit("thematicBreakSequence");
    return markdownSpace(code2) ? factorySpace(effects, atBreak, "whitespace")(code2) : atBreak(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/list.js
// Container construct for ordered and unordered lists.
var list = {
  continuation: {
    tokenize: tokenizeListContinuation
  },
  exit: tokenizeListEnd,
  name: "list",
  tokenize: tokenizeListStart
};
// Partial: whitespace after a list item marker.
var listItemPrefixWhitespaceConstruct = {
  partial: true,
  tokenize: tokenizeListItemPrefixWhitespace
};
// Partial: indentation that keeps a line inside the current list item.
var indentConstruct = {
  partial: true,
  tokenize: tokenizeIndent
};
// Tokenize the start of a list item: `*`/`+`/`-` for unordered, digits plus
// `.`/`)` for ordered. `containerState` is shared across items of one list
// so later items must reuse the same marker.
function tokenizeListStart(effects, ok3, nok) {
  const self2 = this;
  const tail = self2.events[self2.events.length - 1];
  // Indentation already consumed before the marker counts toward the item
  // size used for continuation lines.
  let initialSize = tail && tail[1].type === "linePrefix" ? tail[2].sliceSerialize(tail[1], true).length : 0;
  // Number of digits in an ordered list value (capped at 10).
  let size = 0;
  return start;
  function start(code2) {
    const kind = self2.containerState.type || (code2 === 42 || code2 === 43 || code2 === 45 ? "listUnordered" : "listOrdered");
    if (kind === "listUnordered" ? !self2.containerState.marker || code2 === self2.containerState.marker : asciiDigit(code2)) {
      if (!self2.containerState.type) {
        self2.containerState.type = kind;
        effects.enter(kind, {
          _container: true
        });
      }
      if (kind === "listUnordered") {
        effects.enter("listItemPrefix");
        // `*` and `-` could also be a thematic break; rule that out first.
        return code2 === 42 || code2 === 45 ? effects.check(thematicBreak, nok, atMarker)(code2) : atMarker(code2);
      }
      // An ordered list can only interrupt a paragraph when it starts at 1.
      if (!self2.interrupt || code2 === 49) {
        effects.enter("listItemPrefix");
        effects.enter("listItemValue");
        return inside(code2);
      }
    }
    return nok(code2);
  }
  // Digits of an ordered item value (at most 9 before the current one).
  function inside(code2) {
    if (asciiDigit(code2) && ++size < 10) {
      effects.consume(code2);
      return inside;
    }
    if ((!self2.interrupt || size < 2) && (self2.containerState.marker ? code2 === self2.containerState.marker : code2 === 41 || code2 === 46)) {
      effects.exit("listItemValue");
      return atMarker(code2);
    }
    return nok(code2);
  }
  // At the item marker itself (`*`, `+`, `-`, `.`, or `)`).
  function atMarker(code2) {
    effects.enter("listItemMarker");
    effects.consume(code2);
    effects.exit("listItemMarker");
    self2.containerState.marker = self2.containerState.marker || code2;
    return effects.check(
      blankLine,
      // Can't be empty when interrupting.
      self2.interrupt ? nok : onBlank,
      effects.attempt(listItemPrefixWhitespaceConstruct, endOfPrefix, otherPrefix)
    );
  }
  // Marker followed by a blank line: an empty first line counts as 1 column.
  function onBlank(code2) {
    self2.containerState.initialBlankLine = true;
    initialSize++;
    return endOfPrefix(code2);
  }
  // Marker followed by more than 4 spaces: consume exactly one space.
  function otherPrefix(code2) {
    if (markdownSpace(code2)) {
      effects.enter("listItemPrefixWhitespace");
      effects.consume(code2);
      effects.exit("listItemPrefixWhitespace");
      return endOfPrefix;
    }
    return nok(code2);
  }
  // Record the total prefix width; continuation lines must match it.
  function endOfPrefix(code2) {
    self2.containerState.size = initialSize + self2.sliceSerialize(effects.exit("listItemPrefix"), true).length;
    return ok3(code2);
  }
}
// Decide whether a new line continues the current list item: blank lines
// stay inside (unless the item started blank), indented lines stay inside,
// anything else may start a new item or close the list.
function tokenizeListContinuation(effects, ok3, nok) {
  const self2 = this;
  self2.containerState._closeFlow = void 0;
  return effects.check(blankLine, onBlank, notBlank);
  function onBlank(code2) {
    // A blank line after an initially-blank item ends the item.
    self2.containerState.furtherBlankLines = self2.containerState.furtherBlankLines || self2.containerState.initialBlankLine;
    return factorySpace(effects, ok3, "listItemIndent", self2.containerState.size + 1)(code2);
  }
  function notBlank(code2) {
    if (self2.containerState.furtherBlankLines || !markdownSpace(code2)) {
      self2.containerState.furtherBlankLines = void 0;
      self2.containerState.initialBlankLine = void 0;
      return notInCurrentItem(code2);
    }
    self2.containerState.furtherBlankLines = void 0;
    self2.containerState.initialBlankLine = void 0;
    return effects.attempt(indentConstruct, ok3, notInCurrentItem)(code2);
  }
  // Not enough indent for the current item: try to start a sibling item,
  // closing the current flow either way.
  function notInCurrentItem(code2) {
    self2.containerState._closeFlow = true;
    self2.interrupt = void 0;
    return factorySpace(effects, effects.attempt(list, ok3, nok), "linePrefix", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4)(code2);
  }
}
// Partial: succeed when the line is indented exactly to the item's width.
function tokenizeIndent(effects, ok3, nok) {
  const self2 = this;
  return factorySpace(effects, afterPrefix, "listItemIndent", self2.containerState.size + 1);
  function afterPrefix(code2) {
    const tail = self2.events[self2.events.length - 1];
    return tail && tail[1].type === "listItemIndent" && tail[2].sliceSerialize(tail[1], true).length === self2.containerState.size ? ok3(code2) : nok(code2);
  }
}
/**
 * When the list container closes, exit its `listOrdered`/`listUnordered`
 * token (whichever was recorded in the container state).
 */
function tokenizeListEnd(effects) {
  const { containerState } = this;
  effects.exit(containerState.type);
}
// Partial: whitespace after a list item marker — at most 4 spaces plus the
// marker column (more would make the content indented code).
function tokenizeListItemPrefixWhitespace(effects, ok3, nok) {
  const self2 = this;
  return factorySpace(effects, afterPrefix, "listItemPrefixWhitespace", self2.parser.constructs.disable.null.includes("codeIndented") ? void 0 : 4 + 1);
  function afterPrefix(code2) {
    const tail = self2.events[self2.events.length - 1];
    // Some whitespace must have been eaten and none may remain.
    return !markdownSpace(code2) && tail && tail[1].type === "listItemPrefixWhitespace" ? ok3(code2) : nok(code2);
  }
}
// node_modules/micromark-core-commonmark/lib/setext-underline.js
// Construct for setext heading underlines (`===` / `---` under a paragraph).
var setextUnderline = {
  name: "setextUnderline",
  resolveTo: resolveToSetextUnderline,
  tokenize: tokenizeSetextUnderline
};
// Rewrite the events of the preceding paragraph into a setext heading:
// the `content` wrapper becomes (or is wrapped by) a `setextHeading`, and
// the `paragraph` token becomes `setextHeadingText`.
function resolveToSetextUnderline(events, context) {
  let index = events.length;
  // Indices found while walking backwards:
  let content3;
  let text4;
  let definition2;
  while (index--) {
    if (events[index][0] === "enter") {
      if (events[index][1].type === "content") {
        content3 = index;
        break;
      }
      if (events[index][1].type === "paragraph") {
        text4 = index;
      }
    } else {
      if (events[index][1].type === "content") {
        // Drop the `content` exit; the heading exit replaces it.
        events.splice(index, 1);
      }
      if (!definition2 && events[index][1].type === "definition") {
        definition2 = index;
      }
    }
  }
  const heading2 = {
    type: "setextHeading",
    start: {
      ...events[content3][1].start
    },
    end: {
      ...events[events.length - 1][1].end
    }
  };
  events[text4][1].type = "setextHeadingText";
  // Definitions may sit between the heading text and the underline; keep
  // them outside the heading by inserting enter/exit around them.
  if (definition2) {
    events.splice(text4, 0, ["enter", heading2, context]);
    events.splice(definition2 + 1, 0, ["exit", events[content3][1], context]);
    events[content3][1].end = {
      ...events[definition2][1].end
    };
  } else {
    events[content3][1] = heading2;
  }
  events.push(["exit", heading2, context]);
  return events;
}
// Tokenize a setext underline: a run of `=` or `-` (with optional trailing
// whitespace) directly under paragraph-like content.
function tokenizeSetextUnderline(effects, ok3, nok) {
  const self2 = this;
  // The underline character; the whole sequence must repeat it.
  let marker;
  return start;
  function start(code2) {
    let index = self2.events.length;
    let paragraph2;
    // Skip trailing whitespace-ish events to see whether a paragraph is
    // directly above.
    while (index--) {
      if (self2.events[index][1].type !== "lineEnding" && self2.events[index][1].type !== "linePrefix" && self2.events[index][1].type !== "content") {
        paragraph2 = self2.events[index][1].type === "paragraph";
        break;
      }
    }
    // Only valid when interrupting a paragraph on a non-lazy line.
    if (!self2.parser.lazy[self2.now().line] && (self2.interrupt || paragraph2)) {
      effects.enter("setextHeadingLine");
      marker = code2;
      return before(code2);
    }
    return nok(code2);
  }
  function before(code2) {
    effects.enter("setextHeadingLineSequence");
    return inside(code2);
  }
  function inside(code2) {
    if (code2 === marker) {
      effects.consume(code2);
      return inside;
    }
    effects.exit("setextHeadingLineSequence");
    return markdownSpace(code2) ? factorySpace(effects, after, "lineSuffix")(code2) : after(code2);
  }
  // Only whitespace may follow the sequence before the end of the line.
  function after(code2) {
    if (code2 === null || markdownLineEnding(code2)) {
      effects.exit("setextHeadingLine");
      return ok3(code2);
    }
    return nok(code2);
  }
}
// node_modules/micromark/lib/initialize/flow.js
// The initial tokenizer for flow (block-level) content.
var flow = {
  tokenize: initializeFlow
};
function initializeFlow(effects) {
  const self2 = this;
  // For each line: try a blank line, then the "initial" flow constructs
  // (indented code), then regular flow constructs, falling back to content.
  const initial = effects.attempt(
    // Try to parse a blank line.
    blankLine,
    atBlankEnding,
    // Try to parse initial flow (essentially, only code).
    effects.attempt(this.parser.constructs.flowInitial, afterConstruct, factorySpace(effects, effects.attempt(this.parser.constructs.flow, afterConstruct, effects.attempt(content2, afterConstruct)), "linePrefix"))
  );
  return initial;
  // After a blank line: consume its ending and loop.
  function atBlankEnding(code2) {
    if (code2 === null) {
      effects.consume(code2);
      return;
    }
    effects.enter("lineEndingBlank");
    effects.consume(code2);
    effects.exit("lineEndingBlank");
    self2.currentConstruct = void 0;
    return initial;
  }
  // After any flow construct: consume the line ending and loop.
  function afterConstruct(code2) {
    if (code2 === null) {
      effects.consume(code2);
      return;
    }
    effects.enter("lineEnding");
    effects.consume(code2);
    effects.exit("lineEnding");
    self2.currentConstruct = void 0;
    return initial;
  }
}
// node_modules/micromark/lib/initialize/text.js
// Initial tokenizers for the two inline content types: `string` (only
// character references/escapes) and `text` (all inline constructs).
var resolver = {
  resolveAll: createResolver()
};
var string = initializeFactory("string");
var text = initializeFactory("text");
// Build the initial construct for an inline content type (`"string"` or
// `"text"`): characters with registered constructs are attempted, everything
// else is collected into `data` tokens.
function initializeFactory(field) {
  return {
    // `text` additionally post-processes trailing spaces / hard breaks.
    resolveAll: createResolver(field === "text" ? resolveAllLineSuffixes : void 0),
    tokenize: initializeText
  };
  function initializeText(effects) {
    const self2 = this;
    const constructs2 = this.parser.constructs[field];
    const text4 = effects.attempt(constructs2, start, notText);
    return start;
    function start(code2) {
      return atBreak(code2) ? text4(code2) : notText(code2);
    }
    // Plain characters: accumulate into a `data` token.
    function notText(code2) {
      if (code2 === null) {
        effects.consume(code2);
        return;
      }
      effects.enter("data");
      effects.consume(code2);
      return data;
    }
    function data(code2) {
      if (atBreak(code2)) {
        effects.exit("data");
        return text4(code2);
      }
      effects.consume(code2);
      return data;
    }
    // Whether a construct could start at this character (honoring each
    // construct's `previous` check).
    function atBreak(code2) {
      if (code2 === null) {
        return true;
      }
      const list3 = constructs2[code2];
      let index = -1;
      if (list3) {
        while (++index < list3.length) {
          const item = list3[index];
          if (!item.previous || item.previous.call(self2, self2.previous)) {
            return true;
          }
        }
      }
      return false;
    }
  }
}
/**
 * Create a `resolveAll` handler that merges runs of adjacent `data` events
 * into a single token, then optionally hands the result to `extraResolver`.
 */
function createResolver(extraResolver) {
  return resolveAllText;
  function resolveAllText(events, context) {
    let enter;
    let index = -1;
    while (++index <= events.length) {
      if (enter === void 0) {
        // Look for the first event of a run of `data` tokens.
        if (events[index] && events[index][1].type === "data") {
          enter = index;
          index++;
        }
      } else if (!events[index] || events[index][1].type !== "data") {
        // Run ended: collapse everything into the first `data` token.
        if (index !== enter + 2) {
          events[enter][1].end = events[index - 1][1].end;
          events.splice(enter + 2, index - enter - 2);
          index = enter + 2;
        }
        enter = void 0;
      }
    }
    if (extraResolver) {
      return extraResolver(events, context);
    }
    return events;
  }
}
/**
 * Split trailing spaces/tabs off `data` tokens that end a line (or the
 * input) into `lineSuffix` tokens — or `hardBreakTrailing` when two or more
 * trailing spaces precede a line ending.
 */
function resolveAllLineSuffixes(events, context) {
  let at = 0;
  while (++at <= events.length) {
    const atEnd = at === events.length;
    // Only act right after a `data` token that sits before a line ending
    // (or at the very end of the event list).
    if (!(atEnd || events[at][1].type === "lineEnding") || events[at - 1][1].type !== "data") {
      continue;
    }
    const data = events[at - 1][1];
    const chunks = context.sliceStream(data);
    let chunkIndex = chunks.length;
    let bufferIndex = -1;
    let trailing = 0;
    let sawTab;
    // Walk the chunks backwards, counting trailing spaces and virtual tab
    // columns (-2), stopping at the first non-whitespace character.
    while (chunkIndex--) {
      const chunk = chunks[chunkIndex];
      if (typeof chunk === "string") {
        bufferIndex = chunk.length;
        while (chunk.charCodeAt(bufferIndex - 1) === 32) {
          trailing++;
          bufferIndex--;
        }
        if (bufferIndex) break;
        bufferIndex = -1;
      } else if (chunk === -2) {
        sawTab = true;
        trailing++;
      } else if (chunk === -1) {
        // Virtual space: ignore.
      } else {
        // Some other character code: stop just after it.
        chunkIndex++;
        break;
      }
    }
    if (context._contentTypeTextTrailing && atEnd) {
      trailing = 0;
    }
    if (trailing) {
      const token = {
        // Tabs and single spaces are plain suffixes; two or more spaces
        // before an EOL form a hard break.
        type: atEnd || sawTab || trailing < 2 ? "lineSuffix" : "hardBreakTrailing",
        start: {
          _bufferIndex: chunkIndex ? bufferIndex : data.start._bufferIndex + bufferIndex,
          _index: data.start._index + chunkIndex,
          line: data.end.line,
          column: data.end.column - trailing,
          offset: data.end.offset - trailing
        },
        end: {
          ...data.end
        }
      };
      data.end = {
        ...token.start
      };
      if (data.start.offset === data.end.offset) {
        // The data token was nothing but whitespace: morph it in place.
        Object.assign(data, token);
      } else {
        events.splice(at, 0, ["enter", token, context], ["exit", token, context]);
        at += 2;
      }
    }
    at++;
  }
  return events;
}
// node_modules/micromark/lib/constructs.js
// Default CommonMark construct maps, keyed by the character code that may
// start each construct.
var constructs_exports = {};
__export(constructs_exports, {
  attentionMarkers: () => attentionMarkers,
  contentInitial: () => contentInitial,
  disable: () => disable,
  document: () => document3,
  flow: () => flow2,
  flowInitial: () => flowInitial,
  insideSpan: () => insideSpan,
  string: () => string2,
  text: () => text2
});
// Containers: `*`, `+`, `-`, digits start lists; `>` starts block quotes.
var document3 = {
  [42]: list,
  [43]: list,
  [45]: list,
  [48]: list,
  [49]: list,
  [50]: list,
  [51]: list,
  [52]: list,
  [53]: list,
  [54]: list,
  [55]: list,
  [56]: list,
  [57]: list,
  [62]: blockQuote
};
// Before content: `[` may start a definition.
var contentInitial = {
  [91]: definition
};
// Initial flow: tab (-2), virtual space (-1), and space start indented code.
var flowInitial = {
  [-2]: codeIndented,
  [-1]: codeIndented,
  [32]: codeIndented
};
// Flow constructs: `#` ATX heading, `*`/`-`/`_` thematic break, `-`/`=`
// setext underline, `<` HTML, backtick/tilde fenced code.
var flow2 = {
  [35]: headingAtx,
  [42]: thematicBreak,
  [45]: [setextUnderline, thematicBreak],
  [60]: htmlFlow,
  [61]: setextUnderline,
  [95]: thematicBreak,
  [96]: codeFenced,
  [126]: codeFenced
};
// Strings (e.g. titles): only `&` references and `\` escapes.
var string2 = {
  [38]: characterReference,
  [92]: characterEscape
};
// Text: line endings (-5/-4/-3), `!` image, `&` reference, `*`/`_`
// attention, `<` autolink or HTML, `[`/`]` labels, `\` breaks/escapes,
// backtick code.
var text2 = {
  [-5]: lineEnding,
  [-4]: lineEnding,
  [-3]: lineEnding,
  [33]: labelStartImage,
  [38]: characterReference,
  [42]: attention,
  [60]: [autolink, htmlText],
  [91]: labelStartLink,
  [92]: [hardBreakEscape, characterEscape],
  [93]: labelEnd,
  [95]: attention,
  [96]: codeText
};
// Resolvers run over the content of labels.
var insideSpan = {
  null: [attention, resolver]
};
// Characters that can both open and close attention runs.
var attentionMarkers = {
  null: [42, 95]
};
// Names of constructs to disable (none by default).
var disable = {
  null: []
};
// node_modules/micromark/lib/create-tokenizer.js
function createTokenizer(parser, initialize, from) {
  // Create a tokenizer for one content type (`initialize`): feeds character
  // codes from the chunk stream into a state machine and records events.
  // Current place in the stream: `_index` is the chunk index and
  // `_bufferIndex` the position inside a string chunk (-1 when not inside one).
  let point2 = {
    _bufferIndex: -1,
    _index: 0,
    line: from && from.line || 1,
    column: from && from.column || 1,
    offset: from && from.offset || 0
  };
  // Map of line number -> column to jump to (for container continuation).
  const columnStart = {};
  const resolveAllConstructs = [];
  let chunks = [];
  let stack = [];
  let consumed = true;
  // Effects constructs can use while tokenizing.
  const effects = {
    attempt: constructFactory(onsuccessfulconstruct),
    check: constructFactory(onsuccessfulcheck),
    consume,
    enter,
    exit: exit2,
    interrupt: constructFactory(onsuccessfulcheck, {
      interrupt: true
    })
  };
  // Public tokenizer context, returned to the caller.
  const context = {
    code: null,
    containerState: {},
    defineSkip,
    events: [],
    now,
    parser,
    previous: null,
    sliceSerialize,
    sliceStream,
    write
  };
  let state = initialize.tokenize.call(context, effects);
  let expectedCode;
  if (initialize.resolveAll) {
    resolveAllConstructs.push(initialize);
  }
  return context;
  // Feed more chunks; returns all events once the stream ended (a trailing
  // `null` chunk), otherwise an empty array.
  function write(slice) {
    chunks = push(chunks, slice);
    main();
    if (chunks[chunks.length - 1] !== null) {
      return [];
    }
    addResult(initialize, 0);
    context.events = resolveAll(resolveAllConstructs, context.events, context);
    return context.events;
  }
  // Get the source text of a token (optionally with tabs expanded).
  function sliceSerialize(token, expandTabs) {
    return serializeChunks(sliceStream(token), expandTabs);
  }
  // Get the chunks a token spans.
  function sliceStream(token) {
    return sliceChunks(chunks, token);
  }
  // Snapshot the current point.
  function now() {
    const {
      _bufferIndex,
      _index,
      line,
      column,
      offset
    } = point2;
    return {
      _bufferIndex,
      _index,
      line,
      column,
      offset
    };
  }
  // Remember the column to skip to when a new line of this number starts.
  function defineSkip(value) {
    columnStart[value.line] = value.column;
    accountForPotentialSkip();
  }
  // Run the state machine over all buffered chunks.
  function main() {
    let chunkIndex;
    while (point2._index < chunks.length) {
      const chunk = chunks[point2._index];
      if (typeof chunk === "string") {
        chunkIndex = point2._index;
        if (point2._bufferIndex < 0) {
          point2._bufferIndex = 0;
        }
        // Stop early when a state consumed into a different chunk.
        while (point2._index === chunkIndex && point2._bufferIndex < chunk.length) {
          go(chunk.charCodeAt(point2._bufferIndex));
        }
      } else {
        go(chunk);
      }
    }
  }
  // Transition the state machine on one code.
  function go(code2) {
    consumed = void 0;
    expectedCode = code2;
    state = state(code2);
  }
  // Mark a code as consumed and advance the point accordingly.
  function consume(code2) {
    if (markdownLineEnding(code2)) {
      point2.line++;
      point2.column = 1;
      // -3 is CRLF, which occupies two offsets.
      point2.offset += code2 === -3 ? 2 : 1;
      accountForPotentialSkip();
    } else if (code2 !== -1) {
      // -1 is a virtual space: no column/offset movement.
      point2.column++;
      point2.offset++;
    }
    if (point2._bufferIndex < 0) {
      point2._index++;
    } else {
      point2._bufferIndex++;
      if (point2._bufferIndex === // Points w/ non-negative `_bufferIndex` reference
      // strings.
      /** @type {string} */
      chunks[point2._index].length) {
        point2._bufferIndex = -1;
        point2._index++;
      }
    }
    context.previous = code2;
    consumed = true;
  }
  // Open a token of `type` at the current point and push it on the stack.
  function enter(type, fields) {
    const token = fields || {};
    token.type = type;
    token.start = now();
    context.events.push(["enter", token, context]);
    stack.push(token);
    return token;
  }
  // Close the most recently opened token at the current point.
  function exit2(type) {
    const token = stack.pop();
    token.end = now();
    context.events.push(["exit", token, context]);
    return token;
  }
  // `attempt`: keep the result of a successful construct.
  function onsuccessfulconstruct(construct, info) {
    addResult(construct, info.from);
  }
  // `check`/`interrupt`: discard the result, rewinding to where it started.
  function onsuccessfulcheck(_, info) {
    info.restore();
  }
  // Factory for `attempt`/`check`/`interrupt`: tries constructs (a single
  // one, a list, or a code -> construct map) until one succeeds.
  function constructFactory(onreturn, fields) {
    return hook;
    function hook(constructs2, returnState, bogusState) {
      let listOfConstructs;
      let constructIndex;
      let currentConstruct;
      let info;
      return Array.isArray(constructs2) ? (
        /* c8 ignore next 1 */
        handleListOfConstructs(constructs2)
      ) : "tokenize" in constructs2 ? (
        // Looks like a construct.
        handleListOfConstructs([
          /** @type {Construct} */
          constructs2
        ])
      ) : handleMapOfConstructs(constructs2);
      function handleMapOfConstructs(map) {
        return start;
        function start(code2) {
          const left = code2 !== null && map[code2];
          const all2 = code2 !== null && map.null;
          const list3 = [
            // To do: add more extension tests.
            /* c8 ignore next 2 */
            ...Array.isArray(left) ? left : left ? [left] : [],
            ...Array.isArray(all2) ? all2 : all2 ? [all2] : []
          ];
          return handleListOfConstructs(list3)(code2);
        }
      }
      function handleListOfConstructs(list3) {
        listOfConstructs = list3;
        constructIndex = 0;
        if (list3.length === 0) {
          return bogusState;
        }
        return handleConstruct(list3[constructIndex]);
      }
      function handleConstruct(construct) {
        return start;
        function start(code2) {
          // Snapshot state so a failed construct can be rewound.
          info = store();
          currentConstruct = construct;
          if (!construct.partial) {
            context.currentConstruct = construct;
          }
          if (construct.name && context.parser.constructs.disable.null.includes(construct.name)) {
            return nok(code2);
          }
          return construct.tokenize.call(
            // If we do have fields, create an object w/ `context` as its
            // prototype.
            // This allows a “live binding”, which is needed for `interrupt`.
            fields ? Object.assign(Object.create(context), fields) : context,
            effects,
            ok3,
            nok
          )(code2);
        }
      }
      function ok3(code2) {
        consumed = true;
        onreturn(currentConstruct, info);
        return returnState;
      }
      function nok(code2) {
        consumed = true;
        info.restore();
        // Try the next construct in the list, or give up.
        if (++constructIndex < listOfConstructs.length) {
          return handleConstruct(listOfConstructs[constructIndex]);
        }
        return bogusState;
      }
    }
  }
  // Run a successful construct's resolvers over the events it produced.
  function addResult(construct, from2) {
    if (construct.resolveAll && !resolveAllConstructs.includes(construct)) {
      resolveAllConstructs.push(construct);
    }
    if (construct.resolve) {
      splice(context.events, from2, context.events.length - from2, construct.resolve(context.events.slice(from2), context));
    }
    if (construct.resolveTo) {
      context.events = construct.resolveTo(context.events, context);
    }
  }
  // Capture the full tokenizer state; `restore` rewinds to it.
  function store() {
    const startPoint = now();
    const startPrevious = context.previous;
    const startCurrentConstruct = context.currentConstruct;
    const startEventsIndex = context.events.length;
    const startStack = Array.from(stack);
    return {
      from: startEventsIndex,
      restore
    };
    function restore() {
      point2 = startPoint;
      context.previous = startPrevious;
      context.currentConstruct = startCurrentConstruct;
      context.events.length = startEventsIndex;
      stack = startStack;
      accountForPotentialSkip();
    }
  }
  // Jump to a previously defined column for the current line, if any.
  function accountForPotentialSkip() {
    if (point2.line in columnStart && point2.column < 2) {
      point2.column = columnStart[point2.line];
      point2.offset += columnStart[point2.line] - 1;
    }
  }
}
function sliceChunks(chunks, token) {
  // Copy the chunks a token spans, trimming the first and last string
  // chunks to the token's `_bufferIndex` offsets.
  const { _index: startIndex, _bufferIndex: startBufferIndex } = token.start;
  const { _index: endIndex, _bufferIndex: endBufferIndex } = token.end;
  if (startIndex === endIndex) {
    // Token lives inside a single string chunk.
    return [chunks[startIndex].slice(startBufferIndex, endBufferIndex)];
  }
  const view = chunks.slice(startIndex, endIndex);
  if (startBufferIndex > -1) {
    const first = view[0];
    if (typeof first === "string") {
      view[0] = first.slice(startBufferIndex);
    } else {
      // A positive buffer index on a code chunk means it is fully consumed.
      view.shift();
    }
  }
  if (endBufferIndex > 0) {
    view.push(chunks[endIndex].slice(0, endBufferIndex));
  }
  return view;
}
function serializeChunks(chunks, expandTabs) {
  // Turn chunks (strings and numeric character codes) back into a string;
  // when `expandTabs` is set, tabs render as the spaces they occupy.
  const parts = [];
  let previousWasTab = false;
  for (const chunk of chunks) {
    if (typeof chunk === "string") {
      parts.push(chunk);
      previousWasTab = false;
      continue;
    }
    let value;
    switch (chunk) {
      case -5: {
        value = "\r";
        break;
      }
      case -4: {
        value = "\n";
        break;
      }
      case -3: {
        value = "\r\n";
        break;
      }
      case -2: {
        value = expandTabs ? " " : "\t";
        break;
      }
      case -1: {
        // Virtual spaces padding a tab stop: dropped unless expanding tabs.
        if (!expandTabs && previousWasTab) continue;
        value = " ";
        break;
      }
      default: {
        value = String.fromCharCode(chunk);
      }
    }
    previousWasTab = chunk === -2;
    parts.push(value);
  }
  return parts.join("");
}
// node_modules/micromark/lib/parse.js
function parse(options) {
  // Build a micromark parser whose content-type tokenizer factories all
  // share one combined set of constructs and one `defined` identifier list.
  const extensions = (options || {}).extensions || [];
  const allConstructs = (
    /** @type {FullNormalizedExtension} */
    combineExtensions([constructs_exports, ...extensions])
  );
  const parser = {
    constructs: allConstructs,
    content: create(content),
    defined: [],
    document: create(document2),
    flow: create(flow),
    lazy: {},
    string: create(string),
    text: create(text)
  };
  return parser;
  // Wrap a content type so each call spawns a fresh tokenizer at `from`.
  function create(initial) {
    return function creator(from) {
      return createTokenizer(parser, initial, from);
    };
  }
}
// node_modules/micromark/lib/postprocess.js
function postprocess(events) {
  // Repeatedly subtokenize until every nested content token is expanded.
  let done = false;
  while (!done) {
    done = subtokenize(events);
  }
  return events;
}
// node_modules/micromark/lib/preprocess.js
var search = /[\0\t\n\r]/g;
function preprocess() {
  // Create a stateful streaming preprocessor: splits incoming text into
  // chunks, replacing NUL with U+FFFD (65533), tabs with -2 plus -1 virtual
  // spaces up to the next tab stop, and line endings with -5/-4/-3.
  let column = 1;
  let buffer = "";
  let start = true;
  let atCarriageReturn;
  return preprocessor;
  function preprocessor(value, encoding, end) {
    const chunks = [];
    let startPosition = 0;
    // Prepend leftover text from the previous call; decode binary input.
    value = buffer + (typeof value === "string" ? value.toString() : new TextDecoder(encoding || void 0).decode(value));
    buffer = "";
    if (start) {
      // Strip a byte-order mark at the very start of the stream.
      if (value.charCodeAt(0) === 65279) {
        startPosition++;
      }
      start = void 0;
    }
    while (startPosition < value.length) {
      search.lastIndex = startPosition;
      const match = search.exec(value);
      const endPosition = match && match.index !== void 0 ? match.index : value.length;
      const code2 = value.charCodeAt(endPosition);
      if (!match) {
        // Keep the tail around: it may be completed by the next call.
        buffer = value.slice(startPosition);
        break;
      }
      if (code2 === 10 && startPosition === endPosition && atCarriageReturn) {
        // LF directly after a pending CR: a single CRLF ending.
        chunks.push(-3);
        atCarriageReturn = void 0;
      } else {
        if (atCarriageReturn) {
          chunks.push(-5);
          atCarriageReturn = void 0;
        }
        if (startPosition < endPosition) {
          chunks.push(value.slice(startPosition, endPosition));
          column += endPosition - startPosition;
        }
        if (code2 === 0) {
          chunks.push(65533);
          column++;
        } else if (code2 === 9) {
          // Tab: emit -2 plus virtual spaces up to the 4-column tab stop.
          const next = Math.ceil(column / 4) * 4;
          chunks.push(-2);
          while (column++ < next) chunks.push(-1);
        } else if (code2 === 10) {
          chunks.push(-4);
          column = 1;
        } else {
          // CR: defer, as the next character may be an LF (CRLF).
          atCarriageReturn = true;
          column = 1;
        }
      }
      startPosition = endPosition + 1;
    }
    if (end) {
      if (atCarriageReturn) chunks.push(-5);
      if (buffer) chunks.push(buffer);
      chunks.push(null);
    }
    return chunks;
  }
}
// node_modules/micromark-util-decode-string/index.js
var characterEscapeOrReference = /\\([!-/:-@[-`{-~])|&(#(?:\d{1,7}|x[\da-f]{1,6})|[\da-z]{1,31});/gi;
function decodeString(value) {
  // Resolve backslash escapes and character references in a string.
  return value.replace(characterEscapeOrReference, decode);
}
function decode($0, $1, $2) {
  // Backslash escape: the escaped ASCII punctuation stands for itself.
  if ($1) {
    return $1;
  }
  // `&#…;` / `&#x…;`: numeric character reference (decimal or hex).
  if ($2.charCodeAt(0) === 35) {
    const second = $2.charCodeAt(1);
    const isHex = second === 120 || second === 88;
    return decodeNumericCharacterReference($2.slice(isHex ? 2 : 1), isHex ? 16 : 10);
  }
  // Otherwise a named reference; keep the raw match when unknown.
  return decodeNamedCharacterReference($2) || $0;
}
// node_modules/mdast-util-from-markdown/lib/index.js
var own = Object.prototype.hasOwnProperty;
function fromMarkdown(value, encoding, options) {
  // Parse markdown into an mdast tree. Supports `(value, options)`: when
  // `encoding` is not a string it is treated as the options bag.
  if (typeof encoding !== "string") {
    options = encoding;
    encoding = void 0;
  }
  const events = postprocess(parse(options).document().write(preprocess()(value, encoding, true)));
  return compiler(options)(events);
}
function compiler(options) {
const config = {
transforms: [],
canContainEols: ["emphasis", "fragment", "heading", "paragraph", "strong"],
enter: {
autolink: opener(link2),
autolinkProtocol: onenterdata,
autolinkEmail: onenterdata,
atxHeading: opener(heading2),
blockQuote: opener(blockQuote2),
characterEscape: onenterdata,
characterReference: onenterdata,
codeFenced: opener(codeFlow),
codeFencedFenceInfo: buffer,
codeFencedFenceMeta: buffer,
codeIndented: opener(codeFlow, buffer),
codeText: opener(codeText2, buffer),
codeTextData: onenterdata,
data: onenterdata,
codeFlowValue: onenterdata,
definition: opener(definition2),
definitionDestinationString: buffer,
definitionLabelString: buffer,
definitionTitleString: buffer,
emphasis: opener(emphasis2),
hardBreakEscape: opener(hardBreak2),
hardBreakTrailing: opener(hardBreak2),
htmlFlow: opener(html2, buffer),
htmlFlowData: onenterdata,
htmlText: opener(html2, buffer),
htmlTextData: onenterdata,
image: opener(image2),
label: buffer,
link: opener(link2),
listItem: opener(listItem2),
listItemValue: onenterlistitemvalue,
listOrdered: opener(list3, onenterlistordered),
listUnordered: opener(list3),
paragraph: opener(paragraph2),
reference: onenterreference,
referenceString: buffer,
resourceDestinationString: buffer,
resourceTitleString: buffer,
setextHeading: opener(heading2),
strong: opener(strong2),
thematicBreak: opener(thematicBreak3)
},
exit: {
atxHeading: closer(),
atxHeadingSequence: onexitatxheadingsequence,
autolink: closer(),
autolinkEmail: onexitautolinkemail,
autolinkProtocol: onexitautolinkprotocol,
blockQuote: closer(),
characterEscapeValue: onexitdata,
characterReferenceMarkerHexadecimal: onexitcharacterreferencemarker,
characterReferenceMarkerNumeric: onexitcharacterreferencemarker,
characterReferenceValue: onexitcharacterreferencevalue,
characterReference: onexitcharacterreference,
codeFenced: closer(onexitcodefenced),
codeFencedFence: onexitcodefencedfence,
codeFencedFenceInfo: onexitcodefencedfenceinfo,
codeFencedFenceMeta: onexitcodefencedfencemeta,
codeFlowValue: onexitdata,
codeIndented: closer(onexitcodeindented),
codeText: closer(onexitcodetext),
codeTextData: onexitdata,
data: onexitdata,
definition: closer(),
definitionDestinationString: onexitdefinitiondestinationstring,
definitionLabelString: onexitdefinitionlabelstring,
definitionTitleString: onexitdefinitiontitlestring,
emphasis: closer(),
hardBreakEscape: closer(onexithardbreak),
hardBreakTrailing: closer(onexithardbreak),
htmlFlow: closer(onexithtmlflow),
htmlFlowData: onexitdata,
htmlText: closer(onexithtmltext),
htmlTextData: onexitdata,
image: closer(onexitimage),
label: onexitlabel,
labelText: onexitlabeltext,
lineEnding: onexitlineending,
link: closer(onexitlink),
listItem: closer(),
listOrdered: closer(),
listUnordered: closer(),
paragraph: closer(),
referenceString: onexitreferencestring,
resourceDestinationString: onexitresourcedestinationstring,
resourceTitleString: onexitresourcetitlestring,
resource: onexitresource,
setextHeading: closer(onexitsetextheading),
setextHeadingLineSequence: onexitsetextheadinglinesequence,
setextHeadingText: onexitsetextheadingtext,
strong: closer(),
thematicBreak: closer()
}
};
configure(config, (options || {}).mdastExtensions || []);
const data = {};
return compile2;
function compile2(events) {
let tree = {
type: "root",
children: []
};
const context = {
stack: [tree],
tokenStack: [],
config,
enter,
exit: exit2,
buffer,
resume,
data
};
const listStack = [];
let index = -1;
while (++index < events.length) {
if (events[index][1].type === "listOrdered" || events[index][1].type === "listUnordered") {
if (events[index][0] === "enter") {
listStack.push(index);
} else {
const tail = listStack.pop();
index = prepareList(events, tail, index);
}
}
}
index = -1;
while (++index < events.length) {
const handler = config[events[index][0]];
if (own.call(handler, events[index][1].type)) {
handler[events[index][1].type].call(Object.assign({
sliceSerialize: events[index][2].sliceSerialize
}, context), events[index][1]);
}
}
if (context.tokenStack.length > 0) {
const tail = context.tokenStack[context.tokenStack.length - 1];
const handler = tail[1] || defaultOnError;
handler.call(context, void 0, tail[0]);
}
tree.position = {
start: point(events.length > 0 ? events[0][1].start : {
line: 1,
column: 1,
offset: 0
}),
end: point(events.length > 0 ? events[events.length - 2][1].end : {
line: 1,
column: 1,
offset: 0
})
};
index = -1;
while (++index < config.transforms.length) {
tree = config.transforms[index](tree) || tree;
}
return tree;
}
function prepareList(events, start, length) {
let index = start - 1;
let containerBalance = -1;
let listSpread = false;
let listItem3;
let lineIndex;
let firstBlankLineIndex;
let atMarker;
while (++index <= length) {
const event = events[index];
switch (event[1].type) {
case "listUnordered":
case "listOrdered":
case "blockQuote": {
if (event[0] === "enter") {
containerBalance++;
} else {
containerBalance--;
}
atMarker = void 0;
break;
}
case "lineEndingBlank": {
if (event[0] === "enter") {
if (listItem3 && !atMarker && !containerBalance && !firstBlankLineIndex) {
firstBlankLineIndex = index;
}
atMarker = void 0;
}
break;
}
case "linePrefix":
case "listItemValue":
case "listItemMarker":
case "listItemPrefix":
case "listItemPrefixWhitespace": {
break;
}
default: {
atMarker = void 0;
}
}
if (!containerBalance && event[0] === "enter" && event[1].type === "listItemPrefix" || containerBalance === -1 && event[0] === "exit" && (event[1].type === "listUnordered" || event[1].type === "listOrdered")) {
if (listItem3) {
let tailIndex = index;
lineIndex = void 0;
while (tailIndex--) {
const tailEvent = events[tailIndex];
if (tailEvent[1].type === "lineEnding" || tailEvent[1].type === "lineEndingBlank") {
if (tailEvent[0] === "exit") continue;
if (lineIndex) {
events[lineIndex][1].type = "lineEndingBlank";
listSpread = true;
}
tailEvent[1].type = "lineEnding";
lineIndex = tailIndex;
} else if (tailEvent[1].type === "linePrefix" || tailEvent[1].type === "blockQuotePrefix" || tailEvent[1].type === "blockQuotePrefixWhitespace" || tailEvent[1].type === "blockQuoteMarker" || tailEvent[1].type === "listItemIndent") {
} else {
break;
}
}
if (firstBlankLineIndex && (!lineIndex || firstBlankLineIndex < lineIndex)) {
listItem3._spread = true;
}
listItem3.end = Object.assign({}, lineIndex ? events[lineIndex][1].start : event[1].end);
events.splice(lineIndex || index, 0, ["exit", listItem3, event[2]]);
index++;
length++;
}
if (event[1].type === "listItemPrefix") {
const item = {
type: "listItem",
_spread: false,
start: Object.assign({}, event[1].start),
// @ts-expect-error: well add `end` in a second.
end: void 0
};
listItem3 = item;
events.splice(index, 0, ["enter", item, event[2]]);
index++;
length++;
firstBlankLineIndex = void 0;
atMarker = true;
}
}
}
events[start][1]._spread = listSpread;
return length;
}
function opener(create, and) {
return open;
function open(token) {
enter.call(this, create(token), token);
if (and) and.call(this, token);
}
}
function buffer() {
this.stack.push({
type: "fragment",
children: []
});
}
function enter(node2, token, errorHandler) {
const parent = this.stack[this.stack.length - 1];
const siblings = parent.children;
siblings.push(node2);
this.stack.push(node2);
this.tokenStack.push([token, errorHandler || void 0]);
node2.position = {
start: point(token.start),
// @ts-expect-error: `end` will be patched later.
end: void 0
};
}
function closer(and) {
return close;
function close(token) {
if (and) and.call(this, token);
exit2.call(this, token);
}
}
function exit2(token, onExitError) {
const node2 = this.stack.pop();
const open = this.tokenStack.pop();
if (!open) {
throw new Error("Cannot close `" + token.type + "` (" + stringifyPosition({
start: token.start,
end: token.end
}) + "): its not open");
} else if (open[0].type !== token.type) {
if (onExitError) {
onExitError.call(this, token, open[0]);
} else {
const handler = open[1] || defaultOnError;
handler.call(this, token, open[0]);
}
}
node2.position.end = point(token.end);
}
function resume() {
return toString(this.stack.pop());
}
function onenterlistordered() {
this.data.expectingFirstListItemValue = true;
}
function onenterlistitemvalue(token) {
if (this.data.expectingFirstListItemValue) {
const ancestor = this.stack[this.stack.length - 2];
ancestor.start = Number.parseInt(this.sliceSerialize(token), 10);
this.data.expectingFirstListItemValue = void 0;
}
}
function onexitcodefencedfenceinfo() {
const data2 = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.lang = data2;
}
function onexitcodefencedfencemeta() {
const data2 = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.meta = data2;
}
function onexitcodefencedfence() {
if (this.data.flowCodeInside) return;
this.buffer();
this.data.flowCodeInside = true;
}
function onexitcodefenced() {
const data2 = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.value = data2.replace(/^(\r?\n|\r)|(\r?\n|\r)$/g, "");
this.data.flowCodeInside = void 0;
}
function onexitcodeindented() {
const data2 = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.value = data2.replace(/(\r?\n|\r)$/g, "");
}
function onexitdefinitionlabelstring(token) {
const label = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.label = label;
node2.identifier = normalizeIdentifier(this.sliceSerialize(token)).toLowerCase();
}
function onexitdefinitiontitlestring() {
const data2 = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.title = data2;
}
function onexitdefinitiondestinationstring() {
const data2 = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.url = data2;
}
function onexitatxheadingsequence(token) {
const node2 = this.stack[this.stack.length - 1];
if (!node2.depth) {
const depth = this.sliceSerialize(token).length;
node2.depth = depth;
}
}
function onexitsetextheadingtext() {
this.data.setextHeadingSlurpLineEnding = true;
}
function onexitsetextheadinglinesequence(token) {
const node2 = this.stack[this.stack.length - 1];
node2.depth = this.sliceSerialize(token).codePointAt(0) === 61 ? 1 : 2;
}
function onexitsetextheading() {
this.data.setextHeadingSlurpLineEnding = void 0;
}
function onenterdata(token) {
const node2 = this.stack[this.stack.length - 1];
const siblings = node2.children;
let tail = siblings[siblings.length - 1];
if (!tail || tail.type !== "text") {
tail = text4();
tail.position = {
start: point(token.start),
// @ts-expect-error: well add `end` later.
end: void 0
};
siblings.push(tail);
}
this.stack.push(tail);
}
function onexitdata(token) {
const tail = this.stack.pop();
tail.value += this.sliceSerialize(token);
tail.position.end = point(token.end);
}
function onexitlineending(token) {
const context = this.stack[this.stack.length - 1];
if (this.data.atHardBreak) {
const tail = context.children[context.children.length - 1];
tail.position.end = point(token.end);
this.data.atHardBreak = void 0;
return;
}
if (!this.data.setextHeadingSlurpLineEnding && config.canContainEols.includes(context.type)) {
onenterdata.call(this, token);
onexitdata.call(this, token);
}
}
function onexithardbreak() {
this.data.atHardBreak = true;
}
function onexithtmlflow() {
const data2 = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.value = data2;
}
function onexithtmltext() {
const data2 = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.value = data2;
}
function onexitcodetext() {
const data2 = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.value = data2;
}
function onexitlink() {
const node2 = this.stack[this.stack.length - 1];
if (this.data.inReference) {
const referenceType = this.data.referenceType || "shortcut";
node2.type += "Reference";
node2.referenceType = referenceType;
delete node2.url;
delete node2.title;
} else {
delete node2.identifier;
delete node2.label;
}
this.data.referenceType = void 0;
}
function onexitimage() {
const node2 = this.stack[this.stack.length - 1];
if (this.data.inReference) {
const referenceType = this.data.referenceType || "shortcut";
node2.type += "Reference";
node2.referenceType = referenceType;
delete node2.url;
delete node2.title;
} else {
delete node2.identifier;
delete node2.label;
}
this.data.referenceType = void 0;
}
function onexitlabeltext(token) {
const string3 = this.sliceSerialize(token);
const ancestor = this.stack[this.stack.length - 2];
ancestor.label = decodeString(string3);
ancestor.identifier = normalizeIdentifier(string3).toLowerCase();
}
function onexitlabel() {
const fragment = this.stack[this.stack.length - 1];
const value = this.resume();
const node2 = this.stack[this.stack.length - 1];
this.data.inReference = true;
if (node2.type === "link") {
const children = fragment.children;
node2.children = children;
} else {
node2.alt = value;
}
}
function onexitresourcedestinationstring() {
const data2 = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.url = data2;
}
function onexitresourcetitlestring() {
const data2 = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.title = data2;
}
function onexitresource() {
this.data.inReference = void 0;
}
function onenterreference() {
this.data.referenceType = "collapsed";
}
function onexitreferencestring(token) {
const label = this.resume();
const node2 = this.stack[this.stack.length - 1];
node2.label = label;
node2.identifier = normalizeIdentifier(this.sliceSerialize(token)).toLowerCase();
this.data.referenceType = "full";
}
function onexitcharacterreferencemarker(token) {
this.data.characterReferenceType = token.type;
}
function onexitcharacterreferencevalue(token) {
const data2 = this.sliceSerialize(token);
const type = this.data.characterReferenceType;
let value;
if (type) {
value = decodeNumericCharacterReference(data2, type === "characterReferenceMarkerNumeric" ? 10 : 16);
this.data.characterReferenceType = void 0;
} else {
const result = decodeNamedCharacterReference(data2);
value = result;
}
const tail = this.stack[this.stack.length - 1];
tail.value += value;
}
function onexitcharacterreference(token) {
const tail = this.stack.pop();
tail.position.end = point(token.end);
}
function onexitautolinkprotocol(token) {
onexitdata.call(this, token);
const node2 = this.stack[this.stack.length - 1];
node2.url = this.sliceSerialize(token);
}
function onexitautolinkemail(token) {
onexitdata.call(this, token);
const node2 = this.stack[this.stack.length - 1];
node2.url = "mailto:" + this.sliceSerialize(token);
}
function blockQuote2() {
return {
type: "blockquote",
children: []
};
}
function codeFlow() {
return {
type: "code",
lang: null,
meta: null,
value: ""
};
}
function codeText2() {
return {
type: "inlineCode",
value: ""
};
}
function definition2() {
return {
type: "definition",
identifier: "",
label: null,
title: null,
url: ""
};
}
function emphasis2() {
return {
type: "emphasis",
children: []
};
}
function heading2() {
return {
type: "heading",
// @ts-expect-error `depth` will be set later.
depth: 0,
children: []
};
}
function hardBreak2() {
return {
type: "break"
};
}
function html2() {
return {
type: "html",
value: ""
};
}
function image2() {
return {
type: "image",
title: null,
url: "",
alt: null
};
}
function link2() {
return {
type: "link",
title: null,
url: "",
children: []
};
}
function list3(token) {
return {
type: "list",
ordered: token.type === "listOrdered",
start: null,
spread: token._spread,
children: []
};
}
function listItem2(token) {
return {
type: "listItem",
spread: token._spread,
checked: null,
children: []
};
}
function paragraph2() {
return {
type: "paragraph",
children: []
};
}
function strong2() {
return {
type: "strong",
children: []
};
}
function text4() {
return {
type: "text",
value: ""
};
}
function thematicBreak3() {
return {
type: "thematicBreak"
};
}
}
function point(d) {
  // Copy only the positional fields of a point, dropping internal ones
  // such as `_index`/`_bufferIndex`.
  const { line, column, offset } = d;
  return { line, column, offset };
}
function configure(combined, extensions) {
  // Merge a (possibly nested) list of mdast extensions into `combined`.
  for (const value of extensions) {
    if (Array.isArray(value)) {
      configure(combined, value);
    } else {
      extension(combined, value);
    }
  }
}
function extension(combined, extension2) {
  // Fold one extension into the combined config: list-valued fields are
  // appended, handler maps are merged (later handlers win).
  let key;
  for (key in extension2) {
    if (own.call(extension2, key)) {
      switch (key) {
        case "canContainEols":
        case "transforms": {
          const right = extension2[key];
          if (right) {
            combined[key].push(...right);
          }
          break;
        }
        case "enter":
        case "exit": {
          const right = extension2[key];
          if (right) {
            Object.assign(combined[key], right);
          }
          break;
        }
      }
    }
  }
}
function defaultOnError(left, right) {
  // Default handler for unbalanced tokens: always throws. `left` is the
  // token being closed (or `undefined` at end of document), `right` the
  // token that is still open.
  if (left) {
    throw new Error("Cannot close `" + left.type + "` (" + stringifyPosition({
      start: left.start,
      end: left.end
    }) + "): a different token (`" + right.type + "`, " + stringifyPosition({
      start: right.start,
      end: right.end
    }) + ") is open");
  }
  throw new Error("Cannot close document, a token (`" + right.type + "`, " + stringifyPosition({
    start: right.start,
    end: right.end
  }) + ") is still open");
}
// node_modules/remark-parse/lib/index.js
function remarkParse(options) {
  // unified plugin: install a markdown -> mdast parser on the processor.
  const self2 = this;
  self2.parser = parser;
  function parser(doc) {
    const configuration = {
      ...self2.data("settings"),
      ...options,
      // Note: these options are not in the readme.
      // The goal is for them to be set by plugins on `data` instead of being
      // passed by users.
      extensions: self2.data("micromarkExtensions") || [],
      mdastExtensions: self2.data("fromMarkdownExtensions") || []
    };
    return fromMarkdown(doc, configuration);
  }
}
// node_modules/mdast-util-to-hast/lib/handlers/blockquote.js
function blockquote(state, node2) {
  // mdast `blockquote` -> hast `<blockquote>`, with children wrapped in
  // line endings for readable output.
  const element = {
    type: "element",
    tagName: "blockquote",
    properties: {},
    children: state.wrap(state.all(node2), true)
  };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/break.js
function hardBreak(state, node2) {
  // mdast `break` -> hast `<br>` followed by a literal newline text node.
  const element = { type: "element", tagName: "br", properties: {}, children: [] };
  state.patch(node2, element);
  return [state.applyData(node2, element), { type: "text", value: "\n" }];
}
// node_modules/mdast-util-to-hast/lib/handlers/code.js
function code(state, node2) {
  // mdast `code` -> hast `<pre><code>`, with a `language-*` class taken
  // from the first word of the info string and `meta` kept on `data`.
  const value = node2.value ? node2.value + "\n" : "";
  const properties = {};
  const words = node2.lang ? node2.lang.split(/\s+/) : [];
  if (words.length > 0) {
    properties.className = ["language-" + words[0]];
  }
  let codeElement = {
    type: "element",
    tagName: "code",
    properties,
    children: [{ type: "text", value }]
  };
  if (node2.meta) {
    codeElement.data = { meta: node2.meta };
  }
  state.patch(node2, codeElement);
  codeElement = state.applyData(node2, codeElement);
  const preElement = { type: "element", tagName: "pre", properties: {}, children: [codeElement] };
  state.patch(node2, preElement);
  return preElement;
}
// node_modules/mdast-util-to-hast/lib/handlers/delete.js
function strikethrough(state, node2) {
  // mdast `delete` (GFM strikethrough) -> hast `<del>`.
  const element = {
    type: "element",
    tagName: "del",
    properties: {},
    children: state.all(node2)
  };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/emphasis.js
function emphasis(state, node2) {
  // mdast `emphasis` -> hast `<em>`.
  const element = {
    type: "element",
    tagName: "em",
    properties: {},
    children: state.all(node2)
  };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/footnote-reference.js
function footnoteReference(state, node2) {
  // mdast `footnoteReference` -> hast `<sup><a>` linking to the footnote;
  // footnotes are numbered in order of first use, and repeat references to
  // the same footnote get a `-N` suffix on the anchor id.
  const clobberPrefix = typeof state.options.clobberPrefix === "string" ? state.options.clobberPrefix : "user-content-";
  const id = String(node2.identifier).toUpperCase();
  const safeId = normalizeUri(id.toLowerCase());
  const index = state.footnoteOrder.indexOf(id);
  let counter;
  let reuseCounter = state.footnoteCounts.get(id);
  if (reuseCounter === void 0) {
    // First reference to this footnote: assign the next number.
    reuseCounter = 0;
    state.footnoteOrder.push(id);
    counter = state.footnoteOrder.length;
  } else {
    counter = index + 1;
  }
  reuseCounter += 1;
  state.footnoteCounts.set(id, reuseCounter);
  const anchor = {
    type: "element",
    tagName: "a",
    properties: {
      href: "#" + clobberPrefix + "fn-" + safeId,
      id: clobberPrefix + "fnref-" + safeId + (reuseCounter > 1 ? "-" + reuseCounter : ""),
      dataFootnoteRef: true,
      ariaDescribedBy: ["footnote-label"]
    },
    children: [{ type: "text", value: String(counter) }]
  };
  state.patch(node2, anchor);
  const superscript = {
    type: "element",
    tagName: "sup",
    properties: {},
    children: [anchor]
  };
  state.patch(node2, superscript);
  return state.applyData(node2, superscript);
}
// node_modules/mdast-util-to-hast/lib/handlers/heading.js
function heading(state, node2) {
  // mdast `heading` -> hast `<h1>`..`<h6>` depending on `depth`.
  const element = {
    type: "element",
    tagName: "h" + node2.depth,
    properties: {},
    children: state.all(node2)
  };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/html.js
function html(state, node2) {
  // mdast `html` -> hast `raw` node, but only when dangerous HTML is
  // explicitly allowed; otherwise the node is dropped.
  if (!state.options.allowDangerousHtml) {
    return void 0;
  }
  const result = { type: "raw", value: node2.value };
  state.patch(node2, result);
  return state.applyData(node2, result);
}
// node_modules/mdast-util-to-hast/lib/revert.js
function revert(state, node2) {
  // Render a reference without a matching definition back as literal
  // markdown text (e.g. `[text][label]`, `![alt][]`).
  const referenceType = node2.referenceType;
  let suffix = "]";
  if (referenceType === "collapsed") {
    suffix += "[]";
  } else if (referenceType === "full") {
    suffix += "[" + (node2.label || node2.identifier) + "]";
  }
  if (node2.type === "imageReference") {
    return [{ type: "text", value: "![" + node2.alt + suffix }];
  }
  const contents = state.all(node2);
  const first = contents[0];
  if (first && first.type === "text") {
    first.value = "[" + first.value;
  } else {
    contents.unshift({ type: "text", value: "[" });
  }
  const last = contents[contents.length - 1];
  if (last && last.type === "text") {
    last.value += suffix;
  } else {
    contents.push({ type: "text", value: suffix });
  }
  return contents;
}
// node_modules/mdast-util-to-hast/lib/handlers/image-reference.js
// Turn an mdast `imageReference` into `<img>` via its definition; fall back
// to literal text when the definition is missing.
function imageReference(state, node2) {
  const id = String(node2.identifier).toUpperCase();
  const definition2 = state.definitionById.get(id);
  if (!definition2) {
    return revert(state, node2);
  }
  const properties = { src: normalizeUri(definition2.url || ""), alt: node2.alt };
  if (definition2.title != null) {
    properties.title = definition2.title;
  }
  const element = { type: "element", tagName: "img", properties, children: [] };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/image.js
// Turn an mdast `image` into an `<img>` element.
function image(state, node2) {
  const properties = { src: normalizeUri(node2.url) };
  if (node2.alt != null) {
    properties.alt = node2.alt;
  }
  if (node2.title != null) {
    properties.title = node2.title;
  }
  const element = { type: "element", tagName: "img", properties, children: [] };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/inline-code.js
// Turn mdast `inlineCode` into `<code>`, collapsing line endings to spaces.
function inlineCode(state, node2) {
  const textNode = { type: "text", value: node2.value.replace(/\r?\n|\r/g, " ") };
  state.patch(node2, textNode);
  const element = { type: "element", tagName: "code", properties: {}, children: [textNode] };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/link-reference.js
// Turn an mdast `linkReference` into `<a>` via its definition; fall back to
// literal text when the definition is missing.
function linkReference(state, node2) {
  const id = String(node2.identifier).toUpperCase();
  const definition2 = state.definitionById.get(id);
  if (!definition2) {
    return revert(state, node2);
  }
  const properties = { href: normalizeUri(definition2.url || "") };
  if (definition2.title != null) {
    properties.title = definition2.title;
  }
  const element = {
    type: "element",
    tagName: "a",
    properties,
    children: state.all(node2)
  };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/link.js
// Turn an mdast `link` into an `<a>` element.
function link(state, node2) {
  const properties = { href: normalizeUri(node2.url) };
  if (node2.title != null) {
    properties.title = node2.title;
  }
  const element = {
    type: "element",
    tagName: "a",
    properties,
    children: state.all(node2)
  };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/list-item.js
// Turn an mdast `listItem` into `<li>`. Handles GFM task-list checkboxes and
// the tight/loose distinction: in tight lists, a leading paragraph is
// unwrapped into the `<li>` directly.
function listItem(state, node2, parent) {
  const results = state.all(node2);
  const loose = parent ? listLoose(parent) : listItemLoose(node2);
  const properties = {};
  const children = [];
  if (typeof node2.checked === "boolean") {
    // Task-list item: inject a disabled checkbox at the start of the first
    // paragraph (creating one if needed).
    const head = results[0];
    let paragraph2;
    if (head && head.type === "element" && head.tagName === "p") {
      paragraph2 = head;
    } else {
      paragraph2 = { type: "element", tagName: "p", properties: {}, children: [] };
      results.unshift(paragraph2);
    }
    if (paragraph2.children.length > 0) {
      paragraph2.children.unshift({ type: "text", value: " " });
    }
    paragraph2.children.unshift({
      type: "element",
      tagName: "input",
      properties: { type: "checkbox", checked: node2.checked, disabled: true },
      children: []
    });
    properties.className = ["task-list-item"];
  }
  let index = -1;
  while (++index < results.length) {
    const child = results[index];
    // In a tight list the first paragraph hugs the `<li>` (no preceding
    // newline) and its contents are inlined rather than kept as `<p>`.
    if (loose || index !== 0 || child.type !== "element" || child.tagName !== "p") {
      children.push({ type: "text", value: "\n" });
    }
    if (child.type === "element" && child.tagName === "p" && !loose) {
      children.push(...child.children);
    } else {
      children.push(child);
    }
  }
  const tail = results[results.length - 1];
  if (tail && (loose || tail.type !== "element" || tail.tagName !== "p")) {
    children.push({ type: "text", value: "\n" });
  }
  const result = { type: "element", tagName: "li", properties, children };
  state.patch(node2, result);
  return state.applyData(node2, result);
}
// A list is loose when it is spread itself or any of its items is spread.
function listLoose(node2) {
  let loose = false;
  if (node2.type === "list") {
    loose = node2.spread || false;
    const children = node2.children;
    let index = -1;
    while (!loose && ++index < children.length) {
      loose = listItemLoose(children[index]);
    }
  }
  return loose;
}
// An item with unknown `spread` counts as loose when it has several children.
function listItemLoose(node2) {
  const spread = node2.spread;
  return spread === null || spread === void 0 ? node2.children.length > 1 : spread;
}
// node_modules/mdast-util-to-hast/lib/handlers/list.js
// Turn an mdast `list` into `<ol>`/`<ul>`, propagating a non-default `start`
// and flagging lists that contain task-list items.
function list2(state, node2) {
  const properties = {};
  const results = state.all(node2);
  if (typeof node2.start === "number" && node2.start !== 1) {
    properties.start = node2.start;
  }
  const hasTaskItem = results.some(
    (child) => child.type === "element" && child.tagName === "li" && child.properties && Array.isArray(child.properties.className) && child.properties.className.includes("task-list-item")
  );
  if (hasTaskItem) {
    properties.className = ["contains-task-list"];
  }
  const element = {
    type: "element",
    tagName: node2.ordered ? "ol" : "ul",
    properties,
    children: state.wrap(results, true)
  };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/paragraph.js
// Turn an mdast `paragraph` into a `<p>` element.
function paragraph(state, node2) {
  const children = state.all(node2);
  const element = { type: "element", tagName: "p", properties: {}, children };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/root.js
// Turn an mdast `root` into a hast root, wrapping children with newlines.
function root(state, node2) {
  const children = state.wrap(state.all(node2));
  const result = { type: "root", children };
  state.patch(node2, result);
  return state.applyData(node2, result);
}
// node_modules/mdast-util-to-hast/lib/handlers/strong.js
// Turn an mdast `strong` node into a `<strong>` element.
function strong(state, node2) {
  const children = state.all(node2);
  const element = { type: "element", tagName: "strong", properties: {}, children };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/table.js
// Turn an mdast `table` into `<table>`: the first row becomes `<thead>`,
// remaining rows become `<tbody>`.
function table(state, node2) {
  const rows = state.all(node2);
  const firstRow = rows.shift();
  const tableContent = [];
  if (firstRow) {
    const head = {
      type: "element",
      tagName: "thead",
      properties: {},
      children: state.wrap([firstRow], true)
    };
    // Position the `<thead>` on the source position of the first row.
    state.patch(node2.children[0], head);
    tableContent.push(head);
  }
  if (rows.length > 0) {
    const body = {
      type: "element",
      tagName: "tbody",
      properties: {},
      children: state.wrap(rows, true)
    };
    // The `<tbody>` spans from the second row to the last row.
    const start = pointStart(node2.children[1]);
    const end = pointEnd(node2.children[node2.children.length - 1]);
    if (start && end) body.position = { start, end };
    tableContent.push(body);
  }
  const result = {
    type: "element",
    tagName: "table",
    properties: {},
    children: state.wrap(tableContent, true)
  };
  state.patch(node2, result);
  return state.applyData(node2, result);
}
// node_modules/mdast-util-to-hast/lib/handlers/table-row.js
// Turn an mdast `tableRow` into `<tr>`; cells in the first row become `<th>`,
// others `<td>`. Missing cells are padded so every row matches the table's
// alignment row length.
function tableRow(state, node2, parent) {
  const siblings = parent ? parent.children : void 0;
  const rowIndex = siblings ? siblings.indexOf(node2) : 1;
  const cellTagName = rowIndex === 0 ? "th" : "td";
  const align = parent && parent.type === "table" ? parent.align : void 0;
  const cellCount = align ? align.length : node2.children.length;
  const cells = [];
  for (let cellIndex = 0; cellIndex < cellCount; cellIndex++) {
    const cell = node2.children[cellIndex];
    const properties = {};
    const alignValue = align ? align[cellIndex] : void 0;
    if (alignValue) {
      properties.align = alignValue;
    }
    let cellResult = { type: "element", tagName: cellTagName, properties, children: [] };
    if (cell) {
      cellResult.children = state.all(cell);
      state.patch(cell, cellResult);
      cellResult = state.applyData(cell, cellResult);
    }
    cells.push(cellResult);
  }
  const row = {
    type: "element",
    tagName: "tr",
    properties: {},
    children: state.wrap(cells, true)
  };
  state.patch(node2, row);
  return state.applyData(node2, row);
}
// node_modules/mdast-util-to-hast/lib/handlers/table-cell.js
// Turn an mdast `tableCell` into `<td>`; `tableRow` overrides header cells.
function tableCell(state, node2) {
  const children = state.all(node2);
  const element = {
    type: "element",
    tagName: "td",
    // Assume body cell.
    properties: {},
    children
  };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/trim-lines/index.js
// Character codes considered trimmable around line endings.
var tab = 9;
var space = 32;
// Remove spaces/tabs around line endings: trailing whitespace on every line,
// and leading whitespace on every line but the first.
function trimLines(value) {
  const source = String(value);
  const eol = /\r?\n|\r/g;
  const parts = [];
  let previousEnd = 0;
  let match = eol.exec(source);
  while (match) {
    parts.push(
      trimLine(source.slice(previousEnd, match.index), previousEnd > 0, true),
      match[0]
    );
    previousEnd = match.index + match[0].length;
    match = eol.exec(source);
  }
  parts.push(trimLine(source.slice(previousEnd), previousEnd > 0, false));
  return parts.join("");
}
// Trim tabs/spaces from the start and/or end of one line.
function trimLine(value, start, end) {
  let from = 0;
  let to = value.length;
  if (start) {
    while (from < to) {
      const code2 = value.codePointAt(from);
      if (code2 !== tab && code2 !== space) break;
      from++;
    }
  }
  if (end) {
    while (to > from) {
      const code2 = value.codePointAt(to - 1);
      if (code2 !== tab && code2 !== space) break;
      to--;
    }
  }
  return to > from ? value.slice(from, to) : "";
}
// node_modules/mdast-util-to-hast/lib/handlers/text.js
// Turn an mdast `text` node into hast text, trimming whitespace around
// line endings.
function text3(state, node2) {
  const value = trimLines(String(node2.value));
  const textNode = { type: "text", value };
  state.patch(node2, textNode);
  return state.applyData(node2, textNode);
}
// node_modules/mdast-util-to-hast/lib/handlers/thematic-break.js
// Turn an mdast `thematicBreak` into an `<hr>` element.
function thematicBreak2(state, node2) {
  const element = { type: "element", tagName: "hr", properties: {}, children: [] };
  state.patch(node2, element);
  return state.applyData(node2, element);
}
// node_modules/mdast-util-to-hast/lib/handlers/index.js
// Default mdast→hast handlers, keyed by mdast node type.
var handlers = {
  blockquote,
  break: hardBreak,
  code,
  delete: strikethrough,
  emphasis,
  footnoteReference,
  heading,
  html,
  imageReference,
  image,
  inlineCode,
  linkReference,
  link,
  listItem,
  list: list2,
  paragraph,
  // @ts-expect-error: root is different, but hard to type.
  root,
  strong,
  table,
  tableCell,
  tableRow,
  text: text3,
  thematicBreak: thematicBreak2,
  // Nodes that produce no HTML output: frontmatter and definitions (the
  // latter are resolved through `state.definitionById`/`footnoteById`).
  toml: ignore,
  yaml: ignore,
  definition: ignore,
  footnoteDefinition: ignore
};
// Handler for node types that are intentionally dropped from the output.
function ignore() {
  return void 0;
}
// node_modules/@ungap/structured-clone/esm/types.js
// Numeric type tags shared by the structured-clone serializer/deserializer.
var VOID = -1;
var PRIMITIVE = 0;
var ARRAY = 1;
var OBJECT = 2;
var DATE = 3;
var REGEXP = 4;
var MAP = 5;
var SET = 6;
var ERROR = 7;
var BIGINT = 8;
// node_modules/@ungap/structured-clone/esm/deserialize.js
// Global object used to look up constructors by name (e.g. `TypeError`,
// `Uint8Array`) when rebuilding values.
var env = typeof self === "object" ? self : globalThis;
// Build an `unpair` function that reconstructs the value at a given index of
// the serialized record list `_`, memoizing into `$` so shared/circular
// references are restored as the same object.
var deserializer = ($, _) => {
  // Record `out` as the deserialization of `index` BEFORE recursing, so
  // cycles terminate.
  const as = (out, index) => {
    $.set(index, out);
    return out;
  };
  const unpair = (index) => {
    if ($.has(index))
      return $.get(index);
    const [type, value] = _[index];
    switch (type) {
      case PRIMITIVE:
      case VOID:
        return as(value, index);
      case ARRAY: {
        const arr = as([], index);
        for (const index2 of value)
          arr.push(unpair(index2));
        return arr;
      }
      case OBJECT: {
        const object = as({}, index);
        for (const [key, index2] of value)
          object[unpair(key)] = unpair(index2);
        return object;
      }
      case DATE:
        return as(new Date(value), index);
      case REGEXP: {
        const { source, flags } = value;
        return as(new RegExp(source, flags), index);
      }
      case MAP: {
        const map = as(/* @__PURE__ */ new Map(), index);
        for (const [key, index2] of value)
          map.set(unpair(key), unpair(index2));
        return map;
      }
      case SET: {
        const set = as(/* @__PURE__ */ new Set(), index);
        for (const index2 of value)
          set.add(unpair(index2));
        return set;
      }
      case ERROR: {
        const { name, message } = value;
        return as(new env[name](message), index);
      }
      case BIGINT:
        return as(BigInt(value), index);
      case "BigInt":
        return as(Object(BigInt(value)), index);
      case "ArrayBuffer":
        return as(new Uint8Array(value).buffer, value);
      case "DataView": {
        const { buffer } = new Uint8Array(value);
        return as(new DataView(buffer), value);
      }
    }
    // Remaining string tags are typed-array / host constructor names,
    // resolved off the global object.
    return as(new env[type](value), index);
  };
  return unpair;
};
// Entry point: rebuild the root value (record 0) of a serialized list.
var deserialize = (serialized) => deserializer(/* @__PURE__ */ new Map(), serialized)(0);
// node_modules/@ungap/structured-clone/esm/serialize.js
var EMPTY = "";
var { toString: toString2 } = {};
var { keys } = Object;
// Classify a value as a [TYPE tag, subtype string] pair. The subtype carries
// the constructor name for typed arrays, boxed primitives, errors, etc.
var typeOf = (value) => {
  const type = typeof value;
  if (type !== "object" || !value)
    return [PRIMITIVE, type];
  const asString = toString2.call(value).slice(8, -1);
  switch (asString) {
    case "Array":
      return [ARRAY, EMPTY];
    case "Object":
      return [OBJECT, EMPTY];
    case "Date":
      return [DATE, EMPTY];
    case "RegExp":
      return [REGEXP, EMPTY];
    case "Map":
      return [MAP, EMPTY];
    case "Set":
      return [SET, EMPTY];
    case "DataView":
      return [ARRAY, asString];
  }
  if (asString.includes("Array"))
    return [ARRAY, asString];
  if (asString.includes("Error"))
    return [ERROR, asString];
  return [OBJECT, asString];
};
// In lossy mode, functions and symbols are silently dropped.
var shouldSkip = ([TYPE, type]) => TYPE === PRIMITIVE && (type === "function" || type === "symbol");
// Build a `pair` function that appends a record for a value to `_` and
// returns its index, memoizing in `$` so shared/circular references are
// serialized once.
var serializer = (strict, json, $, _) => {
  // Record the value's index BEFORE recursing so cycles terminate.
  const as = (out, value) => {
    const index = _.push(out) - 1;
    $.set(value, index);
    return index;
  };
  const pair = (value) => {
    if ($.has(value))
      return $.get(value);
    let [TYPE, type] = typeOf(value);
    switch (TYPE) {
      case PRIMITIVE: {
        let entry = value;
        switch (type) {
          case "bigint":
            TYPE = BIGINT;
            entry = value.toString();
            break;
          case "function":
          case "symbol":
            if (strict)
              throw new TypeError("unable to serialize " + type);
            entry = null;
            break;
          case "undefined":
            return as([VOID], value);
        }
        return as([TYPE, entry], value);
      }
      case ARRAY: {
        if (type) {
          // Typed arrays / DataView / ArrayBuffer: store raw byte values.
          let spread = value;
          if (type === "DataView") {
            spread = new Uint8Array(value.buffer);
          } else if (type === "ArrayBuffer") {
            spread = new Uint8Array(value);
          }
          return as([type, [...spread]], value);
        }
        const arr = [];
        const index = as([TYPE, arr], value);
        for (const entry of value)
          arr.push(pair(entry));
        return index;
      }
      case OBJECT: {
        if (type) {
          switch (type) {
            case "BigInt":
              return as([type, value.toString()], value);
            case "Boolean":
            case "Number":
            case "String":
              return as([type, value.valueOf()], value);
          }
        }
        if (json && "toJSON" in value)
          return pair(value.toJSON());
        const entries = [];
        const index = as([TYPE, entries], value);
        for (const key of keys(value)) {
          if (strict || !shouldSkip(typeOf(value[key])))
            entries.push([pair(key), pair(value[key])]);
        }
        return index;
      }
      case DATE:
        return as([TYPE, value.toISOString()], value);
      case REGEXP: {
        const { source, flags } = value;
        return as([TYPE, { source, flags }], value);
      }
      case MAP: {
        const entries = [];
        const index = as([TYPE, entries], value);
        for (const [key, entry] of value) {
          if (strict || !(shouldSkip(typeOf(key)) || shouldSkip(typeOf(entry))))
            entries.push([pair(key), pair(entry)]);
        }
        return index;
      }
      case SET: {
        const entries = [];
        const index = as([TYPE, entries], value);
        for (const entry of value) {
          if (strict || !shouldSkip(typeOf(entry)))
            entries.push(pair(entry));
        }
        return index;
      }
    }
    // ERROR: store the constructor name and message only.
    const { message } = value;
    return as([TYPE, { name: type, message }], value);
  };
  return pair;
};
// Entry point: serialize `value` to a flat record list. `json`/`lossy`
// relax strictness (functions/symbols dropped instead of throwing).
var serialize = (value, { json, lossy } = {}) => {
  const _ = [];
  return serializer(!(json || lossy), !!json, /* @__PURE__ */ new Map(), _)(value), _;
};
// node_modules/@ungap/structured-clone/esm/index.js
// Prefer the native `structuredClone` when present; fall back to the
// serialize/deserialize polyfill (always used when `json`/`lossy` options
// are passed, since the native API does not support them).
var esm_default = typeof structuredClone === "function" ? (any, options) => {
  if (options && ("json" in options || "lossy" in options)) {
    return deserialize(serialize(any, options));
  }
  return structuredClone(any);
} : (any, options) => deserialize(serialize(any, options));
// node_modules/mdast-util-to-hast/lib/footer.js
// Default content for a footnote back-reference link: `↩`, plus a
// superscript counter for the second and later references.
function defaultFootnoteBackContent(_, rereferenceIndex) {
  const children = [{ type: "text", value: "↩" }];
  if (rereferenceIndex > 1) {
    children.push({
      type: "element",
      tagName: "sup",
      properties: {},
      children: [{ type: "text", value: String(rereferenceIndex) }]
    });
  }
  return children;
}
// Default accessible label for a footnote back-reference link.
function defaultFootnoteBackLabel(referenceIndex, rereferenceIndex) {
  const suffix = rereferenceIndex > 1 ? "-" + rereferenceIndex : "";
  return "Back to reference " + (referenceIndex + 1) + suffix;
}
// Generate the footnotes `<section>` for the end of the document, listing
// every referenced footnote definition in first-use order with back links
// to each of its references. Returns `undefined` when nothing was referenced.
function footer(state) {
  const clobberPrefix = typeof state.options.clobberPrefix === "string" ? state.options.clobberPrefix : "user-content-";
  const footnoteBackContent = state.options.footnoteBackContent || defaultFootnoteBackContent;
  const footnoteBackLabel = state.options.footnoteBackLabel || defaultFootnoteBackLabel;
  const footnoteLabel = state.options.footnoteLabel || "Footnotes";
  const footnoteLabelTagName = state.options.footnoteLabelTagName || "h2";
  const footnoteLabelProperties = state.options.footnoteLabelProperties || {
    className: ["sr-only"]
  };
  const listItems = [];
  let referenceIndex = -1;
  while (++referenceIndex < state.footnoteOrder.length) {
    // References without a matching definition are skipped.
    const definition2 = state.footnoteById.get(
      state.footnoteOrder[referenceIndex]
    );
    if (!definition2) {
      continue;
    }
    const content3 = state.all(definition2);
    const id = String(definition2.identifier).toUpperCase();
    const safeId = normalizeUri(id.toLowerCase());
    let rereferenceIndex = 0;
    const backReferences = [];
    const counts = state.footnoteCounts.get(id);
    // One back link per reference to this footnote; ids mirror the `-N`
    // suffix scheme used by `footnoteReference`.
    while (counts !== void 0 && ++rereferenceIndex <= counts) {
      if (backReferences.length > 0) {
        backReferences.push({ type: "text", value: " " });
      }
      let children = typeof footnoteBackContent === "string" ? footnoteBackContent : footnoteBackContent(referenceIndex, rereferenceIndex);
      if (typeof children === "string") {
        children = { type: "text", value: children };
      }
      backReferences.push({
        type: "element",
        tagName: "a",
        properties: {
          href: "#" + clobberPrefix + "fnref-" + safeId + (rereferenceIndex > 1 ? "-" + rereferenceIndex : ""),
          dataFootnoteBackref: "",
          ariaLabel: typeof footnoteBackLabel === "string" ? footnoteBackLabel : footnoteBackLabel(referenceIndex, rereferenceIndex),
          className: ["data-footnote-backref"]
        },
        children: Array.isArray(children) ? children : [children]
      });
    }
    // Append the back links inside a trailing paragraph when there is one,
    // otherwise after the footnote content.
    const tail = content3[content3.length - 1];
    if (tail && tail.type === "element" && tail.tagName === "p") {
      const tailTail = tail.children[tail.children.length - 1];
      if (tailTail && tailTail.type === "text") {
        tailTail.value += " ";
      } else {
        tail.children.push({ type: "text", value: " " });
      }
      tail.children.push(...backReferences);
    } else {
      content3.push(...backReferences);
    }
    const listItem2 = {
      type: "element",
      tagName: "li",
      properties: { id: clobberPrefix + "fn-" + safeId },
      children: state.wrap(content3, true)
    };
    state.patch(definition2, listItem2);
    listItems.push(listItem2);
  }
  if (listItems.length === 0) {
    return;
  }
  return {
    type: "element",
    tagName: "section",
    properties: { dataFootnotes: true, className: ["footnotes"] },
    children: [
      {
        type: "element",
        tagName: footnoteLabelTagName,
        properties: {
          // Clone so user-provided properties are not mutated.
          ...esm_default(footnoteLabelProperties),
          id: "footnote-label"
        },
        children: [{ type: "text", value: footnoteLabel }]
      },
      { type: "text", value: "\n" },
      {
        type: "element",
        tagName: "ol",
        properties: {},
        children: state.wrap(listItems, true)
      },
      { type: "text", value: "\n" }
    ]
  };
}
// node_modules/unist-util-is/lib/index.js
/**
 * Turn a `test` into a check function.
 *
 * Accepted tests: nothing (match any node), a node type string, a partial
 * properties object, a predicate function, or an array of any of those
 * (matching if any element matches).
 *
 * @param {Test} [test]
 * @returns {Check}
 */
var convert = (function(test) {
  if (test === null || test === void 0) {
    return ok2;
  }
  if (typeof test === "function") {
    return castFactory(test);
  }
  if (typeof test === "object") {
    return Array.isArray(test) ? anyFactory(test) : propertiesFactory(test);
  }
  if (typeof test === "string") {
    return typeFactory(test);
  }
  throw new Error("Expected function, string, or object as test");
});
// Combine several tests: a node passes when any converted test passes.
function anyFactory(tests) {
  const checks = tests.map((test) => convert(test));
  return castFactory(any);
  function any(...parameters) {
    return checks.some((check) => check.apply(this, parameters));
  }
}
// Match when every key of `check` strictly equals the node's value.
function propertiesFactory(check) {
  const checkAsRecord = (
    /** @type {Record<string, unknown>} */
    check
  );
  return castFactory(all2);
  function all2(node2) {
    const nodeAsRecord = (
      /** @type {Record<string, unknown>} */
      /** @type {unknown} */
      node2
    );
    for (const key in check) {
      if (nodeAsRecord[key] !== checkAsRecord[key]) {
        return false;
      }
    }
    return true;
  }
}
// Match nodes whose `type` equals the given string.
function typeFactory(check) {
  return castFactory(type);
  function type(node2) {
    return node2 && node2.type === check;
  }
}
// Wrap a predicate so it only ever sees node-like values and always yields
// a boolean; `index`/`parent` are normalized to `undefined` when absent.
function castFactory(testFunction) {
  return check;
  function check(value, index, parent) {
    if (!looksLikeANode(value)) {
      return false;
    }
    const result = testFunction.call(
      this,
      value,
      typeof index === "number" ? index : void 0,
      parent || void 0
    );
    return Boolean(result);
  }
}
// Check that always passes (used when no test is given).
function ok2() {
  return true;
}
// Minimal structural check for a unist node.
function looksLikeANode(value) {
  return value !== null && typeof value === "object" && "type" in value;
}
// node_modules/unist-util-visit-parents/lib/color.js
// Browser build: no ANSI colors, so return the label unchanged.
function color(d) {
  return d;
}
// node_modules/unist-util-visit-parents/lib/index.js
// Sentinel results a visitor can return to steer traversal.
var empty = [];
var CONTINUE = true;
var EXIT = false;
var SKIP = "skip";
// Depth-first preorder traversal; the visitor receives each matching node
// plus the stack of its ancestors. Visitor results control the walk:
// EXIT stops, SKIP skips children, a number jumps to that sibling index.
function visitParents(tree, test, visitor, reverse) {
  let check;
  // Overload: (tree, visitor, reverse?) — no test given.
  if (typeof test === "function" && typeof visitor !== "function") {
    reverse = visitor;
    visitor = test;
  } else {
    check = test;
  }
  const is2 = convert(check);
  const step = reverse ? -1 : 1;
  factory(tree, void 0, [])();
  function factory(node2, index, parents) {
    const value = (
      /** @type {Record<string, unknown>} */
      node2 && typeof node2 === "object" ? node2 : {}
    );
    if (typeof value.type === "string") {
      const name = (
        // `hast`
        typeof value.tagName === "string" ? value.tagName : (
          // `xast`
          typeof value.name === "string" ? value.name : void 0
        )
      );
      // Name the closure after the node for nicer stack traces.
      Object.defineProperty(visit2, "name", {
        value: "node (" + color(node2.type + (name ? "<" + name + ">" : "")) + ")"
      });
    }
    return visit2;
    function visit2() {
      let result = empty;
      let subresult;
      let offset;
      let grandparents;
      if (!test || is2(node2, index, parents[parents.length - 1] || void 0)) {
        result = toResult(visitor(node2, parents));
        if (result[0] === EXIT) {
          return result;
        }
      }
      if ("children" in node2 && node2.children) {
        const nodeAsParent = (
          /** @type {UnistParent} */
          node2
        );
        if (nodeAsParent.children && result[0] !== SKIP) {
          offset = (reverse ? nodeAsParent.children.length : -1) + step;
          grandparents = parents.concat(nodeAsParent);
          while (offset > -1 && offset < nodeAsParent.children.length) {
            const child = nodeAsParent.children[offset];
            subresult = factory(child, offset, grandparents)();
            if (subresult[0] === EXIT) {
              return subresult;
            }
            // A numeric result from the visitor moves to that sibling index.
            offset = typeof subresult[1] === "number" ? subresult[1] : offset + step;
          }
        }
      }
      return result;
    }
  }
}
// Normalize a visitor's return value to an action tuple.
function toResult(value) {
  if (Array.isArray(value)) {
    return value;
  }
  if (typeof value === "number") {
    return [CONTINUE, value];
  }
  return value === null || value === void 0 ? empty : [value];
}
// node_modules/unist-util-visit/lib/index.js
// Like `visitParents`, but the visitor receives `(node, index, parent)`
// instead of the full ancestor stack.
function visit(tree, testOrVisitor, visitorOrReverse, maybeReverse) {
  let reverse;
  let test;
  let visitor;
  // Overload: (tree, visitor, reverse?) — no test given.
  if (typeof testOrVisitor === "function" && typeof visitorOrReverse !== "function") {
    test = void 0;
    visitor = testOrVisitor;
    reverse = visitorOrReverse;
  } else {
    test = testOrVisitor;
    visitor = visitorOrReverse;
    reverse = maybeReverse;
  }
  visitParents(tree, test, overload, reverse);
  function overload(node2, parents) {
    const parent = parents[parents.length - 1];
    const index = parent ? parent.children.indexOf(node2) : void 0;
    return visitor(node2, index, parent);
  }
}
// node_modules/mdast-util-to-hast/lib/state.js
var own2 = {}.hasOwnProperty;
var emptyOptions2 = {};
// Build the `state` object shared by all handlers: indexes of definitions
// and footnote definitions, footnote bookkeeping, and the `one`/`all`
// transform entry points.
function createState(tree, options) {
  const settings = options || emptyOptions2;
  const definitionById = /* @__PURE__ */ new Map();
  const footnoteById = /* @__PURE__ */ new Map();
  const footnoteCounts = /* @__PURE__ */ new Map();
  // User handlers override the defaults per node type.
  const handlers2 = { ...handlers, ...settings.handlers };
  const state = {
    all: all2,
    applyData,
    definitionById,
    footnoteById,
    footnoteCounts,
    footnoteOrder: [],
    handlers: handlers2,
    one: one2,
    options: settings,
    patch,
    wrap
  };
  // Pre-index definitions; the first definition for an identifier wins.
  visit(tree, function(node2) {
    if (node2.type === "definition" || node2.type === "footnoteDefinition") {
      const map = node2.type === "definition" ? definitionById : footnoteById;
      const id = String(node2.identifier).toUpperCase();
      if (!map.has(id)) {
        map.set(id, node2);
      }
    }
  });
  return state;
  // Transform one mdast node via its handler, a pass-through clone, or the
  // unknown-node fallback.
  function one2(node2, parent) {
    const type = node2.type;
    const handle = state.handlers[type];
    if (own2.call(state.handlers, type) && handle) {
      return handle(state, node2, parent);
    }
    if (state.options.passThrough && state.options.passThrough.includes(type)) {
      if ("children" in node2) {
        const { children, ...shallow } = node2;
        const result = esm_default(shallow);
        result.children = state.all(node2);
        return result;
      }
      return esm_default(node2);
    }
    const unknown = state.options.unknownHandler || defaultUnknownHandler;
    return unknown(state, node2, parent);
  }
  // Transform all children of a parent, trimming whitespace after `break`
  // nodes and flattening handlers that return arrays.
  function all2(parent) {
    const values = [];
    if ("children" in parent) {
      const nodes = parent.children;
      let index = -1;
      while (++index < nodes.length) {
        const result = state.one(nodes[index], parent);
        if (result) {
          if (index && nodes[index - 1].type === "break") {
            if (!Array.isArray(result) && result.type === "text") {
              result.value = trimMarkdownSpaceStart(result.value);
            }
            if (!Array.isArray(result) && result.type === "element") {
              const head = result.children[0];
              if (head && head.type === "text") {
                head.value = trimMarkdownSpaceStart(head.value);
              }
            }
          }
          if (Array.isArray(result)) {
            values.push(...result);
          } else {
            values.push(result);
          }
        }
      }
    }
    return values;
  }
}
// Copy positional info from an mdast node onto the generated hast node.
function patch(from, to) {
  if (from.position) {
    to.position = position(from);
  }
}
// Apply `node.data.hName` / `hProperties` / `hChildren` overrides from the
// mdast node onto the generated hast node, possibly wrapping non-elements.
function applyData(from, to) {
  let result = to;
  const data = from && from.data;
  if (data) {
    const hName = data.hName;
    const hChildren = data.hChildren;
    const hProperties = data.hProperties;
    if (typeof hName === "string") {
      if (result.type === "element") {
        result.tagName = hName;
      } else {
        // Not an element yet: wrap it (or its children) in one.
        const children = "children" in result ? result.children : [result];
        result = { type: "element", tagName: hName, properties: {}, children };
      }
    }
    if (result.type === "element" && hProperties) {
      // Clone so the source data is never shared with the output tree.
      Object.assign(result.properties, esm_default(hProperties));
    }
    if ("children" in result && result.children && hChildren !== null && hChildren !== void 0) {
      result.children = hChildren;
    }
  }
  return result;
}
// Fallback for mdast node types without a handler: literal nodes become
// text (unless they carry h* overrides), everything else becomes a `<div>`.
function defaultUnknownHandler(state, node2) {
  const data = node2.data || {};
  const hasOwnProp = Object.prototype.hasOwnProperty;
  const hasHastFields = hasOwnProp.call(data, "hProperties") || hasOwnProp.call(data, "hChildren");
  const result = "value" in node2 && !hasHastFields ? { type: "text", value: node2.value } : {
    type: "element",
    tagName: "div",
    properties: {},
    children: state.all(node2)
  };
  state.patch(node2, result);
  return state.applyData(node2, result);
}
// Interleave newline text nodes between `nodes`; when `loose`, also add
// surrounding newlines (before the first and after the last node).
function wrap(nodes, loose) {
  const result = [];
  if (loose) {
    result.push({ type: "text", value: "\n" });
  }
  for (let index = 0; index < nodes.length; index++) {
    if (index > 0) {
      result.push({ type: "text", value: "\n" });
    }
    result.push(nodes[index]);
  }
  if (loose && nodes.length > 0) {
    result.push({ type: "text", value: "\n" });
  }
  return result;
}
// Strip leading tabs and spaces (markdown whitespace) from a string.
function trimMarkdownSpaceStart(value) {
  let index = 0;
  while (index < value.length) {
    const code2 = value.charCodeAt(index);
    if (code2 !== 9 && code2 !== 32) break;
    index++;
  }
  return value.slice(index);
}
// node_modules/mdast-util-to-hast/lib/index.js
// Transform a whole mdast tree into hast, appending the generated
// footnotes section when any footnotes were referenced.
function toHast(tree, options) {
  const state = createState(tree, options);
  const transformed = state.one(tree, void 0);
  const foot = footer(state);
  let result;
  if (Array.isArray(transformed)) {
    result = { type: "root", children: transformed };
  } else {
    result = transformed || { type: "root", children: [] };
  }
  if (foot) {
    ok("children" in result);
    result.children.push({ type: "text", value: "\n" }, foot);
  }
  return result;
}
// node_modules/remark-rehype/lib/index.js
// unified plugin bridging remark (mdast) to rehype (hast). With a
// `destination` processor it runs that processor over the hast tree;
// otherwise it simply returns the hast tree as the new syntax tree.
function remarkRehype(destination, options) {
  if (destination && "run" in destination) {
    return async function(tree, file) {
      const hastTree = (
        /** @type {HastRoot} */
        toHast(tree, { file, ...options })
      );
      await destination.run(hastTree, file);
    };
  }
  // No destination: the first argument (if any) is the options bag.
  return function(tree, file) {
    return (
      /** @type {HastRoot} */
      toHast(tree, { file, ...destination || options })
    );
  };
}
// node_modules/bail/index.js
// Throw the given error, if there is one; otherwise do nothing.
function bail(error) {
  if (!error) return;
  throw error;
}
// node_modules/unified/lib/index.js
// CJS interop: expose the bundled `extend` package as an ES-module namespace.
var import_extend = __toESM(require_extend(), 1);
// node_modules/is-plain-obj/index.js
// Whether a value is a plain object: `{}`, `new Object()`, or
// `Object.create(null)` — not arrays, class instances, or iterables.
function isPlainObject(value) {
  if (typeof value !== "object" || value === null) {
    return false;
  }
  const prototype = Object.getPrototypeOf(value);
  const prototypeOk = prototype === null || prototype === Object.prototype || Object.getPrototypeOf(prototype) === null;
  if (!prototypeOk) {
    return false;
  }
  return !(Symbol.toStringTag in value) && !(Symbol.iterator in value);
}
// node_modules/trough/lib/index.js
// Create a middleware pipeline. Each middleware may be synchronous, return
// a promise, or accept a trailing node-style callback; outputs flow into
// the next middleware's inputs (nullish outputs keep the previous value).
function trough() {
  const fns = [];
  const pipeline = { run, use };
  return pipeline;
  function run(...values) {
    let middlewareIndex = -1;
    const callback = values.pop();
    if (typeof callback !== "function") {
      throw new TypeError("Expected function as last argument, not " + callback);
    }
    next(null, ...values);
    function next(error, ...output) {
      const fn = fns[++middlewareIndex];
      let index = -1;
      if (error) {
        // First error short-circuits the rest of the pipeline.
        callback(error);
        return;
      }
      // Nullish outputs fall back to the corresponding previous value.
      while (++index < values.length) {
        if (output[index] === null || output[index] === void 0) {
          output[index] = values[index];
        }
      }
      values = output;
      if (fn) {
        wrap2(fn, next)(...output);
      } else {
        callback(null, ...output);
      }
    }
  }
  function use(middelware) {
    if (typeof middelware !== "function") {
      throw new TypeError(
        "Expected `middelware` to be a function, not " + middelware
      );
    }
    fns.push(middelware);
    return pipeline;
  }
}
// Adapt one middleware to the callback protocol: detects whether it expects
// a callback (by arity), returned a promise, returned an error, or threw.
function wrap2(middleware, callback) {
  let called;
  return wrapped;
  function wrapped(...parameters) {
    const fnExpectsCallback = middleware.length > parameters.length;
    let result;
    if (fnExpectsCallback) {
      parameters.push(done);
    }
    try {
      result = middleware.apply(this, parameters);
    } catch (error) {
      const exception = (
        /** @type {Error} */
        error
      );
      // If the callback was already used, this throw happened after
      // completion — surface it instead of swallowing it.
      if (fnExpectsCallback && called) {
        throw exception;
      }
      return done(exception);
    }
    if (!fnExpectsCallback) {
      if (result && result.then && typeof result.then === "function") {
        result.then(then, done);
      } else if (result instanceof Error) {
        done(result);
      } else {
        then(result);
      }
    }
  }
  // Guarantee the downstream callback fires at most once.
  function done(error, ...output) {
    if (!called) {
      called = true;
      callback(error, ...output);
    }
  }
  function then(value) {
    done(null, value);
  }
}
// node_modules/vfile/lib/minpath.browser.js
// Minimal browser shim of Node's `path` module (POSIX flavor only).
var minpath = { basename, dirname, extname, join, sep: "/" };
// Port of Node's POSIX `path.basename`: last path segment, optionally with
// a trailing `extname2` removed when it matches exactly.
function basename(path, extname2) {
  if (extname2 !== void 0 && typeof extname2 !== "string") {
    throw new TypeError('"ext" argument must be a string');
  }
  assertPath(path);
  let start = 0;
  let end = -1;
  let index = path.length;
  let seenNonSlash;
  // Fast path: no usable extension argument — scan backwards for the last
  // segment, ignoring trailing slashes (47 is `/`).
  if (extname2 === void 0 || extname2.length === 0 || extname2.length > path.length) {
    while (index--) {
      if (path.codePointAt(index) === 47) {
        if (seenNonSlash) {
          start = index + 1;
          break;
        }
      } else if (end < 0) {
        seenNonSlash = true;
        end = index + 1;
      }
    }
    return end < 0 ? "" : path.slice(start, end);
  }
  if (extname2 === path) {
    return "";
  }
  let firstNonSlashEnd = -1;
  // Match the extension from the right while locating segment boundaries.
  let extnameIndex = extname2.length - 1;
  while (index--) {
    if (path.codePointAt(index) === 47) {
      if (seenNonSlash) {
        start = index + 1;
        break;
      }
    } else {
      if (firstNonSlashEnd < 0) {
        seenNonSlash = true;
        firstNonSlashEnd = index + 1;
      }
      if (extnameIndex > -1) {
        if (path.codePointAt(index) === extname2.codePointAt(extnameIndex--)) {
          if (extnameIndex < 0) {
            // Whole extension matched: cut it off here.
            end = index;
          }
        } else {
          // Mismatch: the extension does not apply; keep the full segment.
          extnameIndex = -1;
          end = firstNonSlashEnd;
        }
      }
    }
  }
  if (start === end) {
    end = firstNonSlashEnd;
  } else if (end < 0) {
    end = path.length;
  }
  return path.slice(start, end);
}
// Return the directory name of `path` (like POSIX `path.dirname`), ignoring
// trailing slashes.
function dirname(path) {
  assertPath(path);
  if (path.length === 0) {
    return ".";
  }
  let index = path.length;
  let sawNonSlash = false;
  let end = -1;
  // Walk backwards (index 0 is never inspected): the first `/` (47) that
  // follows a run of non-slash characters separates the directory from the
  // last segment.
  while (--index) {
    if (path.codePointAt(index) === 47) {
      if (sawNonSlash) {
        end = index;
        break;
      }
    } else if (!sawNonSlash) {
      sawNonSlash = true;
    }
  }
  if (end < 0) {
    // No separator found: the root for absolute paths, `.` otherwise.
    return path.codePointAt(0) === 47 ? "/" : ".";
  }
  if (end === 1 && path.codePointAt(0) === 47) {
    // Preserve a leading double slash (`//x` -> `//`).
    return "//";
  }
  return path.slice(0, end);
}
// Return the extension of `path`, from the last `.` in the final path segment
// to the end (like POSIX `path.extname`); `''` when there is none.
function extname(path) {
  assertPath(path);
  let index = path.length;
  let end = -1;
  let startPart = 0;
  let startDot = -1;
  // `startDot`/`preDotState` cooperate to reject dotfiles (`.name`) and `..`
  // components, which have no extension — see the final check below.
  let preDotState = 0;
  let unmatchedSlash;
  while (index--) {
    const code2 = path.codePointAt(index);
    if (code2 === 47) {
      // 47 is `/`: stop once the final segment has been fully scanned.
      if (unmatchedSlash) {
        startPart = index + 1;
        break;
      }
      continue;
    }
    if (end < 0) {
      // First non-slash from the right: end of the segment (skips trailing slashes).
      unmatchedSlash = true;
      end = index + 1;
    }
    if (code2 === 46) {
      // 46 is `.`: remember the right-most dot in the segment.
      if (startDot < 0) {
        startDot = index;
      } else if (preDotState !== 1) {
        preDotState = 1;
      }
    } else if (startDot > -1) {
      preDotState = -1;
    }
  }
  if (startDot < 0 || end < 0 || // We saw a non-dot character immediately before the dot.
  preDotState === 0 || // The (right-most) trimmed path component is exactly `..`.
  preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) {
    return "";
  }
  return path.slice(startDot, end);
}
// Join the given `segments` with `/`, skipping empty ones, and normalize the
// result (like POSIX `path.join`); `'.'` when nothing usable was given.
function join(...segments) {
  let joined;
  for (const segment of segments) {
    // Validate every segment, even empty ones.
    assertPath(segment);
    if (segment) {
      joined = joined === void 0 ? segment : joined + "/" + segment;
    }
  }
  return joined === void 0 ? "." : normalize(joined);
}
// Normalize `path` (like POSIX `path.normalize`): collapse `.`/`..` segments
// and duplicate slashes, keeping a leading `/` and a single trailing `/`.
function normalize(path) {
  assertPath(path);
  const absolute = path.codePointAt(0) === 47;
  let normalized = normalizeString(path, !absolute);
  if (normalized.length === 0 && !absolute) {
    normalized = ".";
  }
  // Keep a trailing slash when the input ended with one (47 is `/`).
  if (normalized.length > 0 && path.codePointAt(path.length - 1) === 47) {
    normalized += "/";
  }
  if (absolute) {
    return "/" + normalized;
  }
  return normalized;
}
// Resolve `.` and `..` segments in `path` (the core of `normalize`).
// When `allowAboveRoot` is true (relative paths), a `..` that cannot be
// resolved against an earlier segment is kept; otherwise it is dropped.
function normalizeString(path, allowAboveRoot) {
  let result = "";
  // Length of the last segment currently in `result` (2 may mean `..`).
  let lastSegmentLength = 0;
  let lastSlash = -1;
  // Count of consecutive `.` in the current segment; -1 once any other
  // character is seen (i.e. the segment is a normal name).
  let dots = 0;
  let index = -1;
  let code2;
  let lastSlashIndex;
  while (++index <= path.length) {
    if (index < path.length) {
      code2 = path.codePointAt(index);
    } else if (code2 === 47) {
      // Ran off the end right after a slash: nothing left to flush.
      break;
    } else {
      // Treat end-of-string as a virtual `/` so the last segment is flushed.
      code2 = 47;
    }
    if (code2 === 47) {
      if (lastSlash === index - 1 || dots === 1) {
        // Empty segment (`//`) or a lone `.`: drop it.
      } else if (lastSlash !== index - 1 && dots === 2) {
        // A `..` segment: pop the previous segment from `result` if possible.
        if (result.length < 2 || lastSegmentLength !== 2 || result.codePointAt(result.length - 1) !== 46 || result.codePointAt(result.length - 2) !== 46) {
          if (result.length > 2) {
            lastSlashIndex = result.lastIndexOf("/");
            if (lastSlashIndex !== result.length - 1) {
              if (lastSlashIndex < 0) {
                result = "";
                lastSegmentLength = 0;
              } else {
                result = result.slice(0, lastSlashIndex);
                lastSegmentLength = result.length - 1 - result.lastIndexOf("/");
              }
              lastSlash = index;
              dots = 0;
              continue;
            }
          } else if (result.length > 0) {
            result = "";
            lastSegmentLength = 0;
            lastSlash = index;
            dots = 0;
            continue;
          }
        }
        if (allowAboveRoot) {
          // Nothing to pop: keep the `..` so relative paths may climb upwards.
          result = result.length > 0 ? result + "/.." : "..";
          lastSegmentLength = 2;
        }
      } else {
        // Ordinary segment: append it to `result`.
        if (result.length > 0) {
          result += "/" + path.slice(lastSlash + 1, index);
        } else {
          result = path.slice(lastSlash + 1, index);
        }
        lastSegmentLength = index - lastSlash - 1;
      }
      lastSlash = index;
      dots = 0;
    } else if (code2 === 46 && dots > -1) {
      // 46 is `.`.
      dots++;
    } else {
      dots = -1;
    }
  }
  return result;
}
// Guard: every minpath entry point requires a string path.
function assertPath(path) {
  if (typeof path === "string") {
    return;
  }
  throw new TypeError(
    "Path must be a string. Received " + JSON.stringify(path)
  );
}
// node_modules/vfile/lib/minproc.browser.js
// Browser substitute for Node's `process`: only `cwd()` is needed, and in a
// browser the working directory is simply the root.
var minproc = { cwd };
function cwd() {
  return "/";
}
// node_modules/vfile/lib/minurl.shared.js
// Whether `fileUrlOrPath` looks like a WHATWG `URL` instance: an object with
// truthy `href` and `protocol` and no legacy `url.Url` `auth` field.
function isUrl(fileUrlOrPath) {
  if (fileUrlOrPath === null || typeof fileUrlOrPath !== "object") {
    return false;
  }
  if (!("href" in fileUrlOrPath) || !fileUrlOrPath.href) {
    return false;
  }
  if (!("protocol" in fileUrlOrPath) || !fileUrlOrPath.protocol) {
    return false;
  }
  // Legacy Node `url.Url` objects carry `auth`; WHATWG `URL`s do not.
  // @ts-expect-error: indexing is fine.
  return fileUrlOrPath.auth === void 0;
}
// node_modules/vfile/lib/minurl.browser.js
// Convert a `file:` URL (or a string holding one) to a POSIX path, mirroring
// Node's `url.fileURLToPath` error behavior.
function urlToPath(path) {
  let url = path;
  if (typeof url === "string") {
    url = new URL(url);
  } else if (!isUrl(url)) {
    const error = new TypeError(
      'The "path" argument must be of type string or an instance of URL. Received `' + path + "`"
    );
    error.code = "ERR_INVALID_ARG_TYPE";
    throw error;
  }
  if (url.protocol !== "file:") {
    const error = new TypeError("The URL must be of scheme file");
    error.code = "ERR_INVALID_URL_SCHEME";
    throw error;
  }
  return getPathFromURLPosix(url);
}
// Decode the pathname of a `file:` URL into a POSIX path, rejecting URLs
// with a host or with percent-encoded `/` (`%2F`/`%2f`) in the path.
function getPathFromURLPosix(url) {
  if (url.hostname !== "") {
    const error = new TypeError(
      'File URL host must be "localhost" or empty on darwin'
    );
    error.code = "ERR_INVALID_FILE_URL_HOST";
    throw error;
  }
  const pathname = url.pathname;
  for (let index = 0; index < pathname.length; index++) {
    // 37 is `%`, 50 is `2`; a following `F` (70) or `f` (102) encodes `/`.
    if (pathname.codePointAt(index) === 37 && pathname.codePointAt(index + 1) === 50) {
      const third = pathname.codePointAt(index + 2);
      if (third === 70 || third === 102) {
        const error = new TypeError(
          "File URL path must not include encoded / characters"
        );
        error.code = "ERR_INVALID_FILE_URL_PATH";
        throw error;
      }
    }
  }
  return decodeURIComponent(pathname);
}
// node_modules/vfile/lib/index.js
// Path fields in the order the `VFile` constructor assigns them: least
// specific first, so later (more specific) fields win.
var order = (
  /** @type {const} */
  [
    "history",
    "path",
    "basename",
    "stem",
    "extname",
    "dirname"
  ]
);
// Virtual file: an in-memory representation of a file with a value, path
// info, arbitrary metadata (`data`), and associated lint messages.
var VFile = class {
  /**
   * Create a new virtual file.
   *
   * `options` is treated as:
   *
   * * `string` or `Uint8Array` — `{value: options}`
   * * `URL` — `{path: options}`
   * * `VFile` — shallow copies its data over to the new file
   * * `object` — all fields are shallow copied over to the new file
   *
   * Path related fields are set in the following order (least specific to
   * most specific): `history`, `path`, `basename`, `stem`, `extname`,
   * `dirname`.
   *
   * You cannot set `dirname` or `extname` without setting either `history`,
   * `path`, `basename`, or `stem` too.
   *
   * @param {Compatible | null | undefined} [value]
   * File value.
   * @returns
   * New instance.
   */
  constructor(value) {
    let options;
    // Normalize every accepted input shape into an options object.
    if (!value) {
      options = {};
    } else if (isUrl(value)) {
      options = { path: value };
    } else if (typeof value === "string" || isUint8Array(value)) {
      options = { value };
    } else {
      options = value;
    }
    // Placeholder when `options` carries its own `cwd` (copied in the loop at
    // the bottom); otherwise default to the environment's working directory.
    this.cwd = "cwd" in options ? "" : minproc.cwd();
    this.data = {};
    this.history = [];
    this.messages = [];
    // Declared (not assigned) so every instance shares the same field shape.
    this.value;
    this.map;
    this.result;
    this.stored;
    let index = -1;
    // Apply path-ish fields least-specific first so specific ones win.
    while (++index < order.length) {
      const field2 = order[index];
      if (field2 in options && options[field2] !== void 0 && options[field2] !== null) {
        this[field2] = field2 === "history" ? [...options[field2]] : options[field2];
      }
    }
    let field;
    // Copy all remaining (non-path) fields verbatim.
    for (field in options) {
      if (!order.includes(field)) {
        this[field] = options[field];
      }
    }
  }
  /**
   * Get the basename (including extname) (example: `'index.min.js'`).
   *
   * @returns {string | undefined}
   * Basename.
   */
  get basename() {
    return typeof this.path === "string" ? minpath.basename(this.path) : void 0;
  }
  /**
   * Set basename (including extname) (`'index.min.js'`).
   *
   * Cannot contain path separators (`'/'` on unix, macOS, and browsers, `'\'`
   * on windows).
   * Cannot be nullified (use `file.path = file.dirname` instead).
   *
   * @param {string} basename
   * Basename.
   * @returns {undefined}
   * Nothing.
   */
  set basename(basename2) {
    assertNonEmpty(basename2, "basename");
    assertPart(basename2, "basename");
    this.path = minpath.join(this.dirname || "", basename2);
  }
  /**
   * Get the parent path (example: `'~'`).
   *
   * @returns {string | undefined}
   * Dirname.
   */
  get dirname() {
    return typeof this.path === "string" ? minpath.dirname(this.path) : void 0;
  }
  /**
   * Set the parent path (example: `'~'`).
   *
   * Cannot be set if theres no `path` yet.
   *
   * @param {string | undefined} dirname
   * Dirname.
   * @returns {undefined}
   * Nothing.
   */
  set dirname(dirname2) {
    assertPath2(this.basename, "dirname");
    this.path = minpath.join(dirname2 || "", this.basename);
  }
  /**
   * Get the extname (including dot) (example: `'.js'`).
   *
   * @returns {string | undefined}
   * Extname.
   */
  get extname() {
    return typeof this.path === "string" ? minpath.extname(this.path) : void 0;
  }
  /**
   * Set the extname (including dot) (example: `'.js'`).
   *
   * Cannot contain path separators (`'/'` on unix, macOS, and browsers, `'\'`
   * on windows).
   * Cannot be set if theres no `path` yet.
   *
   * @param {string | undefined} extname
   * Extname.
   * @returns {undefined}
   * Nothing.
   */
  set extname(extname2) {
    assertPart(extname2, "extname");
    assertPath2(this.dirname, "extname");
    if (extname2) {
      // 46 is `.`: an extension must start with a dot and contain only one.
      if (extname2.codePointAt(0) !== 46) {
        throw new Error("`extname` must start with `.`");
      }
      if (extname2.includes(".", 1)) {
        throw new Error("`extname` cannot contain multiple dots");
      }
    }
    this.path = minpath.join(this.dirname, this.stem + (extname2 || ""));
  }
  /**
   * Get the full path (example: `'~/index.min.js'`).
   *
   * @returns {string}
   * Path.
   */
  get path() {
    // The most recent entry in `history` is the current path.
    return this.history[this.history.length - 1];
  }
  /**
   * Set the full path (example: `'~/index.min.js'`).
   *
   * Cannot be nullified.
   * You can set a file URL (a `URL` object with a `file:` protocol) which will
   * be turned into a path with `url.fileURLToPath`.
   *
   * @param {URL | string} path
   * Path.
   * @returns {undefined}
   * Nothing.
   */
  set path(path) {
    if (isUrl(path)) {
      path = urlToPath(path);
    }
    assertNonEmpty(path, "path");
    // Only record actual changes in `history`.
    if (this.path !== path) {
      this.history.push(path);
    }
  }
  /**
   * Get the stem (basename w/o extname) (example: `'index.min'`).
   *
   * @returns {string | undefined}
   * Stem.
   */
  get stem() {
    return typeof this.path === "string" ? minpath.basename(this.path, this.extname) : void 0;
  }
  /**
   * Set the stem (basename w/o extname) (example: `'index.min'`).
   *
   * Cannot contain path separators (`'/'` on unix, macOS, and browsers, `'\'`
   * on windows).
   * Cannot be nullified (use `file.path = file.dirname` instead).
   *
   * @param {string} stem
   * Stem.
   * @returns {undefined}
   * Nothing.
   */
  set stem(stem) {
    assertNonEmpty(stem, "stem");
    assertPart(stem, "stem");
    this.path = minpath.join(this.dirname || "", stem + (this.extname || ""));
  }
  // Normal prototypal methods.
  /**
   * Create a fatal message for `reason` associated with the file.
   *
   * The `fatal` field of the message is set to `true` (error; file not usable)
   * and the `file` field is set to the current file path.
   * The message is added to the `messages` field on `file`.
   *
   * > 🪦 **Note**: also has obsolete signatures.
   *
   * @overload
   * @param {string} reason
   * @param {MessageOptions | null | undefined} [options]
   * @returns {never}
   *
   * @overload
   * @param {string} reason
   * @param {Node | NodeLike | null | undefined} parent
   * @param {string | null | undefined} [origin]
   * @returns {never}
   *
   * @overload
   * @param {string} reason
   * @param {Point | Position | null | undefined} place
   * @param {string | null | undefined} [origin]
   * @returns {never}
   *
   * @overload
   * @param {string} reason
   * @param {string | null | undefined} [origin]
   * @returns {never}
   *
   * @overload
   * @param {Error | VFileMessage} cause
   * @param {Node | NodeLike | null | undefined} parent
   * @param {string | null | undefined} [origin]
   * @returns {never}
   *
   * @overload
   * @param {Error | VFileMessage} cause
   * @param {Point | Position | null | undefined} place
   * @param {string | null | undefined} [origin]
   * @returns {never}
   *
   * @overload
   * @param {Error | VFileMessage} cause
   * @param {string | null | undefined} [origin]
   * @returns {never}
   *
   * @param {Error | VFileMessage | string} causeOrReason
   * Reason for message, should use markdown.
   * @param {Node | NodeLike | MessageOptions | Point | Position | string | null | undefined} [optionsOrParentOrPlace]
   * Configuration (optional).
   * @param {string | null | undefined} [origin]
   * Place in code where the message originates (example:
   * `'my-package:my-rule'` or `'my-rule'`).
   * @returns {never}
   * Never.
   * @throws {VFileMessage}
   * Message.
   */
  fail(causeOrReason, optionsOrParentOrPlace, origin) {
    // Same as `message()` but fatal: the message is thrown, not returned.
    const message = this.message(causeOrReason, optionsOrParentOrPlace, origin);
    message.fatal = true;
    throw message;
  }
  /**
   * Create an info message for `reason` associated with the file.
   *
   * The `fatal` field of the message is set to `undefined` (info; change
   * likely not needed) and the `file` field is set to the current file path.
   * The message is added to the `messages` field on `file`.
   *
   * > 🪦 **Note**: also has obsolete signatures.
   *
   * @overload
   * @param {string} reason
   * @param {MessageOptions | null | undefined} [options]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {string} reason
   * @param {Node | NodeLike | null | undefined} parent
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {string} reason
   * @param {Point | Position | null | undefined} place
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {string} reason
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {Error | VFileMessage} cause
   * @param {Node | NodeLike | null | undefined} parent
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {Error | VFileMessage} cause
   * @param {Point | Position | null | undefined} place
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {Error | VFileMessage} cause
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @param {Error | VFileMessage | string} causeOrReason
   * Reason for message, should use markdown.
   * @param {Node | NodeLike | MessageOptions | Point | Position | string | null | undefined} [optionsOrParentOrPlace]
   * Configuration (optional).
   * @param {string | null | undefined} [origin]
   * Place in code where the message originates (example:
   * `'my-package:my-rule'` or `'my-rule'`).
   * @returns {VFileMessage}
   * Message.
   */
  info(causeOrReason, optionsOrParentOrPlace, origin) {
    // Same as `message()` but informational: `fatal` is cleared.
    const message = this.message(causeOrReason, optionsOrParentOrPlace, origin);
    message.fatal = void 0;
    return message;
  }
  /**
   * Create a message for `reason` associated with the file.
   *
   * The `fatal` field of the message is set to `false` (warning; change may be
   * needed) and the `file` field is set to the current file path.
   * The message is added to the `messages` field on `file`.
   *
   * > 🪦 **Note**: also has obsolete signatures.
   *
   * @overload
   * @param {string} reason
   * @param {MessageOptions | null | undefined} [options]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {string} reason
   * @param {Node | NodeLike | null | undefined} parent
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {string} reason
   * @param {Point | Position | null | undefined} place
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {string} reason
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {Error | VFileMessage} cause
   * @param {Node | NodeLike | null | undefined} parent
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {Error | VFileMessage} cause
   * @param {Point | Position | null | undefined} place
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @overload
   * @param {Error | VFileMessage} cause
   * @param {string | null | undefined} [origin]
   * @returns {VFileMessage}
   *
   * @param {Error | VFileMessage | string} causeOrReason
   * Reason for message, should use markdown.
   * @param {Node | NodeLike | MessageOptions | Point | Position | string | null | undefined} [optionsOrParentOrPlace]
   * Configuration (optional).
   * @param {string | null | undefined} [origin]
   * Place in code where the message originates (example:
   * `'my-package:my-rule'` or `'my-rule'`).
   * @returns {VFileMessage}
   * Message.
   */
  message(causeOrReason, optionsOrParentOrPlace, origin) {
    const message = new VFileMessage(
      // @ts-expect-error: the overloads are fine.
      causeOrReason,
      optionsOrParentOrPlace,
      origin
    );
    // Prefix the message with the current path, when there is one.
    if (this.path) {
      message.name = this.path + ":" + message.name;
      message.file = this.path;
    }
    // Default severity: warning (`fail`/`info` override this afterwards).
    message.fatal = false;
    this.messages.push(message);
    return message;
  }
  /**
   * Serialize the file.
   *
   * > **Note**: which encodings are supported depends on the engine.
   * > For info on Node.js, see:
   * > <https://nodejs.org/api/util.html#whatwg-supported-encodings>.
   *
   * @param {string | null | undefined} [encoding='utf8']
   * Character encoding to understand `value` as when its a `Uint8Array`
   * (default: `'utf-8'`).
   * @returns {string}
   * Serialized file.
   */
  toString(encoding) {
    if (this.value === void 0) {
      return "";
    }
    if (typeof this.value === "string") {
      return this.value;
    }
    // `value` is a `Uint8Array`: decode it with the requested encoding.
    const decoder = new TextDecoder(encoding || void 0);
    return decoder.decode(this.value);
  }
};
// Throw when `part` (a basename/stem/extname candidate) contains a path
// separator and therefore cannot be a single path component.
function assertPart(part, name) {
  if (!part) {
    return;
  }
  if (part.includes(minpath.sep)) {
    throw new Error(
      "`" + name + "` cannot be a path: did not expect `" + minpath.sep + "`"
    );
  }
}
// Throw when `part` is falsy: the field named `name` cannot be emptied.
function assertNonEmpty(part, name) {
  if (part) {
    return;
  }
  throw new Error("`" + name + "` cannot be empty");
}
// Throw when `path` is unset: setting the field named `name` requires a path.
function assertPath2(path, name) {
  if (path) {
    return;
  }
  throw new Error("Setting `" + name + "` requires `path` to be set too");
}
// Duck-type check for `Uint8Array`-like values (works across realms, unlike
// `instanceof`): any object exposing `byteLength` and `byteOffset`.
function isUint8Array(value) {
  if (!value || typeof value !== "object") {
    return false;
  }
  return "byteLength" in value && "byteOffset" in value;
}
// node_modules/unified/lib/callable-instance.js
// Base "class" whose instances are themselves callable: calling the instance
// invokes the prototype method named `property` (unified uses `'copy'`).
var CallableInstance = (
  /**
   * @type {new <Parameters extends Array<unknown>, Result>(property: string | symbol) => (...parameters: Parameters) => Result}
   */
  /** @type {unknown} */
  /**
   * @this {Function}
   * @param {string | symbol} property
   * @returns {(...parameters: Array<unknown>) => unknown}
   */
  (function(property) {
    const self2 = this;
    const constr = self2.constructor;
    const proto = (
      /** @type {Record<string | symbol, Function>} */
      // Prototypes do exist.
      // type-coverage:ignore-next-line
      constr.prototype
    );
    const value = proto[property];
    // Calling the returned function forwards to the chosen prototype method,
    // with the callable itself bound as `this`.
    const apply = function() {
      return value.apply(apply, arguments);
    };
    // Make the callable behave like a real instance (methods, getters, ...).
    Object.setPrototypeOf(apply, proto);
    return apply;
  })
);
// node_modules/unified/lib/index.js
// Cached `hasOwnProperty` so own-key lookups are safe on arbitrary data.
var own3 = {}.hasOwnProperty;
var Processor = class _Processor extends CallableInstance {
  /**
   * Create a processor.
   */
  constructor() {
    // `CallableInstance` makes calling the processor invoke `copy()`.
    super("copy");
    // Legacy class-style parser/compiler hooks (capitalized) alongside the
    // modern function-style counterparts (lowercase).
    this.Compiler = void 0;
    this.Parser = void 0;
    // Registered `[plugin, ...options]` tuples, applied by `freeze()`.
    this.attachers = [];
    this.compiler = void 0;
    // Position up to which attachers have already been applied by `freeze()`.
    this.freezeIndex = -1;
    this.frozen = void 0;
    // Shared storage backing `data()`.
    this.namespace = {};
    this.parser = void 0;
    // Middleware chain of transformers run between parse and stringify.
    this.transformers = trough();
  }
  /**
   * Copy a processor.
   *
   * @deprecated
   * This is a private internal method and should not be used.
   * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
   * New *unfrozen* processor ({@linkcode Processor}) that is
   * configured to work the same as its ancestor.
   * When the descendant processor is configured in the future it does not
   * affect the ancestral processor.
   */
  copy() {
    const destination = (
      /** @type {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>} */
      new _Processor()
    );
    let index = -1;
    // Re-register every attacher (plugin + options) on the fresh processor.
    while (++index < this.attachers.length) {
      const attacher = this.attachers[index];
      destination.use(...attacher);
    }
    // Deep-copy shared data so descendants cannot mutate the ancestor.
    destination.data((0, import_extend.default)(true, {}, this.namespace));
    return destination;
  }
  /**
   * Configure the processor with info available to all plugins.
   * Information is stored in an object.
   *
   * Typically, options can be given to a specific plugin, but sometimes it
   * makes sense to have information shared with several plugins.
   * For example, a list of HTML elements that are self-closing, which is
   * needed during all phases.
   *
   * > **Note**: setting information cannot occur on *frozen* processors.
   * > Call the processor first to create a new unfrozen processor.
   *
   * > **Note**: to register custom data in TypeScript, augment the
   * > {@linkcode Data} interface.
   *
   * @example
   * This example show how to get and set info:
   *
   * ```js
   * import {unified} from 'unified'
   *
   * const processor = unified().data('alpha', 'bravo')
   *
   * processor.data('alpha') // => 'bravo'
   *
   * processor.data() // => {alpha: 'bravo'}
   *
   * processor.data({charlie: 'delta'})
   *
   * processor.data() // => {charlie: 'delta'}
   * ```
   *
   * @template {keyof Data} Key
   *
   * @overload
   * @returns {Data}
   *
   * @overload
   * @param {Data} dataset
   * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
   *
   * @overload
   * @param {Key} key
   * @returns {Data[Key]}
   *
   * @overload
   * @param {Key} key
   * @param {Data[Key]} value
   * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
   *
   * @param {Data | Key} [key]
   * Key to get or set, or entire dataset to set, or nothing to get the
   * entire dataset (optional).
   * @param {Data[Key]} [value]
   * Value to set (optional).
   * @returns {unknown}
   * The current processor when setting, the value at `key` when getting, or
   * the entire dataset when getting without key.
   */
  data(key, value) {
    if (typeof key === "string") {
      // Single-key access: set when two arguments were given, get otherwise.
      if (arguments.length === 2) {
        assertUnfrozen("data", this.frozen);
        this.namespace[key] = value;
        return this;
      }
      // Own keys only: avoids picking up inherited `Object.prototype` members.
      return own3.call(this.namespace, key) && this.namespace[key] || void 0;
    }
    // An object replaces the whole dataset.
    if (key) {
      assertUnfrozen("data", this.frozen);
      this.namespace = key;
      return this;
    }
    // No arguments: return the whole dataset.
    return this.namespace;
  }
  /**
   * Freeze a processor.
   *
   * Frozen processors are meant to be extended and not to be configured
   * directly.
   *
   * When a processor is frozen it cannot be unfrozen.
   * New processors working the same way can be created by calling the
   * processor.
   *
   * Its possible to freeze processors explicitly by calling `.freeze()`.
   * Processors freeze automatically when `.parse()`, `.run()`, `.runSync()`,
   * `.stringify()`, `.process()`, or `.processSync()` are called.
   *
   * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
   * The current processor.
   */
  freeze() {
    if (this.frozen) {
      return this;
    }
    const self2 = (
      /** @type {Processor} */
      /** @type {unknown} */
      this
    );
    // Apply attachers added since the last (partial) freeze.
    while (++this.freezeIndex < this.attachers.length) {
      const [attacher, ...options] = this.attachers[this.freezeIndex];
      // `false` disables a plugin; `true` means enabled without options.
      if (options[0] === false) {
        continue;
      }
      if (options[0] === true) {
        options[0] = void 0;
      }
      // An attacher may return a transformer to run during the run phase.
      const transformer = attacher.call(self2, ...options);
      if (typeof transformer === "function") {
        this.transformers.use(transformer);
      }
    }
    this.frozen = true;
    this.freezeIndex = Number.POSITIVE_INFINITY;
    return this;
  }
  /**
   * Parse text to a syntax tree.
   *
   * > **Note**: `parse` freezes the processor if not already *frozen*.
   *
   * > **Note**: `parse` performs the parse phase, not the run phase or other
   * > phases.
   *
   * @param {Compatible | undefined} [file]
   * file to parse (optional); typically `string` or `VFile`; any value
   * accepted as `x` in `new VFile(x)`.
   * @returns {ParseTree extends undefined ? Node : ParseTree}
   * Syntax tree representing `file`.
   */
  parse(file) {
    this.freeze();
    const realFile = vfile(file);
    // Prefer the function-style `parser`; fall back to the legacy `Parser`.
    const parser = this.parser || this.Parser;
    assertParser("parse", parser);
    return parser(String(realFile), realFile);
  }
  /**
   * Process the given file as configured on the processor.
   *
   * > **Note**: `process` freezes the processor if not already *frozen*.
   *
   * > **Note**: `process` performs the parse, run, and stringify phases.
   *
   * @overload
   * @param {Compatible | undefined} file
   * @param {ProcessCallback<VFileWithOutput<CompileResult>>} done
   * @returns {undefined}
   *
   * @overload
   * @param {Compatible | undefined} [file]
   * @returns {Promise<VFileWithOutput<CompileResult>>}
   *
   * @param {Compatible | undefined} [file]
   * File (optional); typically `string` or `VFile`]; any value accepted as
   * `x` in `new VFile(x)`.
   * @param {ProcessCallback<VFileWithOutput<CompileResult>> | undefined} [done]
   * Callback (optional).
   * @returns {Promise<VFile> | undefined}
   * Nothing if `done` is given.
   * Otherwise a promise, rejected with a fatal error or resolved with the
   * processed file.
   *
   * The parsed, transformed, and compiled value is available at
   * `file.value` (see note).
   *
   * > **Note**: unified typically compiles by serializing: most
   * > compilers return `string` (or `Uint8Array`).
   * > Some compilers, such as the one configured with
   * > [`rehype-react`][rehype-react], return other values (in this case, a
   * > React tree).
   * > If youre using a compiler that doesnt serialize, expect different
   * > result values.
   * >
   * > To register custom results in TypeScript, add them to
   * > {@linkcode CompileResultMap}.
   *
   * [rehype-react]: https://github.com/rehypejs/rehype-react
   */
  process(file, done) {
    const self2 = this;
    this.freeze();
    assertParser("process", this.parser || this.Parser);
    assertCompiler("process", this.compiler || this.Compiler);
    // Callback style when `done` is given; promise style otherwise.
    return done ? executor(void 0, done) : new Promise(executor);
    function executor(resolve, reject) {
      const realFile = vfile(file);
      const parseTree = (
        /** @type {HeadTree extends undefined ? Node : HeadTree} */
        /** @type {unknown} */
        self2.parse(realFile)
      );
      self2.run(parseTree, realFile, function(error, tree, file2) {
        if (error || !tree || !file2) {
          return realDone(error);
        }
        const compileTree = (
          /** @type {CompileTree extends undefined ? Node : CompileTree} */
          /** @type {unknown} */
          tree
        );
        const compileResult = self2.stringify(compileTree, file2);
        // Serializing compilers land on `value`; non-serializing ones (e.g.
        // React trees) land on `result`.
        if (looksLikeAValue(compileResult)) {
          file2.value = compileResult;
        } else {
          file2.result = compileResult;
        }
        realDone(
          error,
          /** @type {VFileWithOutput<CompileResult>} */
          file2
        );
      });
      // Settle via the promise or via the user-supplied callback.
      function realDone(error, file2) {
        if (error || !file2) {
          reject(error);
        } else if (resolve) {
          resolve(file2);
        } else {
          ok(done, "`done` is defined if `resolve` is not");
          done(void 0, file2);
        }
      }
    }
  }
  /**
   * Process the given file as configured on the processor.
   *
   * An error is thrown if asynchronous transforms are configured.
   *
   * > **Note**: `processSync` freezes the processor if not already *frozen*.
   *
   * > **Note**: `processSync` performs the parse, run, and stringify phases.
   *
   * @param {Compatible | undefined} [file]
   * File (optional); typically `string` or `VFile`; any value accepted as
   * `x` in `new VFile(x)`.
   * @returns {VFileWithOutput<CompileResult>}
   * The processed file.
   *
   * The parsed, transformed, and compiled value is available at
   * `file.value` (see note).
   *
   * > **Note**: unified typically compiles by serializing: most
   * > compilers return `string` (or `Uint8Array`).
   * > Some compilers, such as the one configured with
   * > [`rehype-react`][rehype-react], return other values (in this case, a
   * > React tree).
   * > If youre using a compiler that doesnt serialize, expect different
   * > result values.
   * >
   * > To register custom results in TypeScript, add them to
   * > {@linkcode CompileResultMap}.
   *
   * [rehype-react]: https://github.com/rehypejs/rehype-react
   */
  processSync(file) {
    let complete = false;
    let result;
    this.freeze();
    assertParser("processSync", this.parser || this.Parser);
    assertCompiler("processSync", this.compiler || this.Compiler);
    this.process(file, realDone);
    // If any transform was async, `realDone` has not run yet — that is an error.
    assertDone("processSync", "process", complete);
    ok(result, "we either bailed on an error or have a tree");
    return result;
    function realDone(error, file2) {
      complete = true;
      // Throw synchronously on failure.
      bail(error);
      result = file2;
    }
  }
  /**
   * Run *transformers* on a syntax tree.
   *
   * > **Note**: `run` freezes the processor if not already *frozen*.
   *
   * > **Note**: `run` performs the run phase, not other phases.
   *
   * @overload
   * @param {HeadTree extends undefined ? Node : HeadTree} tree
   * @param {RunCallback<TailTree extends undefined ? Node : TailTree>} done
   * @returns {undefined}
   *
   * @overload
   * @param {HeadTree extends undefined ? Node : HeadTree} tree
   * @param {Compatible | undefined} file
   * @param {RunCallback<TailTree extends undefined ? Node : TailTree>} done
   * @returns {undefined}
   *
   * @overload
   * @param {HeadTree extends undefined ? Node : HeadTree} tree
   * @param {Compatible | undefined} [file]
   * @returns {Promise<TailTree extends undefined ? Node : TailTree>}
   *
   * @param {HeadTree extends undefined ? Node : HeadTree} tree
   * Tree to transform and inspect.
   * @param {(
   * RunCallback<TailTree extends undefined ? Node : TailTree> |
   * Compatible
   * )} [file]
   * File associated with `node` (optional); any value accepted as `x` in
   * `new VFile(x)`.
   * @param {RunCallback<TailTree extends undefined ? Node : TailTree>} [done]
   * Callback (optional).
   * @returns {Promise<TailTree extends undefined ? Node : TailTree> | undefined}
   * Nothing if `done` is given.
   * Otherwise, a promise rejected with a fatal error or resolved with the
   * transformed tree.
   */
  run(tree, file, done) {
    assertNode(tree);
    this.freeze();
    const transformers = this.transformers;
    // `file` is optional: support the `run(tree, done)` signature.
    if (!done && typeof file === "function") {
      done = file;
      file = void 0;
    }
    return done ? executor(void 0, done) : new Promise(executor);
    function executor(resolve, reject) {
      ok(
        typeof file !== "function",
        "`file` cant be a `done` anymore, we checked"
      );
      const realFile = vfile(file);
      transformers.run(tree, realFile, realDone);
      function realDone(error, outputTree, file2) {
        // Transformers may return nothing; keep the input tree then.
        const resultingTree = (
          /** @type {TailTree extends undefined ? Node : TailTree} */
          outputTree || tree
        );
        if (error) {
          reject(error);
        } else if (resolve) {
          resolve(resultingTree);
        } else {
          ok(done, "`done` is defined if `resolve` is not");
          done(void 0, resultingTree, file2);
        }
      }
    }
  }
  /**
   * Run *transformers* on a syntax tree.
   *
   * An error is thrown if asynchronous transforms are configured.
   *
   * > **Note**: `runSync` freezes the processor if not already *frozen*.
   *
   * > **Note**: `runSync` performs the run phase, not other phases.
   *
   * @param {HeadTree extends undefined ? Node : HeadTree} tree
   * Tree to transform and inspect.
   * @param {Compatible | undefined} [file]
   * File associated with `node` (optional); any value accepted as `x` in
   * `new VFile(x)`.
   * @returns {TailTree extends undefined ? Node : TailTree}
   * Transformed tree.
   */
  runSync(tree, file) {
    let complete = false;
    let result;
    this.run(tree, file, realDone);
    // If any transform was async, `realDone` has not run yet — that is an error.
    assertDone("runSync", "run", complete);
    ok(result, "we either bailed on an error or have a tree");
    return result;
    function realDone(error, tree2) {
      // Throw synchronously on failure.
      bail(error);
      result = tree2;
      complete = true;
    }
  }
  /**
   * Compile a syntax tree.
   *
   * > **Note**: `stringify` freezes the processor if not already *frozen*.
   *
   * > **Note**: `stringify` performs the stringify phase, not the run phase
   * > or other phases.
   *
   * @param {CompileTree extends undefined ? Node : CompileTree} tree
   * Tree to compile.
   * @param {Compatible | undefined} [file]
   * File associated with `node` (optional); any value accepted as `x` in
   * `new VFile(x)`.
   * @returns {CompileResult extends undefined ? Value : CompileResult}
   * Textual representation of the tree (see note).
   *
   * > **Note**: unified typically compiles by serializing: most compilers
   * > return `string` (or `Uint8Array`).
   * > Some compilers, such as the one configured with
   * > [`rehype-react`][rehype-react], return other values (in this case, a
   * > React tree).
   * > If youre using a compiler that doesnt serialize, expect different
   * > result values.
   * >
   * > To register custom results in TypeScript, add them to
   * > {@linkcode CompileResultMap}.
   *
   * [rehype-react]: https://github.com/rehypejs/rehype-react
   */
  stringify(tree, file) {
    this.freeze();
    const realFile = vfile(file);
    // Prefer the function-style `compiler`; fall back to the legacy `Compiler`.
    const compiler2 = this.compiler || this.Compiler;
    assertCompiler("stringify", compiler2);
    assertNode(tree);
    return compiler2(tree, realFile);
  }
  /**
   * Configure the processor to use a plugin, a list of usable values, or a
   * preset.
   *
   * If the processor is already using a plugin, the previous plugin
   * configuration is changed based on the options that are passed in.
   * In other words, the plugin is not added a second time.
   *
   * > **Note**: `use` cannot be called on *frozen* processors.
   * > Call the processor first to create a new unfrozen processor.
   *
   * @example
   *   There are many ways to pass plugins to `.use()`.
   *   This example gives an overview:
   *
   *   ```js
   *   import {unified} from 'unified'
   *
   *   unified()
   *     // Plugin with options:
   *     .use(pluginA, {x: true, y: true})
   *     // Passing the same plugin again merges configuration (to `{x: true, y: false, z: true}`):
   *     .use(pluginA, {y: false, z: true})
   *     // Plugins:
   *     .use([pluginB, pluginC])
   *     // Two plugins, the second with options:
   *     .use([pluginD, [pluginE, {}]])
   *     // Preset with plugins and settings:
   *     .use({plugins: [pluginF, [pluginG, {}]], settings: {position: false}})
   *     // Settings only:
   *     .use({settings: {position: false}})
   *   ```
   *
   * @template {Array<unknown>} [Parameters=[]]
   * @template {Node | string | undefined} [Input=undefined]
   * @template [Output=Input]
   *
   * @overload
   * @param {Preset | null | undefined} [preset]
   * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
   *
   * @overload
   * @param {PluggableList} list
   * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
   *
   * @overload
   * @param {Plugin<Parameters, Input, Output>} plugin
   * @param {...(Parameters | [boolean])} parameters
   * @returns {UsePlugin<ParseTree, HeadTree, TailTree, CompileTree, CompileResult, Input, Output>}
   *
   * @param {PluggableList | Plugin | Preset | null | undefined} value
   *   Usable value.
   * @param {...unknown} parameters
   *   Parameters, when a plugin is given as a usable value.
   * @returns {Processor<ParseTree, HeadTree, TailTree, CompileTree, CompileResult>}
   *   Current processor.
   */
  use(value, ...parameters) {
    const attachers = this.attachers;
    const namespace = this.namespace;
    // Configuration changes are not allowed once the processor is frozen.
    assertUnfrozen("use", this.frozen);
    if (value === null || value === void 0) {
      // Empty call: nothing to add.
    } else if (typeof value === "function") {
      // A single plugin; remaining arguments are its parameters.
      addPlugin(value, parameters);
    } else if (typeof value === "object") {
      if (Array.isArray(value)) {
        // A list of plugins, tuples, and/or presets.
        addList(value);
      } else {
        // A preset object (`{plugins, settings}`).
        addPreset(value);
      }
    } else {
      throw new TypeError("Expected usable value, not `" + value + "`");
    }
    return this;
    // Add one list entry: a bare plugin, a `[plugin, ...parameters]` tuple,
    // or a nested preset.
    function add(value2) {
      if (typeof value2 === "function") {
        addPlugin(value2, []);
      } else if (typeof value2 === "object") {
        if (Array.isArray(value2)) {
          const [plugin, ...parameters2] = (
            /** @type {PluginTuple<Array<unknown>>} */
            value2
          );
          addPlugin(plugin, parameters2);
        } else {
          addPreset(value2);
        }
      } else {
        throw new TypeError("Expected usable value, not `" + value2 + "`");
      }
    }
    // Merge a preset: add its plugins and deep-merge its `settings` into the
    // processor's namespace (via `extend` with `deep: true`).
    function addPreset(result) {
      // A preset must carry `plugins` and/or `settings`; anything else is
      // almost certainly a mistake, so fail loudly.
      if (!("plugins" in result) && !("settings" in result)) {
        throw new Error(
          "Expected usable value but received an empty preset, which is probably a mistake: presets typically come with `plugins` and sometimes with `settings`, but this has neither"
        );
      }
      addList(result.plugins);
      if (result.settings) {
        namespace.settings = (0, import_extend.default)(true, namespace.settings, result.settings);
      }
    }
    // Add every entry of a plugin list; a nullish list is a no-op.
    function addList(plugins) {
      let index = -1;
      if (plugins === null || plugins === void 0) {
      } else if (Array.isArray(plugins)) {
        while (++index < plugins.length) {
          const thing = plugins[index];
          add(thing);
        }
      } else {
        throw new TypeError("Expected a list of plugins, not `" + plugins + "`");
      }
    }
    // Register a plugin. If the same plugin function is already attached,
    // reconfigure that entry instead of attaching it twice: plain-object
    // primary options are deep-merged, any other primary replaces the old one.
    function addPlugin(plugin, parameters2) {
      let index = -1;
      let entryIndex = -1;
      // Look for an existing attacher entry for this exact plugin function.
      while (++index < attachers.length) {
        if (attachers[index][0] === plugin) {
          entryIndex = index;
          break;
        }
      }
      if (entryIndex === -1) {
        attachers.push([plugin, ...parameters2]);
      } else if (parameters2.length > 0) {
        // Only reconfigure when new parameters were actually given.
        let [primary, ...rest] = parameters2;
        const currentPrimary = attachers[entryIndex][1];
        if (isPlainObject(currentPrimary) && isPlainObject(primary)) {
          // Both old and new primary options are plain objects: deep-merge.
          primary = (0, import_extend.default)(true, currentPrimary, primary);
        }
        attachers[entryIndex] = [plugin, primary, ...rest];
      }
    }
  }
};
// Base processor instance, frozen so it cannot be configured directly
// (`assertUnfrozen` would throw); callers invoke `unified()` to obtain a
// configurable processor (see `createProcessor` below).
var unified = new Processor().freeze();
/**
 * Assert that a parse function is configured.
 *
 * @param {string} name
 *   Name of the calling method, interpolated into the error message.
 * @param {unknown} value
 *   Candidate parser.
 * @returns {undefined}
 *   Nothing; throws a `TypeError` when `value` is not a function.
 */
function assertParser(name, value) {
  if (typeof value === "function") {
    return;
  }
  throw new TypeError(`Cannot \`${name}\` without \`parser\``);
}
/**
 * Assert that a compile function is configured.
 *
 * @param {string} name
 *   Name of the calling method, interpolated into the error message.
 * @param {unknown} value
 *   Candidate compiler.
 * @returns {undefined}
 *   Nothing; throws a `TypeError` when `value` is not a function.
 */
function assertCompiler(name, value) {
  if (typeof value === "function") {
    return;
  }
  throw new TypeError(`Cannot \`${name}\` without \`compiler\``);
}
/**
 * Assert that the processor is not frozen before a configuration change.
 *
 * @param {string} name
 *   Name of the calling method, interpolated into the error message.
 * @param {boolean | undefined} frozen
 *   Whether the processor is frozen.
 * @returns {undefined}
 *   Nothing; throws an `Error` when `frozen` is truthy.
 */
function assertUnfrozen(name, frozen) {
  if (!frozen) {
    return;
  }
  throw new Error(
    "Cannot call `" + name + "` on a frozen processor.\nCreate a new processor first, by calling it: use `processor()` instead of `processor`."
  );
}
/**
 * Assert that `node2` looks like a unist node: a plain object carrying a
 * string `type` field.
 *
 * @param {unknown} node2
 *   Value to check.
 * @returns {undefined}
 *   Nothing; throws a `TypeError` otherwise.
 */
function assertNode(node2) {
  const isNode = isPlainObject(node2) && typeof node2.type === "string";
  if (!isNode) {
    throw new TypeError("Expected node, got `" + node2 + "`");
  }
}
/**
 * Assert that a synchronous entry point actually completed synchronously.
 *
 * @param {string} name
 *   Name of the sync method (e.g. `runSync`).
 * @param {string} asyncName
 *   Name of the async counterpart to suggest (e.g. `run`).
 * @param {boolean} complete
 *   Whether the callback already fired.
 * @returns {undefined}
 *   Nothing; throws an `Error` when still pending.
 */
function assertDone(name, asyncName, complete) {
  if (complete) {
    return;
  }
  throw new Error(
    "`" + name + "` finished async. Use `" + asyncName + "` instead"
  );
}
/**
 * Coerce `value` to a `VFile`: values that already duck-type as a file are
 * returned unchanged, everything else is wrapped in a new `VFile`.
 *
 * @param {Compatible | undefined} value
 *   Anything accepted by `new VFile(x)`, or an existing file.
 * @returns {VFile}
 *   A file.
 */
function vfile(value) {
  if (looksLikeAVFile(value)) {
    return value;
  }
  return new VFile(value);
}
/**
 * Duck-type check for a `VFile`: an object exposing both `message` and
 * `messages` (own or inherited).
 *
 * @param {unknown} value
 *   Value to check.
 * @returns {boolean}
 *   Whether `value` looks like a file.
 */
function looksLikeAVFile(value) {
  if (!value || typeof value !== "object") {
    return false;
  }
  return "message" in value && "messages" in value;
}
/**
 * Duck-type check for a file *value*: a string or something that looks like
 * a `Uint8Array`.
 *
 * @param {unknown} value
 *   Value to check.
 * @returns {boolean}
 *   Whether `value` looks like a file value.
 */
function looksLikeAValue(value) {
  if (typeof value === "string") {
    return true;
  }
  return isUint8Array2(value);
}
/**
 * Duck-type check for a `Uint8Array`: an object exposing both `byteLength`
 * and `byteOffset` (own or inherited).
 *
 * @param {unknown} value
 *   Value to check.
 * @returns {boolean}
 *   Whether `value` looks like a `Uint8Array`.
 */
function isUint8Array2(value) {
  if (!value || typeof value !== "object") {
    return false;
  }
  return "byteLength" in value && "byteOffset" in value;
}
// node_modules/react-markdown/lib/index.js
// Changelog URL, interpolated into the deprecation error messages in `post`.
var changelog = "https://github.com/remarkjs/react-markdown/blob/main/changelog.md";
// Shared empty plugin list used when no remark/rehype plugins are supplied.
var emptyPlugins = [];
// Forced remark-rehype options: spread *last* in `createProcessor`, so
// `allowDangerousHtml` is always `true` (raw HTML nodes are then handled —
// dropped or turned into text — by the `transform` visitor in `post`).
var emptyRemarkRehypeOptions = { allowDangerousHtml: true };
// Protocols that `defaultUrlTransform` lets through unchanged.
var safeProtocol = /^(https?|ircs?|mailto|xmpp)$/i;
// Props removed or renamed in newer react-markdown versions. `post` throws
// an explanatory error for each, linking to `changelog` + the `id` anchor
// and suggesting the `to` replacement when one exists.
var deprecations = [
  { from: "astPlugins", id: "remove-buggy-html-in-markdown-parser" },
  { from: "allowDangerousHtml", id: "remove-buggy-html-in-markdown-parser" },
  {
    from: "allowNode",
    id: "replace-allownode-allowedtypes-and-disallowedtypes",
    to: "allowElement"
  },
  {
    from: "allowedTypes",
    id: "replace-allownode-allowedtypes-and-disallowedtypes",
    to: "allowedElements"
  },
  { from: "className", id: "remove-classname" },
  {
    from: "disallowedTypes",
    id: "replace-allownode-allowedtypes-and-disallowedtypes",
    to: "disallowedElements"
  },
  { from: "escapeHtml", id: "remove-buggy-html-in-markdown-parser" },
  { from: "includeElementIndex", id: "#remove-includeelementindex" },
  {
    from: "includeNodeIndex",
    id: "change-includenodeindex-to-includeelementindex"
  },
  { from: "linkTarget", id: "remove-linktarget" },
  { from: "plugins", id: "change-plugins-to-remarkplugins", to: "remarkPlugins" },
  { from: "rawSourcePos", id: "#remove-rawsourcepos" },
  { from: "renderers", id: "change-renderers-to-components", to: "components" },
  { from: "source", id: "change-source-to-children", to: "children" },
  { from: "sourcePos", id: "#remove-sourcepos" },
  { from: "transformImageUri", id: "#add-urltransform", to: "urlTransform" },
  { from: "transformLinkUri", id: "#add-urltransform", to: "urlTransform" }
];
/**
 * Synchronous react-markdown component: parse `options.children`, run the
 * (synchronous) transformers, and render the resulting tree.
 *
 * @param {Options} options
 *   Props.
 * @returns
 *   React element.
 */
function Markdown(options) {
  const processor = createProcessor(options);
  const file = createFile(options);
  const parsedTree = processor.parse(file);
  const transformedTree = processor.runSync(parsedTree, file);
  return post(transformedTree, options);
}
/**
 * Asynchronous variant of `Markdown`: awaits `processor.run`, so plugins
 * with asynchronous transformers are supported.
 *
 * @param {Options} options
 *   Props.
 * @returns
 *   Promise for a React element.
 */
async function MarkdownAsync(options) {
  const processor = createProcessor(options);
  const file = createFile(options);
  const parsedTree = processor.parse(file);
  const transformedTree = await processor.run(parsedTree, file);
  return post(transformedTree, options);
}
/**
 * React component that runs the unified processor asynchronously inside
 * `useEffect`, rendering `options.fallback` until a tree is available.
 * An error produced by the run is stored in state and rethrown on render.
 *
 * @param {Options} options
 *   Props (also accepts `fallback`, rendered while the run is pending).
 */
function MarkdownHooks(options) {
  const processor = createProcessor(options);
  // Error from the last run, if any; rethrown below during render.
  const [error, setError] = (0, import_react.useState)(
    /** @type {Error | undefined} */
    void 0
  );
  // Transformed tree produced by the last completed run.
  const [tree, setTree] = (0, import_react.useState)(
    /** @type {Root | undefined} */
    void 0
  );
  (0, import_react.useEffect)(
    function() {
      // Set by the cleanup function: prevents state updates after unmount
      // or after a newer effect run has superseded this one.
      let cancelled = false;
      const file = createFile(options);
      processor.run(processor.parse(file), file, function(error2, tree2) {
        if (!cancelled) {
          setError(error2);
          setTree(tree2);
        }
      });
      return function() {
        cancelled = true;
      };
    },
    // Re-run when the source text or the plugin configuration changes.
    [
      options.children,
      options.rehypePlugins,
      options.remarkPlugins,
      options.remarkRehypeOptions
    ]
  );
  if (error) throw error;
  // Until the first run completes, render the fallback (may be `undefined`).
  return tree ? post(tree, options) : options.fallback;
}
/**
 * Build the unified pipeline shared by all entry points:
 * remark-parse → user remark plugins → remark-rehype → user rehype plugins.
 *
 * `emptyRemarkRehypeOptions` is spread *after* the user options, so
 * `allowDangerousHtml` is always `true`; raw nodes are dealt with later, by
 * the `transform` visitor in `post`.
 *
 * @param {Options} options
 *   Props.
 * @returns
 *   Configured (unfrozen) processor.
 */
function createProcessor(options) {
  const remarkPlugins = options.remarkPlugins || emptyPlugins;
  const rehypePlugins = options.rehypePlugins || emptyPlugins;
  let remarkRehypeOptions = emptyRemarkRehypeOptions;
  if (options.remarkRehypeOptions) {
    remarkRehypeOptions = {
      ...options.remarkRehypeOptions,
      ...emptyRemarkRehypeOptions
    };
  }
  return unified().use(remarkParse).use(remarkPlugins).use(remarkRehype, remarkRehypeOptions).use(rehypePlugins);
}
/**
 * Create a `VFile` whose value is `options.children`.
 *
 * Only string children are supported; any other (truthy) value raises via
 * `unreachable`.
 *
 * @param {Options} options
 *   Props.
 * @returns
 *   New virtual file.
 */
function createFile(options) {
  const file = new VFile();
  const children = options.children || "";
  if (typeof children !== "string") {
    unreachable(
      "Unexpected value `" + children + "` for `children` prop, expected `string`"
    );
  } else {
    file.value = children;
  }
  return file;
}
/**
 * Post-process a transformed (hast) tree and render it to React elements.
 * Shared by `Markdown`, `MarkdownAsync`, and `MarkdownHooks`.
 *
 * Throws (via `unreachable`) when a prop from the `deprecations` table is
 * passed, or when `allowedElements` and `disallowedElements` are combined.
 * Mutates `tree` in place (URL sanitizing, raw-node handling, filtering)
 * before handing it to `toJsxRuntime`.
 *
 * @param {Root} tree
 *   Fully transformed tree.
 * @param {Options} options
 *   Props.
 * @returns
 *   React element.
 */
function post(tree, options) {
  const allowedElements = options.allowedElements;
  const allowElement = options.allowElement;
  const components = options.components;
  const disallowedElements = options.disallowedElements;
  const skipHtml = options.skipHtml;
  const unwrapDisallowed = options.unwrapDisallowed;
  const urlTransform = options.urlTransform || defaultUrlTransform;
  // Fail loudly on removed/renamed props, pointing at the changelog entry.
  for (const deprecation of deprecations) {
    if (Object.hasOwn(options, deprecation.from)) {
      unreachable(
        "Unexpected `" + deprecation.from + "` prop, " + (deprecation.to ? "use `" + deprecation.to + "` instead" : "remove it") + " (see <" + changelog + "#" + deprecation.id + "> for more info)"
      );
    }
  }
  // The allow-list and the deny-list are mutually exclusive.
  if (allowedElements && disallowedElements) {
    unreachable(
      "Unexpected combined `allowedElements` and `disallowedElements`, expected one or the other"
    );
  }
  visit(tree, transform);
  return toJsxRuntime(tree, {
    Fragment: import_jsx_runtime.Fragment,
    components,
    ignoreInvalidStyle: true,
    jsx: import_jsx_runtime.jsx,
    jsxs: import_jsx_runtime.jsxs,
    passKeys: true,
    passNode: true
  });
  // Visitor: handles raw HTML nodes, sanitizes URL-carrying attributes, and
  // applies element filtering. After a splice it returns `index` so the walk
  // resumes at the same position (siblings have shifted).
  function transform(node2, index, parent) {
    // Raw HTML in the tree: drop it entirely (`skipHtml`) or keep it as
    // literal text.
    if (node2.type === "raw" && parent && typeof index === "number") {
      if (skipHtml) {
        parent.children.splice(index, 1);
      } else {
        parent.children[index] = { type: "text", value: node2.value };
      }
      return index;
    }
    if (node2.type === "element") {
      let key;
      // Run every known URL attribute through `urlTransform`; a `null`
      // entry in `urlAttributes` means the attribute applies to all tags.
      for (key in urlAttributes) {
        if (Object.hasOwn(urlAttributes, key) && Object.hasOwn(node2.properties, key)) {
          const value = node2.properties[key];
          const test = urlAttributes[key];
          if (test === null || test.includes(node2.tagName)) {
            node2.properties[key] = urlTransform(String(value || ""), key, node2);
          }
        }
      }
    }
    if (node2.type === "element") {
      // `allowedElements` takes precedence when present; otherwise fall back
      // to `disallowedElements`; the `allowElement` callback can remove more.
      let remove = allowedElements ? !allowedElements.includes(node2.tagName) : disallowedElements ? disallowedElements.includes(node2.tagName) : false;
      if (!remove && allowElement && typeof index === "number") {
        remove = !allowElement(node2, index, parent);
      }
      if (remove && parent && typeof index === "number") {
        if (unwrapDisallowed && node2.children) {
          // Keep the children, dropping only the disallowed wrapper element.
          parent.children.splice(index, 1, ...node2.children);
        } else {
          parent.children.splice(index, 1);
        }
        return index;
      }
    }
  }
}
/**
 * Make a URL safe.
 *
 * Relative URLs pass through unchanged, as do absolute URLs whose protocol
 * matches `safeProtocol` (`http(s)`, `irc(s)`, `mailto`, `xmpp`); any other
 * protocol yields the empty string.
 *
 * @param {string} value
 *   URL to check.
 * @returns {string}
 *   The URL when safe, otherwise `""`.
 */
function defaultUrlTransform(value) {
  const colon = value.indexOf(":");
  const questionMark = value.indexOf("?");
  const numberSign = value.indexOf("#");
  const slash = value.indexOf("/");
  // No colon at all: a relative URL, always safe.
  if (colon === -1) {
    return value;
  }
  // A colon appearing after `/`, `?`, or `#` does not start a protocol.
  const colonIsNotProtocol = (slash !== -1 && colon > slash) || (questionMark !== -1 && colon > questionMark) || (numberSign !== -1 && colon > numberSign);
  if (colonIsNotProtocol) {
    return value;
  }
  // A real protocol: allow only the known-safe ones.
  return safeProtocol.test(value.slice(0, colon)) ? value : "";
}
export {
MarkdownAsync,
MarkdownHooks,
Markdown as default,
defaultUrlTransform
};
//# sourceMappingURL=react-markdown.js.map