codigo0/node_modules/micromark-extension-frontmatter/lib/syntax.js
planetazuzu 5d7a6500fe refactor: Fase 1 - Clean Architecture, refactorización modular y eliminación de duplicidades
-  Ticket 1.1: Estructura Clean Architecture en backend
-  Ticket 1.2: Schemas Zod compartidos
-  Ticket 1.3: Refactorización drugs.ts (1362 → 8 archivos modulares)
-  Ticket 1.4: Refactorización procedures.ts (3583 → 6 archivos modulares)
-  Ticket 1.5: Eliminación de duplicidades (~50 líneas)

Cambios principales:
- Creada estructura Clean Architecture en backend/src/
- Schemas Zod compartidos en backend/src/shared/schemas/
- Refactorización modular de drugs y procedures
- Utilidades genéricas en src/utils/ (filter, validation)
- Eliminados scripts obsoletos y documentación antigua
- Corregidos errores: QueryClient, import test-error-handling
- Build verificado y funcionando correctamente
2026-01-25 21:09:47 +01:00

395 lines
8.6 KiB
JavaScript
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

/**
* @typedef {import('micromark-util-types').Construct} Construct
* @typedef {import('micromark-util-types').ConstructRecord} ConstructRecord
* @typedef {import('micromark-util-types').Extension} Extension
* @typedef {import('micromark-util-types').State} State
* @typedef {import('micromark-util-types').TokenType} TokenType
* @typedef {import('micromark-util-types').TokenizeContext} TokenizeContext
* @typedef {import('micromark-util-types').Tokenizer} Tokenizer
*
* @typedef {import('./to-matters.js').Info} Info
* @typedef {import('./to-matters.js').Matter} Matter
* @typedef {import('./to-matters.js').Options} Options
*/
import {markdownLineEnding, markdownSpace} from 'micromark-util-character'
import {toMatters} from './to-matters.js'
/**
 * Create an extension for `micromark` to enable frontmatter syntax.
 *
 * @param {Options | null | undefined} [options='yaml']
 *   Configuration (default: `'yaml'`).
 * @returns {Extension}
 *   Extension for `micromark` that can be passed in `extensions`, to
 *   enable frontmatter syntax.
 */
export function frontmatter(options) {
  const matters = toMatters(options)
  /** @type {ConstructRecord} */
  const flow = {}

  // Register one construct per matter, keyed by the first character of its
  // opening fence, so micromark can dispatch on that code.
  for (const matter of matters) {
    const code = fence(matter, 'open').charCodeAt(0)
    const construct = createConstruct(matter)
    const existing = flow[code]

    if (Array.isArray(existing)) {
      // Another matter already starts with this character: share the slot.
      existing.push(construct)
    } else {
      // Never a single object, always an array.
      flow[code] = [construct]
    }
  }

  return {flow}
}
/**
 * Create a micromark construct that tokenizes one frontmatter flavor
 * (one `Matter`, e.g. YAML between `---` fences).
 *
 * Note: `buffer` and `bufferIndex` below are closure state shared between
 * the opening tokenizer and the (partial) closing-fence tokenizer; the
 * opening tokenizer swaps `buffer` to the close fence once the open fence
 * line is done.
 *
 * @param {Matter} matter
 * @returns {Construct}
 */
function createConstruct(matter) {
  const anywhere = matter.anywhere
  // Derive token type names from the matter type, e.g. `yaml`,
  // `yamlFence`, `yamlFenceSequence`, `yamlValue`.
  const frontmatterType = /** @type {TokenType} */ matter.type
  const fenceType = /** @type {TokenType} */ frontmatterType + 'Fence'
  const sequenceType = /** @type {TokenType} */ fenceType + 'Sequence'
  const valueType = /** @type {TokenType} */ frontmatterType + 'Value'
  // Partial construct: attempted before each content line to detect the
  // closing fence without committing to it.
  const closingFenceConstruct = {
    tokenize: tokenizeClosingFence,
    partial: true
  }
  /**
   * Fence to look for.
   *
   * @type {string}
   */
  let buffer
  let bufferIndex = 0
  return {
    tokenize: tokenizeFrontmatter,
    // Concrete: the content is not parsed as markdown.
    concrete: true
  }
  /**
   * @this {TokenizeContext}
   * @type {Tokenizer}
   */
  function tokenizeFrontmatter(effects, ok, nok) {
    const self = this
    return start
    /**
     * Start of frontmatter.
     *
     * ```markdown
     * > | ---
     *     ^
     *   | title: "Venus"
     *   | ---
     * ```
     *
     * @type {State}
     */
    function start(code) {
      const position = self.now()
      if (
        // Indent not allowed.
        position.column === 1 &&
        // Normally, only allowed in first line.
        (position.line === 1 || anywhere)
      ) {
        buffer = fence(matter, 'open')
        bufferIndex = 0
        if (code === buffer.charCodeAt(bufferIndex)) {
          effects.enter(frontmatterType)
          effects.enter(fenceType)
          effects.enter(sequenceType)
          return openSequence(code)
        }
      }
      return nok(code)
    }
    /**
     * In open sequence.
     *
     * ```markdown
     * > | ---
     *     ^
     *   | title: "Venus"
     *   | ---
     * ```
     *
     * @type {State}
     */
    function openSequence(code) {
      // Whole fence matched: done with the sequence.
      if (bufferIndex === buffer.length) {
        effects.exit(sequenceType)
        if (markdownSpace(code)) {
          effects.enter('whitespace')
          return openSequenceWhitespace(code)
        }
        return openAfter(code)
      }
      // Consume the next expected fence character (note: `bufferIndex`
      // advances even when the comparison fails, but on failure we `nok`
      // and the attempt is discarded, so the stale index is harmless).
      if (code === buffer.charCodeAt(bufferIndex++)) {
        effects.consume(code)
        return openSequence
      }
      return nok(code)
    }
    /**
     * In whitespace after open sequence.
     *
     * ```markdown
     * > | ---␠
     *        ^
     *   | title: "Venus"
     *   | ---
     * ```
     *
     * @type {State}
     */
    function openSequenceWhitespace(code) {
      if (markdownSpace(code)) {
        effects.consume(code)
        return openSequenceWhitespace
      }
      effects.exit('whitespace')
      return openAfter(code)
    }
    /**
     * After open sequence.
     *
     * ```markdown
     * > | ---
     *        ^
     *   | title: "Venus"
     *   | ---
     * ```
     *
     * @type {State}
     */
    function openAfter(code) {
      if (markdownLineEnding(code)) {
        effects.exit(fenceType)
        effects.enter('lineEnding')
        effects.consume(code)
        effects.exit('lineEnding')
        // Get ready for closing fence: from here on the shared `buffer`
        // holds the close fence.
        buffer = fence(matter, 'close')
        bufferIndex = 0
        // Each line is either the closing fence or content.
        return effects.attempt(closingFenceConstruct, after, contentStart)
      }
      // EOF is not okay.
      return nok(code)
    }
    /**
     * Start of content chunk.
     *
     * ```markdown
     *   | ---
     * > | title: "Venus"
     *     ^
     *   | ---
     * ```
     *
     * @type {State}
     */
    function contentStart(code) {
      // Empty line: no value token to open.
      if (code === null || markdownLineEnding(code)) {
        return contentEnd(code)
      }
      effects.enter(valueType)
      return contentInside(code)
    }
    /**
     * In content chunk.
     *
     * ```markdown
     *   | ---
     * > | title: "Venus"
     *     ^
     *   | ---
     * ```
     *
     * @type {State}
     */
    function contentInside(code) {
      if (code === null || markdownLineEnding(code)) {
        effects.exit(valueType)
        return contentEnd(code)
      }
      effects.consume(code)
      return contentInside
    }
    /**
     * End of content chunk.
     *
     * ```markdown
     *   | ---
     * > | title: "Venus"
     *                   ^
     *   | ---
     * ```
     *
     * @type {State}
     */
    function contentEnd(code) {
      // Require a closing fence: EOF inside frontmatter fails the whole
      // construct.
      if (code === null) {
        return nok(code)
      }
      // Can only be an eol.
      effects.enter('lineEnding')
      effects.consume(code)
      effects.exit('lineEnding')
      return effects.attempt(closingFenceConstruct, after, contentStart)
    }
    /**
     * After frontmatter.
     *
     * ```markdown
     *   | ---
     *   | title: "Venus"
     * > | ---
     *        ^
     * ```
     *
     * @type {State}
     */
    function after(code) {
      // `code` must be eol/eof.
      effects.exit(frontmatterType)
      return ok(code)
    }
  }
  /**
   * Partial tokenizer for the closing fence line; succeeds only when the
   * line is exactly the close fence (plus optional trailing whitespace).
   *
   * @type {Tokenizer}
   */
  function tokenizeClosingFence(effects, ok, nok) {
    // Local index; shadows the outer `bufferIndex` so a failed attempt
    // does not disturb the outer state.
    let bufferIndex = 0
    return closeStart
    /**
     * Start of close sequence.
     *
     * ```markdown
     *   | ---
     *   | title: "Venus"
     * > | ---
     *     ^
     * ```
     *
     * @type {State}
     */
    function closeStart(code) {
      if (code === buffer.charCodeAt(bufferIndex)) {
        effects.enter(fenceType)
        effects.enter(sequenceType)
        return closeSequence(code)
      }
      return nok(code)
    }
    /**
     * In close sequence.
     *
     * ```markdown
     *   | ---
     *   | title: "Venus"
     * > | ---
     *      ^
     * ```
     *
     * @type {State}
     */
    function closeSequence(code) {
      if (bufferIndex === buffer.length) {
        effects.exit(sequenceType)
        if (markdownSpace(code)) {
          effects.enter('whitespace')
          return closeSequenceWhitespace(code)
        }
        return closeAfter(code)
      }
      if (code === buffer.charCodeAt(bufferIndex++)) {
        effects.consume(code)
        return closeSequence
      }
      return nok(code)
    }
    /**
     * In whitespace after close sequence.
     *
     * ```markdown
     *   | ---
     *   | title: "Venus"
     *   | ---␠
     *        ^
     * ```
     *
     * @type {State}
     */
    function closeSequenceWhitespace(code) {
      if (markdownSpace(code)) {
        effects.consume(code)
        return closeSequenceWhitespace
      }
      effects.exit('whitespace')
      return closeAfter(code)
    }
    /**
     * After close sequence.
     *
     * ```markdown
     *   | ---
     *   | title: "Venus"
     * > | ---
     *        ^
     * ```
     *
     * @type {State}
     */
    function closeAfter(code) {
      // The close fence must be followed by eol or eof; anything else
      // means this line is content, not a fence.
      if (code === null || markdownLineEnding(code)) {
        effects.exit(fenceType)
        return ok(code)
      }
      return nok(code)
    }
  }
}
/**
 * Get the full fence string (open or close) for a matter.
 *
 * A `marker` is a single character repeated three times (as in `---`); a
 * `fence` is used verbatim.
 *
 * @param {Matter} matter
 * @param {'close' | 'open'} prop
 * @returns {string}
 */
function fence(matter, prop) {
  if (matter.marker) {
    return pick(matter.marker, prop).repeat(3)
  }

  // @ts-expect-error: `marker` and `fence` are mutually exclusive.
  return pick(matter.fence, prop)
}
/**
 * Resolve a schema to a concrete string: a plain string applies to both
 * sides; an info object has distinct `open`/`close` values.
 *
 * @param {Info | string} schema
 * @param {'close' | 'open'} prop
 * @returns {string}
 */
function pick(schema, prop) {
  if (typeof schema === 'string') {
    return schema
  }

  return schema[prop]
}